Detailed changes
@@ -0,0 +1,26 @@
+name: "Run tests on Windows"
+description: "Runs the tests on Windows"
+
+inputs:
+ working-directory:
+ description: "The working directory"
+ required: true
+ default: "."
+
+runs:
+ using: "composite"
+ steps:
+ - name: Install cargo-nextest
+ shell: pwsh
+ working-directory: ${{ inputs.working-directory }}
+ run: cargo install cargo-nextest --locked
+
+ - name: Install Node
+ uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
+ with:
+ node-version: "18"
+
+ - name: Run tests
+ shell: pwsh
+ working-directory: ${{ inputs.working-directory }}
+ run: cargo nextest run --workspace --no-fail-fast
@@ -228,7 +228,6 @@ jobs:
if: always()
run: rm -rf ./../.cargo
- # todo(windows): Actually run the tests
windows_tests:
timeout-minutes: 60
name: (Windows) Run Clippy and tests
@@ -269,10 +268,20 @@ jobs:
# Windows can't run shell scripts, so we need to use `cargo xtask`.
run: cargo xtask clippy
+ - name: Run tests
+ uses: ./.github/actions/run_tests_windows
+ with:
+ working-directory: ${{ env.ZED_WORKSPACE }}
+
- name: Build Zed
working-directory: ${{ env.ZED_WORKSPACE }}
run: cargo build
+ - name: Check dev drive space
+ working-directory: ${{ env.ZED_WORKSPACE }}
+ # `setup-dev-driver.ps1` creates a 100GB drive, with CI taking up ~45GB of the drive.
+ run: ./script/exit-ci-if-dev-drive-is-full.ps1 95
+
# Since the Windows runners are stateful, so we need to remove the config file to prevent potential bug.
- name: Clean CI config file
if: always()
@@ -63,3 +63,10 @@ jobs:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy .cloudflare/docs-proxy/src/worker.js
+
+ - name: Preserve Wrangler logs
+ uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4
+ if: always()
+ with:
+ name: wrangler_logs
+ path: /home/runner/.config/.wrangler/logs/
@@ -52,3 +52,9 @@ Zed is made up of several smaller crates - let's go over those you're most likel
- [`rpc`](/crates/rpc) defines messages to be exchanged with collaboration server.
- [`theme`](/crates/theme) defines the theme system and provides a default theme.
- [`ui`](/crates/ui) is a collection of UI components and common patterns used throughout Zed.
+- [`cli`](/crates/cli) is the CLI crate which invokes the Zed binary.
+- [`zed`](/crates/zed) is where all things come together, and the `main` entry point for Zed.
+
+## Packaging Zed
+
+Check our [notes for packaging Zed](https://zed.dev/docs/development/linux#notes-for-packaging-zed).
@@ -631,7 +631,7 @@ dependencies = [
"smol",
"terminal_view",
"text",
- "toml 0.8.19",
+ "toml 0.8.20",
"ui",
"util",
"workspace",
@@ -1001,21 +1001,20 @@ checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de"
[[package]]
name = "async-tls"
version = "0.13.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b2ae3c9eba89d472a0e4fe1dea433df78fbbe63d2b764addaf2ba3a6bde89a5e"
+source = "git+https://github.com/zed-industries/async-tls?rev=1e759a4b5e370f87dc15e40756ac4f8815b61d9d#1e759a4b5e370f87dc15e40756ac4f8815b61d9d"
dependencies = [
"futures-core",
"futures-io",
- "rustls 0.21.12",
- "rustls-pemfile 1.0.4",
- "webpki-roots 0.22.6",
+ "rustls 0.23.22",
+ "rustls-pemfile 2.2.0",
+ "webpki-roots",
]
[[package]]
name = "async-trait"
-version = "0.1.85"
+version = "0.1.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056"
+checksum = "644dd749086bf3771a2fbc5f256fdb982d53f011c7d5d560304eafeecebce79d"
dependencies = [
"proc-macro2",
"quote",
@@ -1068,7 +1067,7 @@ version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a860072022177f903e59730004fb5dc13db9275b79bb2aef7ba8ce831956c233"
dependencies = [
- "bytes 1.9.0",
+ "bytes 1.10.0",
"futures-sink",
"futures-util",
"memchr",
@@ -1182,9 +1181,9 @@ dependencies = [
[[package]]
name = "aws-config"
-version = "1.5.15"
+version = "1.5.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc47e70fc35d054c8fcd296d47a61711f043ac80534a10b4f741904f81e73a90"
+checksum = "50236e4d60fe8458de90a71c0922c761e41755adf091b1b03de1cef537179915"
dependencies = [
"aws-credential-types",
"aws-runtime",
@@ -1198,7 +1197,7 @@ dependencies = [
"aws-smithy-runtime-api",
"aws-smithy-types",
"aws-types",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"fastrand 2.3.0",
"hex",
"http 0.2.12",
@@ -1222,11 +1221,36 @@ dependencies = [
"zeroize",
]
+[[package]]
+name = "aws-lc-rs"
+version = "1.12.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c2b7ddaa2c56a367ad27a094ad8ef4faacf8a617c2575acb2ba88949df999ca"
+dependencies = [
+ "aws-lc-sys",
+ "paste",
+ "zeroize",
+]
+
+[[package]]
+name = "aws-lc-sys"
+version = "0.25.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "71b2ddd3ada61a305e1d8bb6c005d1eaa7d14d903681edfc400406d523a9b491"
+dependencies = [
+ "bindgen 0.69.5",
+ "cc",
+ "cmake",
+ "dunce",
+ "fs_extra",
+ "paste",
+]
+
[[package]]
name = "aws-runtime"
-version = "1.5.4"
+version = "1.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bee7643696e7fdd74c10f9eb42848a87fe469d35eae9c3323f80aa98f350baac"
+checksum = "76dd04d39cc12844c0994f2c9c5a6f5184c22e9188ec1ff723de41910a21dcad"
dependencies = [
"aws-credential-types",
"aws-sigv4",
@@ -1237,7 +1261,7 @@ dependencies = [
"aws-smithy-runtime-api",
"aws-smithy-types",
"aws-types",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"fastrand 2.3.0",
"http 0.2.12",
"http-body 0.4.6",
@@ -1250,9 +1274,9 @@ dependencies = [
[[package]]
name = "aws-sdk-kinesis"
-version = "1.59.0"
+version = "1.60.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7963cf7a0f49ba4f8351044751f4d42c003c4a5f31d9e084f0d0e68b6fb8b8cf"
+checksum = "9b8052335b6ba19b08ba2b363c7505f8ed34074ac23fa14a652ff6a0a02a4c06"
dependencies = [
"aws-credential-types",
"aws-runtime",
@@ -1263,7 +1287,7 @@ dependencies = [
"aws-smithy-runtime-api",
"aws-smithy-types",
"aws-types",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"http 0.2.12",
"once_cell",
"regex-lite",
@@ -1272,9 +1296,9 @@ dependencies = [
[[package]]
name = "aws-sdk-s3"
-version = "1.72.0"
+version = "1.73.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1c7ce6d85596c4bcb3aba8ad5bb134b08e204c8a475c9999c1af9290f80aa8ad"
+checksum = "3978e0a211bdc5cddecfd91fb468665a662a27fbdaef39ddf36a2a18fef12cb4"
dependencies = [
"aws-credential-types",
"aws-runtime",
@@ -1289,7 +1313,7 @@ dependencies = [
"aws-smithy-types",
"aws-smithy-xml",
"aws-types",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"fastrand 2.3.0",
"hex",
"hmac",
@@ -1306,9 +1330,9 @@ dependencies = [
[[package]]
name = "aws-sdk-sso"
-version = "1.57.0"
+version = "1.58.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c54bab121fe1881a74c338c5f723d1592bf3b53167f80268a1274f404e1acc38"
+checksum = "16ff718c9ee45cc1ebd4774a0e086bb80a6ab752b4902edf1c9f56b86ee1f770"
dependencies = [
"aws-credential-types",
"aws-runtime",
@@ -1319,7 +1343,7 @@ dependencies = [
"aws-smithy-runtime-api",
"aws-smithy-types",
"aws-types",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"http 0.2.12",
"once_cell",
"regex-lite",
@@ -1328,9 +1352,9 @@ dependencies = [
[[package]]
name = "aws-sdk-ssooidc"
-version = "1.58.0"
+version = "1.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c8234fd024f7ac61c4e44ea008029bde934250f371efe7d4a39708397b1080c"
+checksum = "5183e088715cc135d8d396fdd3bc02f018f0da4c511f53cb8d795b6a31c55809"
dependencies = [
"aws-credential-types",
"aws-runtime",
@@ -1341,7 +1365,7 @@ dependencies = [
"aws-smithy-runtime-api",
"aws-smithy-types",
"aws-types",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"http 0.2.12",
"once_cell",
"regex-lite",
@@ -1350,9 +1374,9 @@ dependencies = [
[[package]]
name = "aws-sdk-sts"
-version = "1.58.0"
+version = "1.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba60e1d519d6f23a9df712c04fdeadd7872ac911c84b2f62a8bda92e129b7962"
+checksum = "c9f944ef032717596639cea4a2118a3a457268ef51bbb5fde9637e54c465da00"
dependencies = [
"aws-credential-types",
"aws-runtime",
@@ -1373,16 +1397,16 @@ dependencies = [
[[package]]
name = "aws-sigv4"
-version = "1.2.7"
+version = "1.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "690118821e46967b3c4501d67d7d52dd75106a9c54cf36cefa1985cedbe94e05"
+checksum = "0bc5bbd1e4a2648fd8c5982af03935972c24a2f9846b396de661d351ee3ce837"
dependencies = [
"aws-credential-types",
"aws-smithy-eventstream",
"aws-smithy-http",
"aws-smithy-runtime-api",
"aws-smithy-types",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"crypto-bigint 0.5.5",
"form_urlencoded",
"hex",
@@ -1419,7 +1443,7 @@ checksum = "f2f45a1c384d7a393026bc5f5c177105aa9fa68e4749653b985707ac27d77295"
dependencies = [
"aws-smithy-http",
"aws-smithy-types",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"crc32c",
"crc32fast",
"crc64fast-nvme",
@@ -1440,7 +1464,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b18559a41e0c909b77625adf2b8c50de480a8041e5e4a3f5f7d177db70abc5a"
dependencies = [
"aws-smithy-types",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"crc32fast",
]
@@ -1453,7 +1477,7 @@ dependencies = [
"aws-smithy-eventstream",
"aws-smithy-runtime-api",
"aws-smithy-types",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"bytes-utils",
"futures-core",
"http 0.2.12",
@@ -1486,15 +1510,15 @@ dependencies = [
[[package]]
name = "aws-smithy-runtime"
-version = "1.7.7"
+version = "1.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "865f7050bbc7107a6c98a397a9fcd9413690c27fa718446967cf03b2d3ac517e"
+checksum = "d526a12d9ed61fadefda24abe2e682892ba288c2018bcb38b1b4c111d13f6d92"
dependencies = [
"aws-smithy-async",
"aws-smithy-http",
"aws-smithy-runtime-api",
"aws-smithy-types",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"fastrand 2.3.0",
"h2 0.3.26",
"http 0.2.12",
@@ -1519,7 +1543,7 @@ checksum = "92165296a47a812b267b4f41032ff8069ab7ff783696d217f0994a0d7ab585cd"
dependencies = [
"aws-smithy-async",
"aws-smithy-types",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"http 0.2.12",
"http 1.2.0",
"pin-project-lite",
@@ -1530,12 +1554,12 @@ dependencies = [
[[package]]
name = "aws-smithy-types"
-version = "1.2.12"
+version = "1.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a28f6feb647fb5e0d5b50f0472c19a7db9462b74e2fec01bb0b44eedcc834e97"
+checksum = "c7b8a53819e42f10d0821f56da995e1470b199686a1809168db6ca485665f042"
dependencies = [
"base64-simd",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"bytes-utils",
"futures-core",
"http 0.2.12",
@@ -1565,9 +1589,9 @@ dependencies = [
[[package]]
name = "aws-types"
-version = "1.3.4"
+version = "1.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b0df5a18c4f951c645300d365fec53a61418bcf4650f604f85fe2a665bfaa0c2"
+checksum = "dfbd0a668309ec1f66c0f6bda4840dd6d4796ae26d699ebc266d7cc95c6d040f"
dependencies = [
"aws-credential-types",
"aws-smithy-async",
@@ -1587,7 +1611,7 @@ dependencies = [
"axum-core",
"base64 0.21.7",
"bitflags 1.3.2",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"futures-util",
"headers",
"http 0.2.12",
@@ -1620,7 +1644,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c"
dependencies = [
"async-trait",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"futures-util",
"http 0.2.12",
"http-body 0.4.6",
@@ -1637,7 +1661,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9a320103719de37b7b4da4c8eb629d4573f6bcfd3dfe80d3208806895ccf81d"
dependencies = [
"axum",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"futures-util",
"http 0.2.12",
"mime",
@@ -1729,6 +1753,29 @@ dependencies = [
"serde",
]
+[[package]]
+name = "bindgen"
+version = "0.69.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088"
+dependencies = [
+ "bitflags 2.8.0",
+ "cexpr",
+ "clang-sys",
+ "itertools 0.12.1",
+ "lazy_static",
+ "lazycell",
+ "log",
+ "prettyplease",
+ "proc-macro2",
+ "quote",
+ "regex",
+ "rustc-hash 1.1.0",
+ "shlex",
+ "syn 2.0.90",
+ "which 4.4.2",
+]
+
[[package]]
name = "bindgen"
version = "0.70.1"
@@ -2061,9 +2108,9 @@ dependencies = [
[[package]]
name = "bytes"
-version = "1.9.0"
+version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b"
+checksum = "f61dac84819c6588b558454b194026eb1f09c293b9036ae9b159e74e73ab6cf9"
[[package]]
name = "bytes-utils"
@@ -2071,7 +2118,7 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dafe3a8757b027e2be6e4e5601ed563c55989fcf1546e933c66c8eb3a058d35"
dependencies = [
- "bytes 1.9.0",
+ "bytes 1.10.0",
"either",
]
@@ -2264,7 +2311,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fbd1fe9db3ebf71b89060adaf7b0504c2d6a425cf061313099547e382c2e472"
dependencies = [
"serde",
- "toml 0.8.19",
+ "toml 0.8.20",
]
[[package]]
@@ -2298,7 +2345,7 @@ dependencies = [
"serde_json",
"syn 2.0.90",
"tempfile",
- "toml 0.8.19",
+ "toml 0.8.20",
]
[[package]]
@@ -2316,7 +2363,7 @@ dependencies = [
"serde_json",
"syn 2.0.90",
"tempfile",
- "toml 0.8.19",
+ "toml 0.8.20",
]
[[package]]
@@ -2468,9 +2515,9 @@ dependencies = [
[[package]]
name = "clap"
-version = "4.5.23"
+version = "4.5.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84"
+checksum = "3e77c3243bd94243c03672cb5154667347c457ca271254724f9f393aee1c05ff"
dependencies = [
"clap_builder",
"clap_derive",
@@ -2478,9 +2525,9 @@ dependencies = [
[[package]]
name = "clap_builder"
-version = "4.5.23"
+version = "4.5.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838"
+checksum = "1b26884eb4b57140e4d2d93652abfa49498b938b3c9179f9fc487b0acc3edad7"
dependencies = [
"anstream",
"anstyle",
@@ -2500,9 +2547,9 @@ dependencies = [
[[package]]
name = "clap_derive"
-version = "4.5.18"
+version = "4.5.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab"
+checksum = "bf4ced95c6f4a675af3da73304b9ac4ed991640c36374e4b46795c49e17cf1ed"
dependencies = [
"heck 0.5.0",
"proc-macro2",
@@ -2560,7 +2607,7 @@ dependencies = [
"rand 0.8.5",
"release_channel",
"rpc",
- "rustls 0.21.12",
+ "rustls 0.23.22",
"rustls-native-certs 0.8.1",
"schemars",
"serde",
@@ -2590,6 +2637,15 @@ dependencies = [
"smallvec",
]
+[[package]]
+name = "cmake"
+version = "0.1.53"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e24a03c8b52922d68a1589ad61032f2c1aa5a8158d2aa0d93c6e9534944bbad6"
+dependencies = [
+ "cc",
+]
+
[[package]]
name = "cobs"
version = "0.2.3"
@@ -2697,6 +2753,7 @@ dependencies = [
"ctor",
"dashmap 6.1.0",
"derive_more",
+ "diff 0.1.0",
"editor",
"env_logger 0.11.6",
"envy",
@@ -2761,7 +2818,7 @@ dependencies = [
"thiserror 1.0.69",
"time",
"tokio",
- "toml 0.8.19",
+ "toml 0.8.20",
"tower",
"tower-http 0.4.4",
"tracing",
@@ -2822,7 +2879,7 @@ name = "collections"
version = "0.1.0"
dependencies = [
"indexmap",
- "rustc-hash 2.1.0",
+ "rustc-hash 2.1.1",
]
[[package]]
@@ -2843,7 +2900,7 @@ version = "4.6.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd"
dependencies = [
- "bytes 1.9.0",
+ "bytes 1.10.0",
"memchr",
]
@@ -3162,7 +3219,7 @@ version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ce857aa0b77d77287acc1ac3e37a05a8c95a2af3647d23b15f263bdaeb7562b"
dependencies = [
- "bindgen",
+ "bindgen 0.70.1",
]
[[package]]
@@ -3719,9 +3776,9 @@ dependencies = [
[[package]]
name = "derive_more"
-version = "0.99.18"
+version = "0.99.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce"
+checksum = "3da29a38df43d6f156149c9b43ded5e018ddff2a855cf2cfd62e8cd7d079c69f"
dependencies = [
"convert_case 0.4.0",
"proc-macro2",
@@ -3781,6 +3838,24 @@ dependencies = [
"zeroize",
]
+[[package]]
+name = "diff"
+version = "0.1.0"
+dependencies = [
+ "futures 0.3.31",
+ "git2",
+ "gpui",
+ "language",
+ "log",
+ "pretty_assertions",
+ "rope",
+ "serde_json",
+ "sum_tree",
+ "text",
+ "unindent",
+ "util",
+]
+
[[package]]
name = "diff"
version = "0.1.13"
@@ -3951,6 +4026,7 @@ dependencies = [
"convert_case 0.7.1",
"ctor",
"db",
+ "diff 0.1.0",
"emojis",
"env_logger 0.11.6",
"file_icons",
@@ -4056,7 +4132,7 @@ dependencies = [
"cc",
"memchr",
"rustc_version",
- "toml 0.8.19",
+ "toml 0.8.20",
"vswhom",
"winreg 0.52.0",
]
@@ -4347,7 +4423,7 @@ dependencies = [
"semantic_version",
"serde",
"serde_json",
- "toml 0.8.19",
+ "toml 0.8.20",
"util",
"wasm-encoder 0.215.0",
"wasmparser 0.215.0",
@@ -4371,7 +4447,7 @@ dependencies = [
"serde_json",
"theme",
"tokio",
- "toml 0.8.19",
+ "toml 0.8.20",
"tree-sitter",
"wasmtime",
]
@@ -4416,7 +4492,7 @@ dependencies = [
"tempfile",
"theme",
"theme_extension",
- "toml 0.8.19",
+ "toml 0.8.20",
"url",
"util",
"wasmparser 0.215.0",
@@ -4434,6 +4510,7 @@ dependencies = [
"db",
"editor",
"extension_host",
+ "feature_flags",
"fs",
"fuzzy",
"gpui",
@@ -4879,6 +4956,12 @@ dependencies = [
"winapi",
]
+[[package]]
+name = "fs_extra"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c"
+
[[package]]
name = "fsevent"
version = "0.1.0"
@@ -5243,6 +5326,7 @@ dependencies = [
"anyhow",
"collections",
"db",
+ "diff 0.1.0",
"editor",
"feature_flags",
"futures 0.3.31",
@@ -5251,10 +5335,10 @@ dependencies = [
"language",
"menu",
"multi_buffer",
+ "panel",
"picker",
"postage",
"project",
- "rpc",
"schemars",
"serde",
"serde_derive",
@@ -5387,7 +5471,7 @@ dependencies = [
"ashpd",
"async-task",
"backtrace",
- "bindgen",
+ "bindgen 0.70.1",
"blade-graphics",
"blade-macros",
"blade-util",
@@ -5511,7 +5595,7 @@ version = "0.3.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8"
dependencies = [
- "bytes 1.9.0",
+ "bytes 1.10.0",
"fnv",
"futures-core",
"futures-sink",
@@ -5531,7 +5615,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e"
dependencies = [
"atomic-waker",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"fnv",
"futures-core",
"futures-sink",
@@ -5647,7 +5731,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270"
dependencies = [
"base64 0.21.7",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"headers-core",
"http 0.2.12",
"httpdate",
@@ -5826,7 +5910,7 @@ version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1"
dependencies = [
- "bytes 1.9.0",
+ "bytes 1.10.0",
"fnv",
"itoa",
]
@@ -5837,7 +5921,7 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea"
dependencies = [
- "bytes 1.9.0",
+ "bytes 1.10.0",
"fnv",
"itoa",
]
@@ -5848,7 +5932,7 @@ version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2"
dependencies = [
- "bytes 1.9.0",
+ "bytes 1.10.0",
"http 0.2.12",
"pin-project-lite",
]
@@ -5859,7 +5943,7 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
dependencies = [
- "bytes 1.9.0",
+ "bytes 1.10.0",
"http 1.2.0",
]
@@ -5869,7 +5953,7 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f"
dependencies = [
- "bytes 1.9.0",
+ "bytes 1.10.0",
"futures-util",
"http 1.2.0",
"http-body 1.0.1",
@@ -5908,7 +5992,7 @@ name = "http_client"
version = "0.1.0"
dependencies = [
"anyhow",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"derive_more",
"futures 0.3.31",
"http 1.2.0",
@@ -5948,7 +6032,7 @@ version = "0.14.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7"
dependencies = [
- "bytes 1.9.0",
+ "bytes 1.10.0",
"futures-channel",
"futures-core",
"futures-util",
@@ -5972,7 +6056,7 @@ version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f"
dependencies = [
- "bytes 1.9.0",
+ "bytes 1.10.0",
"futures-channel",
"futures-util",
"h2 0.4.7",
@@ -6012,7 +6096,7 @@ dependencies = [
"http 1.2.0",
"hyper 1.5.1",
"hyper-util",
- "rustls 0.23.20",
+ "rustls 0.23.22",
"rustls-native-certs 0.8.1",
"rustls-pki-types",
"tokio",
@@ -6026,7 +6110,7 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905"
dependencies = [
- "bytes 1.9.0",
+ "bytes 1.10.0",
"hyper 0.14.32",
"native-tls",
"tokio",
@@ -6039,7 +6123,7 @@ version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4"
dependencies = [
- "bytes 1.9.0",
+ "bytes 1.10.0",
"futures-channel",
"futures-util",
"http 1.2.0",
@@ -6279,6 +6363,8 @@ dependencies = [
"file_icons",
"gpui",
"project",
+ "schemars",
+ "serde",
"settings",
"theme",
"ui",
@@ -6365,6 +6451,7 @@ version = "0.1.0"
dependencies = [
"gpui",
"language",
+ "project",
]
[[package]]
@@ -6385,9 +6472,11 @@ dependencies = [
"lsp",
"paths",
"project",
+ "regex",
"serde_json",
"settings",
"supermaven",
+ "telemetry",
"theme",
"ui",
"workspace",
@@ -6689,7 +6778,7 @@ checksum = "c9ae6296f9476658b3550293c113996daf75fa542cd8d078abb4c60207bded14"
dependencies = [
"anyhow",
"async-trait",
- "bytes 1.9.0",
+ "bytes 1.10.0",
"chrono",
"futures 0.3.31",
"serde",
@@ -7000,13 +7089,14 @@ dependencies = [
"task",
"text",
"theme",
- "toml 0.8.19",
+ "toml 0.8.20",
"tree-sitter",
"tree-sitter-bash",
"tree-sitter-c",
"tree-sitter-cpp",
"tree-sitter-css",
"tree-sitter-diff",
+ "tree-sitter-gitcommit",
"tree-sitter-go",
"tree-sitter-gomod",
"tree-sitter-gowork",
@@ -7032,6 +7122,12 @@ dependencies = [
"spin",
]
+[[package]]
+name = "lazycell"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
+
[[package]]
name = "leb128"
version = "0.2.5"
@@ -7088,7 +7184,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
dependencies = [
"cfg-if",
- "windows-targets 0.52.6",
+ "windows-targets 0.48.5",
]
[[package]]
@@ -7424,7 +7520,7 @@ dependencies = [
[[package]]
name = "lsp-types"
version = "0.95.1"
-source = "git+https://github.com/zed-industries/lsp-types?rev=72357d6f6d212bdffba3b5ef4b31d8ca856058e7#72357d6f6d212bdffba3b5ef4b31d8ca856058e7"
+source = "git+https://github.com/zed-industries/lsp-types?rev=1fff0dd12e2071c5667327394cfec163d2a466ab#1fff0dd12e2071c5667327394cfec163d2a466ab"
dependencies = [
"bitflags 1.3.2",
"serde",
@@ -7676,7 +7772,7 @@ name = "media"
version = "0.1.0"
dependencies = [
"anyhow",
- "bindgen",
+ "bindgen 0.70.1",
"core-foundation 0.9.4",
"ctor",
"foreign-types 0.5.0",
@@ -7740,6 +7836,17 @@ dependencies = [
"paste",
]
+[[package]]
+name = "migrator"
+version = "0.1.0"
+dependencies = [
+ "collections",
+ "convert_case 0.7.1",
+ "pretty_assertions",
+ "tree-sitter",
+ "tree-sitter-json",
+]
+
[[package]]
name = "mimalloc"
version = "0.1.43"
@@ -7837,9 +7944,9 @@ dependencies = [
"clock",
"collections",
"ctor",
+ "diff 0.1.0",
"env_logger 0.11.6",
"futures 0.3.31",
- "git",
"gpui",
"indoc",
"itertools 0.14.0",
@@ -8664,9 +8771,9 @@ dependencies = [
[[package]]
name = "openssl"
-version = "0.10.68"
+version = "0.10.70"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5"
+checksum = "61cfb4e166a8bb8c9b55c500bc2308550148ece889be90f609377e58140f42c6"
dependencies = [
"bitflags 2.8.0",
"cfg-if",
@@ -8705,9 +8812,9 @@ dependencies = [
[[package]]
name = "openssl-sys"
-version = "0.9.104"
+version = "0.9.105"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741"
+checksum = "8b22d5b84be05a8d6947c7cb71f7c849aa0f112acd4bf51c2a7c1c988ac0a9dc"
dependencies = [
"cc",
"libc",
@@ -8889,6 +8996,15 @@ dependencies = [
"syn 2.0.90",
]
+[[package]]
+name = "panel"
+version = "0.1.0"
+dependencies = [
+ "gpui",
+ "ui",
+ "workspace",
+]
+
[[package]]
name = "parity-tokio-ipc"
version = "0.9.0"
@@ -9021,7 +9137,7 @@ version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "18f596653ba4ac51bdecbb4ef6773bc7f56042dc13927910de1684ad3d32aa12"
dependencies = [
- "bytes 1.9.0",
+ "bytes 1.10.0",
"chrono",
"pbjson",
"pbjson-build",
@@ -9365,7 +9481,7 @@ dependencies = [
"serde",
"serde_json",
"sha2",
- "toml 0.8.19",
+ "toml 0.8.20",
]
[[package]]
@@ -9837,7 +9953,7 @@ version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d"
dependencies = [
- "diff",
+ "diff 0.1.13",
"yansi",
]
@@ -9933,6 +10049,7 @@ dependencies = [
"client",
"clock",
"collections",
+ "diff 0.1.0",
"env_logger 0.11.6",
"fancy-regex 0.14.0",
"fs",
@@ -9975,7 +10092,7 @@ dependencies = [
"tempfile",
"terminal",
"text",
- "toml 0.8.19",
+ "toml 0.8.20",
"unindent",
"url",
"util",
@@ -10093,7 +10210,7 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "444879275cb4fd84958b1a1d5420d15e6fcf7c235fe47f053c9c2a80aceb6001"
dependencies = [
- "bytes 1.9.0",
+ "bytes 1.10.0",
"prost-derive 0.9.0",
]
@@ -10103,7 +10220,7 @@ version = "0.12.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29"
dependencies = [
- "bytes 1.9.0",
+ "bytes 1.10.0",
"prost-derive 0.12.6",
]
@@ -32,6 +32,7 @@ members = [
"crates/db",
"crates/deepseek",
"crates/diagnostics",
+ "crates/diff",
"crates/docs_preprocessor",
"crates/editor",
"crates/evals",
@@ -80,6 +81,7 @@ members = [
"crates/markdown_preview",
"crates/media",
"crates/menu",
+ "crates/migrator",
"crates/multi_buffer",
"crates/node_runtime",
"crates/notifications",
@@ -87,6 +89,7 @@ members = [
"crates/open_ai",
"crates/outline",
"crates/outline_panel",
+ "crates/panel",
"crates/paths",
"crates/picker",
"crates/prettier",
@@ -103,7 +106,6 @@ members = [
"crates/remote_server",
"crates/repl",
"crates/reqwest_client",
- "crates/reqwest_client",
"crates/rich_text",
"crates/rope",
"crates/rpc",
@@ -144,6 +146,7 @@ members = [
"crates/ui_input",
"crates/ui_macros",
"crates/util",
+ "crates/util_macros",
"crates/vcs_menu",
"crates/vim",
"crates/vim_mode_setting",
@@ -230,6 +233,7 @@ copilot = { path = "crates/copilot" }
db = { path = "crates/db" }
deepseek = { path = "crates/deepseek" }
diagnostics = { path = "crates/diagnostics" }
+diff = { path = "crates/diff" }
editor = { path = "crates/editor" }
extension = { path = "crates/extension" }
extension_host = { path = "crates/extension_host" }
@@ -242,8 +246,8 @@ fs = { path = "crates/fs" }
fsevent = { path = "crates/fsevent" }
fuzzy = { path = "crates/fuzzy" }
git = { path = "crates/git" }
-git_ui = { path = "crates/git_ui" }
git_hosting_providers = { path = "crates/git_hosting_providers" }
+git_ui = { path = "crates/git_ui" }
go_to_line = { path = "crates/go_to_line" }
google_ai = { path = "crates/google_ai" }
gpui = { path = "crates/gpui", default-features = false, features = [
@@ -276,6 +280,7 @@ markdown = { path = "crates/markdown" }
markdown_preview = { path = "crates/markdown_preview" }
media = { path = "crates/media" }
menu = { path = "crates/menu" }
+migrator = { path = "crates/migrator" }
multi_buffer = { path = "crates/multi_buffer" }
node_runtime = { path = "crates/node_runtime" }
notifications = { path = "crates/notifications" }
@@ -284,6 +289,7 @@ open_ai = { path = "crates/open_ai" }
outline = { path = "crates/outline" }
outline_panel = { path = "crates/outline_panel" }
paths = { path = "crates/paths" }
+panel = { path = "crates/panel" }
picker = { path = "crates/picker" }
plugin = { path = "crates/plugin" }
plugin_macros = { path = "crates/plugin_macros" }
@@ -339,6 +345,7 @@ ui = { path = "crates/ui" }
ui_input = { path = "crates/ui_input" }
ui_macros = { path = "crates/ui_macros" }
util = { path = "crates/util" }
+util_macros = { path = "crates/util_macros" }
vcs_menu = { path = "crates/vcs_menu" }
vim = { path = "crates/vim" }
vim_mode_setting = { path = "crates/vim_mode_setting" }
@@ -359,7 +366,7 @@ alacritty_terminal = { git = "https://github.com/alacritty/alacritty.git", rev =
any_vec = "0.14"
anyhow = "1.0.86"
arrayvec = { version = "0.7.4", features = ["serde"] }
-ashpd = { version = "0.10", default-features = false, features = ["async-std"]}
+ashpd = { version = "0.10", default-features = false, features = ["async-std"] }
async-compat = "0.2.1"
async-compression = { version = "0.4", features = ["gzip", "futures-io"] }
async-dispatcher = "0.1"
@@ -421,7 +428,11 @@ jupyter-websocket-client = { version = "0.9.0" }
libc = "0.2"
libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
linkify = "0.10.0"
-livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev="811ceae29fabee455f110c56cd66b3f49a7e5003", features = ["dispatcher", "services-dispatcher", "rustls-tls-native-roots"], default-features = false }
+livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "811ceae29fabee455f110c56cd66b3f49a7e5003", features = [
+ "dispatcher",
+ "services-dispatcher",
+ "rustls-tls-native-roots",
+], default-features = false }
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
markup5ever_rcdom = "0.3.0"
nanoid = "0.4"
@@ -441,11 +452,13 @@ pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git"
pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1abe5cec5ebfbe97ca71746a4cfc7fe89bddf8e0" }
postage = { version = "0.5", features = ["futures-traits"] }
pretty_assertions = { version = "1.3.0", features = ["unstable"] }
+proc-macro2 = "1.0.93"
profiling = "1"
prost = "0.9"
prost-build = "0.9"
prost-types = "0.9"
pulldown-cmark = { version = "0.12.0", default-features = false }
+quote = "1.0.9"
rand = "0.8.5"
rayon = "1.8"
regex = "1.5"
@@ -465,7 +478,7 @@ runtimelib = { version = "0.25.0", default-features = false, features = [
rustc-demangle = "0.1.23"
rust-embed = { version = "8.4", features = ["include-exclude"] }
rustc-hash = "2.1.0"
-rustls = "0.21.12"
+rustls = { version = "0.23.22" }
rustls-native-certs = "0.8.0"
schemars = { version = "0.8", features = ["impl_json_schema", "indexmap2"] }
semver = "1.0"
@@ -489,6 +502,7 @@ sqlformat = "0.2"
strsim = "0.11"
strum = { version = "0.26.0", features = ["derive"] }
subtle = "2.5.0"
+syn = { version = "1.0.72", features = ["full", "extra-traits"] }
sys-locale = "0.3.1"
sysinfo = "0.31.0"
take-until = "0.2.0"
@@ -513,6 +527,7 @@ tree-sitter-cpp = "0.23"
tree-sitter-css = "0.23"
tree-sitter-elixir = "0.3"
tree-sitter-embedded-template = "0.23.0"
+tree-sitter-gitcommit = {git = "https://github.com/zed-industries/tree-sitter-git-commit", rev = "88309716a69dd13ab83443721ba6e0b491d37ee9"}
tree-sitter-go = "0.23"
tree-sitter-go-mod = { git = "https://github.com/camdencheek/tree-sitter-go-mod", rev = "6efb59652d30e0e9cd5f3b3a669afd6f1a926d3c", package = "tree-sitter-gomod" }
tree-sitter-gowork = { git = "https://github.com/zed-industries/tree-sitter-go-work", rev = "acb0617bf7f4fda02c6217676cc64acb89536dc7" }
@@ -546,6 +561,7 @@ wasmtime = { version = "24", default-features = false, features = [
wasmtime-wasi = "24"
which = "6.0.0"
wit-component = "0.201"
+zed_llm_client = "0.2"
zstd = "0.11"
metal = "0.31"
@@ -606,6 +622,7 @@ features = [
# TODO livekit https://github.com/RustAudio/cpal/pull/891
[patch.crates-io]
cpal = { git = "https://github.com/zed-industries/cpal", rev = "fd8bc2fd39f1f5fdee5a0690656caff9a26d9d50" }
+real-async-tls = { git = "https://github.com/zed-industries/async-tls", rev = "1e759a4b5e370f87dc15e40756ac4f8815b61d9d", package = "async-tls"}
[profile.dev]
split-debuginfo = "unpacked"
@@ -0,0 +1 @@
+<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg"><circle cx="7.25" cy="7.25" r="3" fill="currentColor"></circle></svg>
@@ -86,8 +86,8 @@
"hpp": "cpp",
"hrl": "erlang",
"hs": "haskell",
- "htm": "template",
- "html": "template",
+ "htm": "html",
+ "html": "html",
"hxx": "cpp",
"ib": "storage",
"ico": "image",
@@ -101,7 +101,7 @@
"jpeg": "image",
"jpg": "image",
"js": "javascript",
- "json": "storage",
+ "json": "json",
"jsonc": "storage",
"jsx": "react",
"jxl": "image",
@@ -0,0 +1,6 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path opacity="0.6" fill-rule="evenodd" clip-rule="evenodd" d="M6.75 9.31247L8.25 10.5576V11.75H1.75V10.0803L4.49751 7.44273L5.65909 8.40693L3.73923 10.25H6.75V9.31247ZM8.25 5.85739V4.25H6.31358L8.25 5.85739ZM1.75 5.16209V7.1H3.25V6.4072L1.75 5.16209Z" fill="black"/>
+<path opacity="0.6" fill-rule="evenodd" clip-rule="evenodd" d="M10.9624 9.40853L11.9014 8L10.6241 6.08397L9.37598 6.91603L10.0986 8L9.80184 8.44518L10.9624 9.40853Z" fill="black"/>
+<path opacity="0.6" fill-rule="evenodd" clip-rule="evenodd" d="M12.8936 11.0116L14.9014 8L12.6241 4.58397L11.376 5.41603L13.0986 8L11.7331 10.0483L12.8936 11.0116Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M14.1225 13.809C14.0341 13.9146 13.877 13.9289 13.7711 13.8409L1.19311 3.40021C1.08659 3.31178 1.07221 3.15362 1.16104 3.04743L1.87752 2.19101C1.96588 2.0854 2.123 2.07112 2.22895 2.15906L14.8069 12.5998C14.9134 12.6882 14.9278 12.8464 14.839 12.9526L14.1225 13.809Z" fill="black"/>
+</svg>
@@ -32,7 +32,7 @@
"ctrl-q": "zed::Quit",
"f11": "zed::ToggleFullScreen",
"ctrl-alt-z": "zeta::RateCompletions",
- "ctrl-shift-i": "inline_completion::ToggleMenu"
+ "ctrl-shift-i": "edit_prediction::ToggleMenu"
}
},
{
@@ -145,17 +145,17 @@
}
},
{
- "context": "Editor && mode == full && inline_completion",
+ "context": "Editor && mode == full && edit_prediction",
"bindings": {
- "alt-]": "editor::NextInlineCompletion",
- "alt-[": "editor::PreviousInlineCompletion",
- "alt-right": "editor::AcceptPartialInlineCompletion"
+ "alt-]": "editor::NextEditPrediction",
+ "alt-[": "editor::PreviousEditPrediction",
+ "alt-right": "editor::AcceptPartialEditPrediction"
}
},
{
- "context": "Editor && !inline_completion",
+ "context": "Editor && !edit_prediction",
"bindings": {
- "alt-\\": "editor::ShowInlineCompletion"
+ "alt-\\": "editor::ShowEditPrediction"
}
},
{
@@ -203,8 +203,8 @@
"enter": "search::SelectNextMatch",
"shift-enter": "search::SelectPrevMatch",
"alt-enter": "search::SelectAllMatches",
- "ctrl-f": "search::FocusSearch",
"find": "search::FocusSearch",
+ "ctrl-f": "search::FocusSearch",
"ctrl-h": "search::ToggleReplace",
"ctrl-l": "search::ToggleSelection"
}
@@ -274,8 +274,8 @@
"ctrl-pagedown": "pane::ActivateNextItem",
"ctrl-shift-pageup": "pane::SwapItemLeft",
"ctrl-shift-pagedown": "pane::SwapItemRight",
- "ctrl-f4": "pane::CloseActiveItem",
- "ctrl-w": "pane::CloseActiveItem",
+ "ctrl-f4": ["pane::CloseActiveItem", { "close_pinned": false }],
+ "ctrl-w": ["pane::CloseActiveItem", { "close_pinned": false }],
"alt-ctrl-t": ["pane::CloseInactiveItems", { "close_pinned": false }],
"alt-ctrl-shift-w": "workspace::CloseInactiveTabsAndPanes",
"ctrl-k e": ["pane::CloseItemsToTheLeft", { "close_pinned": false }],
@@ -290,15 +290,15 @@
"f3": "search::SelectNextMatch",
"ctrl-alt-shift-g": "search::SelectPrevMatch",
"shift-f3": "search::SelectPrevMatch",
- "ctrl-shift-f": "project_search::ToggleFocus",
"shift-find": "project_search::ToggleFocus",
+ "ctrl-shift-f": "project_search::ToggleFocus",
"ctrl-alt-shift-h": "search::ToggleReplace",
"ctrl-alt-shift-l": "search::ToggleSelection",
"alt-enter": "search::SelectAllMatches",
"alt-c": "search::ToggleCaseSensitive",
"alt-w": "search::ToggleWholeWord",
- "alt-ctrl-f": "project_search::ToggleFilters",
"alt-find": "project_search::ToggleFilters",
+ "alt-ctrl-f": "project_search::ToggleFilters",
"ctrl-alt-shift-r": "search::ToggleRegex",
"ctrl-alt-shift-x": "search::ToggleRegex",
"alt-r": "search::ToggleRegex",
@@ -348,15 +348,15 @@
"ctrl-k ctrl-l": "editor::ToggleFold",
"ctrl-k ctrl-[": "editor::FoldRecursive",
"ctrl-k ctrl-]": "editor::UnfoldRecursive",
- "ctrl-k ctrl-1": ["editor::FoldAtLevel", { "level": 1 }],
- "ctrl-k ctrl-2": ["editor::FoldAtLevel", { "level": 2 }],
- "ctrl-k ctrl-3": ["editor::FoldAtLevel", { "level": 3 }],
- "ctrl-k ctrl-4": ["editor::FoldAtLevel", { "level": 4 }],
- "ctrl-k ctrl-5": ["editor::FoldAtLevel", { "level": 5 }],
- "ctrl-k ctrl-6": ["editor::FoldAtLevel", { "level": 6 }],
- "ctrl-k ctrl-7": ["editor::FoldAtLevel", { "level": 7 }],
- "ctrl-k ctrl-8": ["editor::FoldAtLevel", { "level": 8 }],
- "ctrl-k ctrl-9": ["editor::FoldAtLevel", { "level": 9 }],
+ "ctrl-k ctrl-1": ["editor::FoldAtLevel", 1],
+ "ctrl-k ctrl-2": ["editor::FoldAtLevel", 2],
+ "ctrl-k ctrl-3": ["editor::FoldAtLevel", 3],
+ "ctrl-k ctrl-4": ["editor::FoldAtLevel", 4],
+ "ctrl-k ctrl-5": ["editor::FoldAtLevel", 5],
+ "ctrl-k ctrl-6": ["editor::FoldAtLevel", 6],
+ "ctrl-k ctrl-7": ["editor::FoldAtLevel", 7],
+ "ctrl-k ctrl-8": ["editor::FoldAtLevel", 8],
+ "ctrl-k ctrl-9": ["editor::FoldAtLevel", 9],
"ctrl-k ctrl-0": "editor::FoldAll",
"ctrl-k ctrl-j": "editor::UnfoldAll",
"ctrl-space": "editor::ShowCompletions",
@@ -432,14 +432,14 @@
"ctrl-alt-s": "workspace::SaveAll",
"ctrl-k m": "language_selector::Toggle",
"escape": "workspace::Unfollow",
- "ctrl-k ctrl-left": ["workspace::ActivatePaneInDirection", "Left"],
- "ctrl-k ctrl-right": ["workspace::ActivatePaneInDirection", "Right"],
- "ctrl-k ctrl-up": ["workspace::ActivatePaneInDirection", "Up"],
- "ctrl-k ctrl-down": ["workspace::ActivatePaneInDirection", "Down"],
- "ctrl-k shift-left": ["workspace::SwapPaneInDirection", "Left"],
- "ctrl-k shift-right": ["workspace::SwapPaneInDirection", "Right"],
- "ctrl-k shift-up": ["workspace::SwapPaneInDirection", "Up"],
- "ctrl-k shift-down": ["workspace::SwapPaneInDirection", "Down"],
+ "ctrl-k ctrl-left": "workspace::ActivatePaneLeft",
+ "ctrl-k ctrl-right": "workspace::ActivatePaneRight",
+ "ctrl-k ctrl-up": "workspace::ActivatePaneUp",
+ "ctrl-k ctrl-down": "workspace::ActivatePaneDown",
+ "ctrl-k shift-left": "workspace::SwapPaneLeft",
+ "ctrl-k shift-right": "workspace::SwapPaneRight",
+ "ctrl-k shift-up": "workspace::SwapPaneUp",
+ "ctrl-k shift-down": "workspace::SwapPaneDown",
"ctrl-shift-x": "zed::Extensions",
"ctrl-shift-r": "task::Rerun",
"ctrl-alt-r": "task::Rerun",
@@ -453,8 +453,8 @@
{
"context": "ApplicationMenu",
"bindings": {
- "left": ["app_menu::NavigateApplicationMenuInDirection", "Left"],
- "right": ["app_menu::NavigateApplicationMenuInDirection", "Right"]
+ "left": "app_menu::ActivateMenuLeft",
+ "right": "app_menu::ActivateMenuRight"
}
},
// Bindings from Sublime Text
@@ -496,24 +496,22 @@
},
{
"context": "Editor && showing_completions",
- "use_key_equivalents": true,
"bindings": {
"enter": "editor::ConfirmCompletion",
"tab": "editor::ComposeCompletion"
}
},
{
- "context": "Editor && inline_completion && !showing_completions",
- "use_key_equivalents": true,
+ "context": "Editor && edit_prediction",
"bindings": {
- "tab": "editor::AcceptInlineCompletion"
+      // Changing the modifier currently breaks accepting while you also have an LSP completions menu open
+ "alt-enter": "editor::AcceptEditPrediction"
}
},
{
- "context": "Editor && inline_completion && showing_completions",
+ "context": "Editor && edit_prediction && !edit_prediction_requires_modifier",
"bindings": {
- // Currently, changing this binding breaks the preview behavior
- "alt-enter": "editor::AcceptInlineCompletion"
+ "tab": "editor::AcceptEditPrediction"
}
},
{
@@ -602,14 +600,12 @@
},
{
"context": "MessageEditor > Editor",
- "use_key_equivalents": true,
"bindings": {
"enter": "assistant2::Chat"
}
},
{
"context": "ContextStrip",
- "use_key_equivalents": true,
"bindings": {
"up": "assistant2::FocusUp",
"right": "assistant2::FocusRight",
@@ -687,8 +683,8 @@
"ctrl-delete": ["project_panel::Delete", { "skip_prompt": false }],
"alt-ctrl-r": "project_panel::RevealInFileManager",
"ctrl-shift-enter": "project_panel::OpenWithSystem",
- "ctrl-shift-f": "project_panel::NewSearchInDirectory",
"shift-find": "project_panel::NewSearchInDirectory",
+ "ctrl-shift-f": "project_panel::NewSearchInDirectory",
"shift-down": "menu::SelectNext",
"shift-up": "menu::SelectPrev",
"escape": "menu::Cancel"
@@ -702,30 +698,32 @@
},
{
"context": "GitPanel && !CommitEditor",
- "use_key_equivalents": true,
"bindings": {
"escape": "git_panel::Close"
}
},
{
"context": "GitPanel && ChangesList",
- "use_key_equivalents": true,
"bindings": {
"up": "menu::SelectPrev",
"down": "menu::SelectNext",
"enter": "menu::Confirm",
"space": "git::ToggleStaged",
"ctrl-space": "git::StageAll",
- "ctrl-shift-space": "git::UnstageAll"
+ "ctrl-shift-space": "git::UnstageAll",
+ "tab": "git_panel::FocusEditor",
+ "shift-tab": "git_panel::FocusEditor",
+ "escape": "git_panel::ToggleFocus"
}
},
{
- "context": "GitPanel && CommitEditor > Editor",
- "use_key_equivalents": true,
+ "context": "GitPanel > Editor",
"bindings": {
"escape": "git_panel::FocusChanges",
- "ctrl-enter": "git::CommitChanges",
- "ctrl-shift-enter": "git::CommitAllChanges"
+ "ctrl-enter": "git::Commit",
+ "tab": "git_panel::FocusChanges",
+ "shift-tab": "git_panel::FocusChanges",
+ "alt-up": "git_panel::FocusChanges"
}
},
{
@@ -833,7 +831,6 @@
},
{
"context": "ZedPredictModal",
- "use_key_equivalents": true,
"bindings": {
"escape": "menu::Cancel"
}
@@ -40,7 +40,7 @@
"fn-f": "zed::ToggleFullScreen",
"ctrl-cmd-f": "zed::ToggleFullScreen",
"ctrl-shift-z": "zeta::RateCompletions",
- "ctrl-shift-i": "inline_completion::ToggleMenu"
+ "ctrl-shift-i": "edit_prediction::ToggleMenu"
}
},
{
@@ -155,19 +155,19 @@
}
},
{
- "context": "Editor && mode == full && inline_completion",
+ "context": "Editor && mode == full && edit_prediction",
"use_key_equivalents": true,
"bindings": {
- "alt-tab": "editor::NextInlineCompletion",
- "alt-shift-tab": "editor::PreviousInlineCompletion",
- "ctrl-cmd-right": "editor::AcceptPartialInlineCompletion"
+ "alt-tab": "editor::NextEditPrediction",
+ "alt-shift-tab": "editor::PreviousEditPrediction",
+ "ctrl-cmd-right": "editor::AcceptPartialEditPrediction"
}
},
{
- "context": "Editor && !inline_completion",
+ "context": "Editor && !edit_prediction",
"use_key_equivalents": true,
"bindings": {
- "alt-tab": "editor::ShowInlineCompletion"
+ "alt-tab": "editor::ShowEditPrediction"
}
},
{
@@ -349,7 +349,7 @@
"cmd-}": "pane::ActivateNextItem",
"ctrl-shift-pageup": "pane::SwapItemLeft",
"ctrl-shift-pagedown": "pane::SwapItemRight",
- "cmd-w": "pane::CloseActiveItem",
+ "cmd-w": ["pane::CloseActiveItem", { "close_pinned": false }],
"alt-cmd-t": ["pane::CloseInactiveItems", { "close_pinned": false }],
"ctrl-alt-cmd-w": "workspace::CloseInactiveTabsAndPanes",
"cmd-k e": ["pane::CloseItemsToTheLeft", { "close_pinned": false }],
@@ -413,15 +413,15 @@
"cmd-k cmd-l": "editor::ToggleFold",
"cmd-k cmd-[": "editor::FoldRecursive",
"cmd-k cmd-]": "editor::UnfoldRecursive",
- "cmd-k cmd-1": ["editor::FoldAtLevel", { "level": 1 }],
- "cmd-k cmd-2": ["editor::FoldAtLevel", { "level": 2 }],
- "cmd-k cmd-3": ["editor::FoldAtLevel", { "level": 3 }],
- "cmd-k cmd-4": ["editor::FoldAtLevel", { "level": 4 }],
- "cmd-k cmd-5": ["editor::FoldAtLevel", { "level": 5 }],
- "cmd-k cmd-6": ["editor::FoldAtLevel", { "level": 6 }],
- "cmd-k cmd-7": ["editor::FoldAtLevel", { "level": 7 }],
- "cmd-k cmd-8": ["editor::FoldAtLevel", { "level": 8 }],
- "cmd-k cmd-9": ["editor::FoldAtLevel", { "level": 9 }],
+ "cmd-k cmd-1": ["editor::FoldAtLevel", 1],
+ "cmd-k cmd-2": ["editor::FoldAtLevel", 2],
+ "cmd-k cmd-3": ["editor::FoldAtLevel", 3],
+ "cmd-k cmd-4": ["editor::FoldAtLevel", 4],
+ "cmd-k cmd-5": ["editor::FoldAtLevel", 5],
+ "cmd-k cmd-6": ["editor::FoldAtLevel", 6],
+ "cmd-k cmd-7": ["editor::FoldAtLevel", 7],
+ "cmd-k cmd-8": ["editor::FoldAtLevel", 8],
+ "cmd-k cmd-9": ["editor::FoldAtLevel", 9],
"cmd-k cmd-0": "editor::FoldAll",
"cmd-k cmd-j": "editor::UnfoldAll",
// Using `ctrl-space` in Zed requires disabling the macOS global shortcut.
@@ -509,14 +509,14 @@
"cmd-alt-s": "workspace::SaveAll",
"cmd-k m": "language_selector::Toggle",
"escape": "workspace::Unfollow",
- "cmd-k cmd-left": ["workspace::ActivatePaneInDirection", "Left"],
- "cmd-k cmd-right": ["workspace::ActivatePaneInDirection", "Right"],
- "cmd-k cmd-up": ["workspace::ActivatePaneInDirection", "Up"],
- "cmd-k cmd-down": ["workspace::ActivatePaneInDirection", "Down"],
- "cmd-k shift-left": ["workspace::SwapPaneInDirection", "Left"],
- "cmd-k shift-right": ["workspace::SwapPaneInDirection", "Right"],
- "cmd-k shift-up": ["workspace::SwapPaneInDirection", "Up"],
- "cmd-k shift-down": ["workspace::SwapPaneInDirection", "Down"],
+ "cmd-k cmd-left": "workspace::ActivatePaneLeft",
+ "cmd-k cmd-right": "workspace::ActivatePaneRight",
+ "cmd-k cmd-up": "workspace::ActivatePaneUp",
+ "cmd-k cmd-down": "workspace::ActivatePaneDown",
+ "cmd-k shift-left": "workspace::SwapPaneLeft",
+ "cmd-k shift-right": "workspace::SwapPaneRight",
+ "cmd-k shift-up": "workspace::SwapPaneUp",
+ "cmd-k shift-down": "workspace::SwapPaneDown",
"cmd-shift-x": "zed::Extensions"
}
},
@@ -580,17 +580,17 @@
}
},
{
- "context": "Editor && inline_completion && !showing_completions",
- "use_key_equivalents": true,
+ "context": "Editor && edit_prediction",
"bindings": {
- "tab": "editor::AcceptInlineCompletion"
+      // Changing the modifier currently breaks accepting while you also have an LSP completions menu open
+ "alt-tab": "editor::AcceptEditPrediction"
}
},
{
- "context": "Editor && inline_completion && showing_completions",
+ "context": "Editor && edit_prediction && !edit_prediction_requires_modifier",
+ "use_key_equivalents": true,
"bindings": {
- // Currently, changing this binding breaks the preview behavior
- "alt-tab": "editor::AcceptInlineCompletion"
+ "tab": "editor::AcceptEditPrediction"
}
},
{
@@ -715,13 +715,6 @@
"space": "project_panel::Open"
}
},
- {
- "context": "GitPanel && !CommitEditor",
- "use_key_equivalents": true,
- "bindings": {
- "escape": "git_panel::Close"
- }
- },
{
"context": "GitPanel && ChangesList",
"use_key_equivalents": true,
@@ -734,17 +727,20 @@
"space": "git::ToggleStaged",
"cmd-shift-space": "git::StageAll",
"ctrl-shift-space": "git::UnstageAll",
- "alt-down": "git_panel::FocusEditor"
+ "alt-down": "git_panel::FocusEditor",
+ "tab": "git_panel::FocusEditor",
+ "shift-tab": "git_panel::FocusEditor",
+ "escape": "git_panel::ToggleFocus"
}
},
{
- "context": "GitPanel && CommitEditor > Editor",
+ "context": "GitPanel > Editor",
"use_key_equivalents": true,
"bindings": {
- "alt-up": "git_panel::FocusChanges",
- "escape": "git_panel::FocusChanges",
- "cmd-enter": "git::CommitChanges",
- "cmd-alt-enter": "git::CommitAllChanges"
+ "cmd-enter": "git::Commit",
+ "tab": "git_panel::FocusChanges",
+ "shift-tab": "git_panel::FocusChanges",
+ "alt-up": "git_panel::FocusChanges"
}
},
{
@@ -2,8 +2,8 @@
{
"context": "VimControl && !menu",
"bindings": {
- "i": ["vim::PushOperator", { "Object": { "around": false } }],
- "a": ["vim::PushOperator", { "Object": { "around": true } }],
+ "i": ["vim::PushObject", { "around": false }],
+ "a": ["vim::PushObject", { "around": true }],
"left": "vim::Left",
"h": "vim::Left",
"backspace": "vim::Backspace",
@@ -54,10 +54,10 @@
// "b": "vim::PreviousSubwordStart",
// "e": "vim::NextSubwordEnd",
// "g e": "vim::PreviousSubwordEnd",
- "shift-w": ["vim::NextWordStart", { "ignorePunctuation": true }],
- "shift-e": ["vim::NextWordEnd", { "ignorePunctuation": true }],
- "shift-b": ["vim::PreviousWordStart", { "ignorePunctuation": true }],
- "g shift-e": ["vim::PreviousWordEnd", { "ignorePunctuation": true }],
+ "shift-w": ["vim::NextWordStart", { "ignore_punctuation": true }],
+ "shift-e": ["vim::NextWordEnd", { "ignore_punctuation": true }],
+ "shift-b": ["vim::PreviousWordStart", { "ignore_punctuation": true }],
+ "g shift-e": ["vim::PreviousWordEnd", { "ignore_punctuation": true }],
"/": "vim::Search",
"g /": "pane::DeploySearch",
"?": ["vim::Search", { "backwards": true }],
@@ -70,20 +70,20 @@
"[ {": ["vim::UnmatchedBackward", { "char": "{" }],
"] )": ["vim::UnmatchedForward", { "char": ")" }],
"[ (": ["vim::UnmatchedBackward", { "char": "(" }],
- "f": ["vim::PushOperator", { "FindForward": { "before": false } }],
- "t": ["vim::PushOperator", { "FindForward": { "before": true } }],
- "shift-f": ["vim::PushOperator", { "FindBackward": { "after": false } }],
- "shift-t": ["vim::PushOperator", { "FindBackward": { "after": true } }],
- "m": ["vim::PushOperator", "Mark"],
- "'": ["vim::PushOperator", { "Jump": { "line": true } }],
- "`": ["vim::PushOperator", { "Jump": { "line": false } }],
+ "f": ["vim::PushFindForward", { "before": false }],
+ "t": ["vim::PushFindForward", { "before": true }],
+ "shift-f": ["vim::PushFindBackward", { "after": false }],
+ "shift-t": ["vim::PushFindBackward", { "after": true }],
+ "m": "vim::PushMark",
+ "'": ["vim::PushJump", { "line": true }],
+ "`": ["vim::PushJump", { "line": false }],
";": "vim::RepeatFind",
",": "vim::RepeatFindReversed",
"ctrl-o": "pane::GoBack",
"ctrl-i": "pane::GoForward",
"ctrl-]": "editor::GoToDefinition",
- "escape": ["vim::SwitchMode", "Normal"],
- "ctrl-[": ["vim::SwitchMode", "Normal"],
+ "escape": "vim::SwitchToNormalMode",
+ "ctrl-[": "vim::SwitchToNormalMode",
"v": "vim::ToggleVisual",
"shift-v": "vim::ToggleVisualLine",
"ctrl-g": "vim::ShowLocation",
@@ -102,6 +102,7 @@
"ctrl-e": "vim::LineDown",
"ctrl-y": "vim::LineUp",
// "g" commands
+ "g r": "vim::PushReplaceWithRegister",
"g g": "vim::StartOfDocument",
"g h": "editor::Hover",
"g t": "pane::ActivateNextItem",
@@ -124,17 +125,17 @@
"g .": "editor::ToggleCodeActions", // zed specific
"g shift-a": "editor::FindAllReferences", // zed specific
"g space": "editor::OpenExcerpts", // zed specific
- "g *": ["vim::MoveToNext", { "partialWord": true }],
- "g #": ["vim::MoveToPrev", { "partialWord": true }],
- "g j": ["vim::Down", { "displayLines": true }],
- "g down": ["vim::Down", { "displayLines": true }],
- "g k": ["vim::Up", { "displayLines": true }],
- "g up": ["vim::Up", { "displayLines": true }],
- "g $": ["vim::EndOfLine", { "displayLines": true }],
- "g end": ["vim::EndOfLine", { "displayLines": true }],
- "g 0": ["vim::StartOfLine", { "displayLines": true }],
- "g home": ["vim::StartOfLine", { "displayLines": true }],
- "g ^": ["vim::FirstNonWhitespace", { "displayLines": true }],
+ "g *": ["vim::MoveToNext", { "partial_word": true }],
+ "g #": ["vim::MoveToPrev", { "partial_word": true }],
+ "g j": ["vim::Down", { "display_lines": true }],
+ "g down": ["vim::Down", { "display_lines": true }],
+ "g k": ["vim::Up", { "display_lines": true }],
+ "g up": ["vim::Up", { "display_lines": true }],
+ "g $": ["vim::EndOfLine", { "display_lines": true }],
+ "g end": ["vim::EndOfLine", { "display_lines": true }],
+ "g 0": ["vim::StartOfLine", { "display_lines": true }],
+ "g home": ["vim::StartOfLine", { "display_lines": true }],
+ "g ^": ["vim::FirstNonWhitespace", { "display_lines": true }],
"g v": "vim::RestoreVisualSelection",
"g ]": "editor::GoToDiagnostic",
"g [": "editor::GoToPrevDiagnostic",
@@ -146,7 +147,7 @@
"shift-l": "vim::WindowBottom",
"q": "vim::ToggleRecord",
"shift-q": "vim::ReplayLastRecording",
- "@": ["vim::PushOperator", "ReplayRegister"],
+ "@": "vim::PushReplayRegister",
// z commands
"z enter": ["workspace::SendKeystrokes", "z t ^"],
"z -": ["workspace::SendKeystrokes", "z b ^"],
@@ -165,8 +166,8 @@
"z f": "editor::FoldSelectedRanges",
"z shift-m": "editor::FoldAll",
"z shift-r": "editor::UnfoldAll",
- "shift-z shift-q": ["pane::CloseActiveItem", { "saveIntent": "skip" }],
- "shift-z shift-z": ["pane::CloseActiveItem", { "saveIntent": "saveAll" }],
+ "shift-z shift-q": ["pane::CloseActiveItem", { "save_intent": "skip" }],
+ "shift-z shift-z": ["pane::CloseActiveItem", { "save_intent": "save_all" }],
// Count support
"1": ["vim::Number", 1],
"2": ["vim::Number", 2],
@@ -193,13 +194,13 @@
"escape": "editor::Cancel",
":": "command_palette::Toggle",
".": "vim::Repeat",
- "c": ["vim::PushOperator", "Change"],
+ "c": "vim::PushChange",
"shift-c": "vim::ChangeToEndOfLine",
- "d": ["vim::PushOperator", "Delete"],
+ "d": "vim::PushDelete",
"shift-d": "vim::DeleteToEndOfLine",
"shift-j": "vim::JoinLines",
"g shift-j": "vim::JoinLinesNoWhitespace",
- "y": ["vim::PushOperator", "Yank"],
+ "y": "vim::PushYank",
"shift-y": "vim::YankLine",
"i": "vim::InsertBefore",
"shift-i": "vim::InsertFirstNonWhitespace",
@@ -216,19 +217,19 @@
"shift-p": ["vim::Paste", { "before": true }],
"u": "vim::Undo",
"ctrl-r": "vim::Redo",
- "r": ["vim::PushOperator", "Replace"],
+ "r": "vim::PushReplace",
"s": "vim::Substitute",
"shift-s": "vim::SubstituteLine",
- ">": ["vim::PushOperator", "Indent"],
- "<": ["vim::PushOperator", "Outdent"],
- "=": ["vim::PushOperator", "AutoIndent"],
- "!": ["vim::PushOperator", "ShellCommand"],
- "g u": ["vim::PushOperator", "Lowercase"],
- "g shift-u": ["vim::PushOperator", "Uppercase"],
- "g ~": ["vim::PushOperator", "OppositeCase"],
- "\"": ["vim::PushOperator", "Register"],
- "g w": ["vim::PushOperator", "Rewrap"],
- "g q": ["vim::PushOperator", "Rewrap"],
+ ">": "vim::PushIndent",
+ "<": "vim::PushOutdent",
+ "=": "vim::PushAutoIndent",
+ "!": "vim::PushShellCommand",
+ "g u": "vim::PushLowercase",
+ "g shift-u": "vim::PushUppercase",
+ "g ~": "vim::PushOppositeCase",
+ "\"": "vim::PushRegister",
+ "g w": "vim::PushRewrap",
+ "g q": "vim::PushRewrap",
"ctrl-pagedown": "pane::ActivateNextItem",
"ctrl-pageup": "pane::ActivatePrevItem",
"insert": "vim::InsertBefore",
@@ -239,7 +240,7 @@
"[ d": "editor::GoToPrevDiagnostic",
"] c": "editor::GoToHunk",
"[ c": "editor::GoToPrevHunk",
- "g c": ["vim::PushOperator", "ToggleComments"]
+ "g c": "vim::PushToggleComments"
}
},
{
@@ -264,14 +265,14 @@
"y": "vim::VisualYank",
"shift-y": "vim::VisualYankLine",
"p": "vim::Paste",
- "shift-p": ["vim::Paste", { "preserveClipboard": true }],
+ "shift-p": ["vim::Paste", { "preserve_clipboard": true }],
"c": "vim::Substitute",
"s": "vim::Substitute",
"shift-r": "vim::SubstituteLine",
"shift-s": "vim::SubstituteLine",
"~": "vim::ChangeCase",
- "*": ["vim::MoveToNext", { "partialWord": true }],
- "#": ["vim::MoveToPrev", { "partialWord": true }],
+ "*": ["vim::MoveToNext", { "partial_word": true }],
+ "#": ["vim::MoveToPrev", { "partial_word": true }],
"ctrl-a": "vim::Increment",
"ctrl-x": "vim::Decrement",
"g ctrl-a": ["vim::Increment", { "step": true }],
@@ -282,19 +283,19 @@
"g shift-a": "vim::VisualInsertEndOfLine",
"shift-j": "vim::JoinLines",
"g shift-j": "vim::JoinLinesNoWhitespace",
- "r": ["vim::PushOperator", "Replace"],
- "ctrl-c": ["vim::SwitchMode", "Normal"],
- "ctrl-[": ["vim::SwitchMode", "Normal"],
- "escape": ["vim::SwitchMode", "Normal"],
+ "r": "vim::PushReplace",
+ "ctrl-c": "vim::SwitchToNormalMode",
+ "ctrl-[": "vim::SwitchToNormalMode",
+ "escape": "vim::SwitchToNormalMode",
">": "vim::Indent",
"<": "vim::Outdent",
"=": "vim::AutoIndent",
"!": "vim::ShellCommand",
- "i": ["vim::PushOperator", { "Object": { "around": false } }],
- "a": ["vim::PushOperator", { "Object": { "around": true } }],
+ "i": ["vim::PushObject", { "around": false }],
+ "a": ["vim::PushObject", { "around": true }],
"g c": "vim::ToggleComments",
"g q": "vim::Rewrap",
- "\"": ["vim::PushOperator", "Register"],
+ "\"": "vim::PushRegister",
// tree-sitter related commands
"[ x": "editor::SelectLargerSyntaxNode",
"] x": "editor::SelectSmallerSyntaxNode"
@@ -309,19 +310,19 @@
"ctrl-x": null,
"ctrl-x ctrl-o": "editor::ShowCompletions",
"ctrl-x ctrl-a": "assistant::InlineAssist", // zed specific
- "ctrl-x ctrl-c": "editor::ShowInlineCompletion", // zed specific
+ "ctrl-x ctrl-c": "editor::ShowEditPrediction", // zed specific
"ctrl-x ctrl-l": "editor::ToggleCodeActions", // zed specific
"ctrl-x ctrl-z": "editor::Cancel",
"ctrl-w": "editor::DeleteToPreviousWordStart",
"ctrl-u": "editor::DeleteToBeginningOfLine",
"ctrl-t": "vim::Indent",
"ctrl-d": "vim::Outdent",
- "ctrl-k": ["vim::PushOperator", { "Digraph": {} }],
- "ctrl-v": ["vim::PushOperator", { "Literal": {} }],
+ "ctrl-k": ["vim::PushDigraph", {}],
+ "ctrl-v": ["vim::PushLiteral", {}],
"ctrl-shift-v": "editor::Paste", // note: this is *very* similar to ctrl-v in vim, but ctrl-shift-v on linux is the typical shortcut for paste when ctrl-v is already in use.
- "ctrl-q": ["vim::PushOperator", { "Literal": {} }],
- "ctrl-shift-q": ["vim::PushOperator", { "Literal": {} }],
- "ctrl-r": ["vim::PushOperator", "Register"],
+ "ctrl-q": ["vim::PushLiteral", {}],
+ "ctrl-shift-q": ["vim::PushLiteral", {}],
+ "ctrl-r": "vim::PushRegister",
"insert": "vim::ToggleReplace",
"ctrl-o": "vim::TemporaryNormal"
}
@@ -356,11 +357,11 @@
"ctrl-c": "vim::NormalBefore",
"ctrl-[": "vim::NormalBefore",
"escape": "vim::NormalBefore",
- "ctrl-k": ["vim::PushOperator", { "Digraph": {} }],
- "ctrl-v": ["vim::PushOperator", { "Literal": {} }],
+ "ctrl-k": ["vim::PushDigraph", {}],
+ "ctrl-v": ["vim::PushLiteral", {}],
"ctrl-shift-v": "editor::Paste", // note: this is *very* similar to ctrl-v in vim, but ctrl-shift-v on linux is the typical shortcut for paste when ctrl-v is already in use.
- "ctrl-q": ["vim::PushOperator", { "Literal": {} }],
- "ctrl-shift-q": ["vim::PushOperator", { "Literal": {} }],
+ "ctrl-q": ["vim::PushLiteral", {}],
+ "ctrl-shift-q": ["vim::PushLiteral", {}],
"backspace": "vim::UndoReplace",
"tab": "vim::Tab",
"enter": "vim::Enter",
@@ -375,9 +376,9 @@
"ctrl-c": "vim::ClearOperators",
"ctrl-[": "vim::ClearOperators",
"escape": "vim::ClearOperators",
- "ctrl-k": ["vim::PushOperator", { "Digraph": {} }],
- "ctrl-v": ["vim::PushOperator", { "Literal": {} }],
- "ctrl-q": ["vim::PushOperator", { "Literal": {} }]
+ "ctrl-k": ["vim::PushDigraph", {}],
+ "ctrl-v": ["vim::PushLiteral", {}],
+ "ctrl-q": ["vim::PushLiteral", {}]
}
},
{
@@ -393,10 +394,10 @@
"context": "vim_operator == a || vim_operator == i || vim_operator == cs",
"bindings": {
"w": "vim::Word",
- "shift-w": ["vim::Word", { "ignorePunctuation": true }],
+ "shift-w": ["vim::Word", { "ignore_punctuation": true }],
// Subword TextObject
// "w": "vim::Subword",
- // "shift-w": ["vim::Subword", { "ignorePunctuation": true }],
+ // "shift-w": ["vim::Subword", { "ignore_punctuation": true }],
"t": "vim::Tag",
"s": "vim::Sentence",
"p": "vim::Paragraph",
@@ -419,7 +420,7 @@
">": "vim::AngleBrackets",
"a": "vim::Argument",
"i": "vim::IndentObj",
- "shift-i": ["vim::IndentObj", { "includeBelow": true }],
+ "shift-i": ["vim::IndentObj", { "include_below": true }],
"f": "vim::Method",
"c": "vim::Class",
"e": "vim::EntireFile"
@@ -430,14 +431,14 @@
"bindings": {
"c": "vim::CurrentLine",
"d": "editor::Rename", // zed specific
- "s": ["vim::PushOperator", { "ChangeSurrounds": {} }]
+ "s": ["vim::PushChangeSurrounds", {}]
}
},
{
"context": "vim_operator == d",
"bindings": {
"d": "vim::CurrentLine",
- "s": ["vim::PushOperator", "DeleteSurrounds"],
+ "s": "vim::PushDeleteSurrounds",
"o": "editor::ToggleSelectedDiffHunks", // "d o"
"p": "editor::RevertSelectedHunks" // "d p"
}
@@ -476,7 +477,7 @@
"context": "vim_operator == y",
"bindings": {
"y": "vim::CurrentLine",
- "s": ["vim::PushOperator", { "AddSurrounds": {} }]
+ "s": ["vim::PushAddSurrounds", {}]
}
},
{
@@ -566,34 +567,34 @@
}
},
{
- "context": "ProjectPanel || CollabPanel || OutlinePanel || ChatPanel || VimControl || EmptyPane || SharedScreen || MarkdownPreview || KeyContextView",
+ "context": "GitPanel || ProjectPanel || CollabPanel || OutlinePanel || ChatPanel || VimControl || EmptyPane || SharedScreen || MarkdownPreview || KeyContextView",
"bindings": {
// window related commands (ctrl-w X)
"ctrl-w": null,
- "ctrl-w left": ["workspace::ActivatePaneInDirection", "Left"],
- "ctrl-w right": ["workspace::ActivatePaneInDirection", "Right"],
- "ctrl-w up": ["workspace::ActivatePaneInDirection", "Up"],
- "ctrl-w down": ["workspace::ActivatePaneInDirection", "Down"],
- "ctrl-w ctrl-h": ["workspace::ActivatePaneInDirection", "Left"],
- "ctrl-w ctrl-l": ["workspace::ActivatePaneInDirection", "Right"],
- "ctrl-w ctrl-k": ["workspace::ActivatePaneInDirection", "Up"],
- "ctrl-w ctrl-j": ["workspace::ActivatePaneInDirection", "Down"],
- "ctrl-w h": ["workspace::ActivatePaneInDirection", "Left"],
- "ctrl-w l": ["workspace::ActivatePaneInDirection", "Right"],
- "ctrl-w k": ["workspace::ActivatePaneInDirection", "Up"],
- "ctrl-w j": ["workspace::ActivatePaneInDirection", "Down"],
- "ctrl-w shift-left": ["workspace::SwapPaneInDirection", "Left"],
- "ctrl-w shift-right": ["workspace::SwapPaneInDirection", "Right"],
- "ctrl-w shift-up": ["workspace::SwapPaneInDirection", "Up"],
- "ctrl-w shift-down": ["workspace::SwapPaneInDirection", "Down"],
- "ctrl-w shift-h": ["workspace::SwapPaneInDirection", "Left"],
- "ctrl-w shift-l": ["workspace::SwapPaneInDirection", "Right"],
- "ctrl-w shift-k": ["workspace::SwapPaneInDirection", "Up"],
- "ctrl-w shift-j": ["workspace::SwapPaneInDirection", "Down"],
- "ctrl-w >": ["vim::ResizePane", "Widen"],
- "ctrl-w <": ["vim::ResizePane", "Narrow"],
- "ctrl-w -": ["vim::ResizePane", "Shorten"],
- "ctrl-w +": ["vim::ResizePane", "Lengthen"],
+ "ctrl-w left": "workspace::ActivatePaneLeft",
+ "ctrl-w right": "workspace::ActivatePaneRight",
+ "ctrl-w up": "workspace::ActivatePaneUp",
+ "ctrl-w down": "workspace::ActivatePaneDown",
+ "ctrl-w ctrl-h": "workspace::ActivatePaneLeft",
+ "ctrl-w ctrl-l": "workspace::ActivatePaneRight",
+ "ctrl-w ctrl-k": "workspace::ActivatePaneUp",
+ "ctrl-w ctrl-j": "workspace::ActivatePaneDown",
+ "ctrl-w h": "workspace::ActivatePaneLeft",
+ "ctrl-w l": "workspace::ActivatePaneRight",
+ "ctrl-w k": "workspace::ActivatePaneUp",
+ "ctrl-w j": "workspace::ActivatePaneDown",
+ "ctrl-w shift-left": "workspace::SwapPaneLeft",
+ "ctrl-w shift-right": "workspace::SwapPaneRight",
+ "ctrl-w shift-up": "workspace::SwapPaneUp",
+ "ctrl-w shift-down": "workspace::SwapPaneDown",
+ "ctrl-w shift-h": "workspace::SwapPaneLeft",
+ "ctrl-w shift-l": "workspace::SwapPaneRight",
+ "ctrl-w shift-k": "workspace::SwapPaneUp",
+ "ctrl-w shift-j": "workspace::SwapPaneDown",
+ "ctrl-w >": "vim::ResizePaneRight",
+ "ctrl-w <": "vim::ResizePaneLeft",
+ "ctrl-w -": "vim::ResizePaneDown",
+ "ctrl-w +": "vim::ResizePaneUp",
"ctrl-w _": "vim::MaximizePane",
"ctrl-w =": "vim::ResetPaneSizes",
"ctrl-w g t": "pane::ActivateNextItem",
@@ -624,7 +625,7 @@
}
},
{
- "context": "EmptyPane || SharedScreen || MarkdownPreview || KeyContextView || Welcome",
+ "context": "GitPanel || EmptyPane || SharedScreen || MarkdownPreview || KeyContextView || Welcome",
"bindings": {
":": "command_palette::Toggle",
"g /": "pane::DeploySearch"
@@ -25,7 +25,7 @@
// Features that can be globally enabled or disabled
"features": {
// Which edit prediction provider to use.
- "inline_completion_provider": "copilot"
+ "edit_prediction_provider": "copilot"
},
// The name of a font to use for rendering text in the editor
"buffer_font_family": "Zed Plex Mono",
@@ -93,6 +93,13 @@
// workspace when the centered layout is used.
"right_padding": 0.2
},
+ // All settings related to the image viewer.
+ "image_viewer": {
+ // The unit for image file sizes.
+ // Defaults to "binary".
+ // The other available option is "decimal".
+ "unit": "binary"
+ },
// The key to use for adding multiple cursors
// Currently "alt" or "cmd_or_ctrl" (also aliased as
// "cmd" and "ctrl") are supported.
@@ -163,7 +170,7 @@
"show_signature_help_after_edits": false,
/// Whether to show the edit predictions next to the completions provided by a language server.
/// Only has an effect if edit prediction provider supports it.
- "show_inline_completions_in_menu": true,
+ "show_edit_predictions_in_menu": true,
// Whether to show wrap guides (vertical rulers) in the editor.
// Setting this to true will show a guide at the 'preferred_line_length' value
// if 'soft_wrap' is set to 'preferred_line_length', and will show any
@@ -197,11 +204,11 @@
// no matter how they were inserted.
"always_treat_brackets_as_autoclosed": false,
// Controls whether edit predictions are shown immediately (true)
- // or manually by triggering `editor::ShowInlineCompletion` (false).
- "show_inline_completions": true,
+ // or manually by triggering `editor::ShowEditPrediction` (false).
+ "show_edit_predictions": true,
// Controls whether edit predictions are shown in a given language scope.
// Example: ["string", "comment"]
- "inline_completions_disabled_in": [],
+ "edit_predictions_disabled_in": [],
// Whether to show tabs and spaces in the editor.
// This setting can take four values:
//
@@ -774,8 +781,10 @@
// 2. Load direnv configuration through the shell hook, works for POSIX shells and fish.
// "load_direnv": "shell_hook"
"load_direnv": "direct",
- "inline_completions": {
+ "edit_predictions": {
// A list of globs representing files that edit predictions should be disabled for.
+ // There's a sensible default list of globs already included.
+ // Any addition to this list will be merged with the default list.
"disabled_globs": [
"**/.env*",
"**/*.pem",
@@ -783,7 +792,14 @@
"**/*.cert",
"**/*.crt",
"**/secrets.yml"
- ]
+ ],
+ // When to show edit prediction previews in the buffer.
+ // This setting takes two possible values:
+ // 1. Display inline when there are no language server completions available.
+ // "inline_preview": "auto"
+ // 2. Display inline when holding modifier key (alt by default).
+ // "inline_preview": "when_holding_modifier"
+ "inline_preview": "auto"
},
// Settings specific to journaling
"journal": {
@@ -459,7 +459,7 @@ impl ContextEditor {
window: &mut Window,
cx: &mut Context<Self>,
) {
- if self.editor.read(cx).has_active_completions_menu() {
+ if self.editor.read(cx).has_visible_completions_menu() {
return;
}
@@ -832,12 +832,13 @@ impl ContextEditor {
let render_block: RenderBlock = Arc::new({
let this = this.clone();
let patch_range = range.clone();
- move |cx: &mut BlockContext<'_, '_>| {
+ move |cx: &mut BlockContext| {
let max_width = cx.max_width;
let gutter_width = cx.gutter_dimensions.full_width();
let block_id = cx.block_id;
let selected = cx.selected;
- this.update_in(cx, |this, window, cx| {
+ let window = &mut cx.window;
+ this.update(cx.app, |this, cx| {
this.render_patch_block(
patch_range.clone(),
max_width,
@@ -323,7 +323,14 @@ fn collect_files(
)))?;
directory_stack.push(entry.path.clone());
} else {
- let entry_name = format!("{}/{}", prefix_paths, &filename);
+ // todo(windows)
+ // Potential bug: this assumes `prefix_paths` already uses the platform's path separator (`\` on Windows)
+ let entry_name = format!(
+ "{}{}{}",
+ prefix_paths,
+ std::path::MAIN_SEPARATOR_STR,
+ &filename
+ );
events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection {
icon: IconName::Folder,
label: entry_name.clone().into(),
@@ -455,6 +462,7 @@ mod custom_path_matcher {
use std::{fmt::Debug as _, path::Path};
use globset::{Glob, GlobSet, GlobSetBuilder};
+ use util::paths::SanitizedPath;
#[derive(Clone, Debug, Default)]
pub struct PathMatcher {
@@ -481,7 +489,7 @@ mod custom_path_matcher {
pub fn new(globs: &[String]) -> Result<Self, globset::Error> {
let globs = globs
.into_iter()
- .map(|glob| Glob::new(&glob))
+ .map(|glob| Glob::new(&SanitizedPath::from(glob).to_glob_string()))
.collect::<Result<Vec<_>, _>>()?;
let sources = globs.iter().map(|glob| glob.glob().to_owned()).collect();
let sources_with_trailing_slash = globs
@@ -507,7 +515,9 @@ mod custom_path_matcher {
.zip(self.sources_with_trailing_slash.iter())
.any(|(source, with_slash)| {
let as_bytes = other_path.as_os_str().as_encoded_bytes();
- let with_slash = if source.ends_with("/") {
+ // todo(windows)
+ // Potential bug: this assumes the glob `source` strings use the platform's path separator (`\` on Windows)
+ let with_slash = if source.ends_with(std::path::MAIN_SEPARATOR_STR) {
source.as_bytes()
} else {
with_slash.as_bytes()
@@ -569,6 +579,7 @@ mod test {
use serde_json::json;
use settings::SettingsStore;
use smol::stream::StreamExt;
+ use util::{path, separator};
use super::collect_files;
@@ -592,7 +603,7 @@ mod test {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/root",
+ path!("/root"),
json!({
"dir": {
"subdir": {
@@ -607,7 +618,7 @@ mod test {
)
.await;
- let project = Project::test(fs, ["/root".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
let result_1 =
cx.update(|cx| collect_files(project.clone(), &["root/dir".to_string()], cx));
@@ -615,7 +626,7 @@ mod test {
.await
.unwrap();
- assert!(result_1.text.starts_with("root/dir"));
+ assert!(result_1.text.starts_with(separator!("root/dir")));
// 4 files + 2 directories
assert_eq!(result_1.sections.len(), 6);
@@ -631,7 +642,7 @@ mod test {
cx.update(|cx| collect_files(project.clone(), &["root/dir*".to_string()], cx).boxed());
let result = SlashCommandOutput::from_event_stream(result).await.unwrap();
- assert!(result.text.starts_with("root/dir"));
+ assert!(result.text.starts_with(separator!("root/dir")));
// 5 files + 2 directories
assert_eq!(result.sections.len(), 7);
@@ -645,7 +656,7 @@ mod test {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/zed",
+ path!("/zed"),
json!({
"assets": {
"dir1": {
@@ -670,7 +681,7 @@ mod test {
)
.await;
- let project = Project::test(fs, ["/zed".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/zed").as_ref()], cx).await;
let result =
cx.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx));
@@ -679,27 +690,36 @@ mod test {
.unwrap();
// Sanity check
- assert!(result.text.starts_with("zed/assets/themes\n"));
+ assert!(result.text.starts_with(separator!("zed/assets/themes\n")));
assert_eq!(result.sections.len(), 7);
// Ensure that full file paths are included in the real output
- assert!(result.text.contains("zed/assets/themes/andromeda/LICENSE"));
- assert!(result.text.contains("zed/assets/themes/ayu/LICENSE"));
- assert!(result.text.contains("zed/assets/themes/summercamp/LICENSE"));
+ assert!(result
+ .text
+ .contains(separator!("zed/assets/themes/andromeda/LICENSE")));
+ assert!(result
+ .text
+ .contains(separator!("zed/assets/themes/ayu/LICENSE")));
+ assert!(result
+ .text
+ .contains(separator!("zed/assets/themes/summercamp/LICENSE")));
assert_eq!(result.sections[5].label, "summercamp");
// Ensure that things are in descending order, with properly relativized paths
assert_eq!(
result.sections[0].label,
- "zed/assets/themes/andromeda/LICENSE"
+ separator!("zed/assets/themes/andromeda/LICENSE")
);
assert_eq!(result.sections[1].label, "andromeda");
- assert_eq!(result.sections[2].label, "zed/assets/themes/ayu/LICENSE");
+ assert_eq!(
+ result.sections[2].label,
+ separator!("zed/assets/themes/ayu/LICENSE")
+ );
assert_eq!(result.sections[3].label, "ayu");
assert_eq!(
result.sections[4].label,
- "zed/assets/themes/summercamp/LICENSE"
+ separator!("zed/assets/themes/summercamp/LICENSE")
);
// Ensure that the project lasts until after the last await
@@ -712,7 +732,7 @@ mod test {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/zed",
+ path!("/zed"),
json!({
"assets": {
"themes": {
@@ -732,7 +752,7 @@ mod test {
)
.await;
- let project = Project::test(fs, ["/zed".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/zed").as_ref()], cx).await;
let result =
cx.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx));
@@ -740,26 +760,29 @@ mod test {
.await
.unwrap();
- assert!(result.text.starts_with("zed/assets/themes\n"));
- assert_eq!(result.sections[0].label, "zed/assets/themes/LICENSE");
+ assert!(result.text.starts_with(separator!("zed/assets/themes\n")));
+ assert_eq!(
+ result.sections[0].label,
+ separator!("zed/assets/themes/LICENSE")
+ );
assert_eq!(
result.sections[1].label,
- "zed/assets/themes/summercamp/LICENSE"
+ separator!("zed/assets/themes/summercamp/LICENSE")
);
assert_eq!(
result.sections[2].label,
- "zed/assets/themes/summercamp/subdir/LICENSE"
+ separator!("zed/assets/themes/summercamp/subdir/LICENSE")
);
assert_eq!(
result.sections[3].label,
- "zed/assets/themes/summercamp/subdir/subsubdir/LICENSE"
+ separator!("zed/assets/themes/summercamp/subdir/subsubdir/LICENSE")
);
assert_eq!(result.sections[4].label, "subsubdir");
assert_eq!(result.sections[5].label, "subdir");
assert_eq!(result.sections[6].label, "summercamp");
- assert_eq!(result.sections[7].label, "zed/assets/themes");
+ assert_eq!(result.sections[7].label, separator!("zed/assets/themes"));
- assert_eq!(result.text, "zed/assets/themes\n```zed/assets/themes/LICENSE\n1\n```\n\nsummercamp\n```zed/assets/themes/summercamp/LICENSE\n1\n```\n\nsubdir\n```zed/assets/themes/summercamp/subdir/LICENSE\n1\n```\n\nsubsubdir\n```zed/assets/themes/summercamp/subdir/subsubdir/LICENSE\n3\n```\n\n");
+ assert_eq!(result.text, separator!("zed/assets/themes\n```zed/assets/themes/LICENSE\n1\n```\n\nsummercamp\n```zed/assets/themes/summercamp/LICENSE\n1\n```\n\nsubdir\n```zed/assets/themes/summercamp/subdir/LICENSE\n1\n```\n\nsubsubdir\n```zed/assets/themes/summercamp/subdir/subsubdir/LICENSE\n3\n```\n\n"));
// Ensure that the project lasts until after the last await
drop(project);
@@ -9,7 +9,7 @@ use release_channel::{AppVersion, ReleaseChannel};
use serde::Deserialize;
use smol::io::AsyncReadExt;
use util::ResultExt as _;
-use workspace::notifications::NotificationId;
+use workspace::notifications::{show_app_notification, NotificationId};
use workspace::Workspace;
use crate::update_notification::UpdateNotification;
@@ -17,6 +17,7 @@ use crate::update_notification::UpdateNotification;
actions!(auto_update, [ViewReleaseNotesLocally]);
pub fn init(cx: &mut App) {
+ notify_if_app_was_updated(cx);
cx.observe_new(|workspace: &mut Workspace, _window, _cx| {
workspace.register_action(|workspace, _: &ViewReleaseNotesLocally, window, cx| {
view_release_notes_locally(workspace, window, cx);
@@ -124,31 +125,35 @@ fn view_release_notes_locally(
.detach();
}
-pub fn notify_of_any_new_update(window: &mut Window, cx: &mut Context<Workspace>) -> Option<()> {
- let updater = AutoUpdater::get(cx)?;
+/// Shows a notification across all workspaces if an update was previously automatically installed
+/// and this notification had not yet been shown.
+pub fn notify_if_app_was_updated(cx: &mut App) {
+ let Some(updater) = AutoUpdater::get(cx) else {
+ return;
+ };
let version = updater.read(cx).current_version();
let should_show_notification = updater.read(cx).should_show_update_notification(cx);
- cx.spawn_in(window, |workspace, mut cx| async move {
+ cx.spawn(|cx| async move {
let should_show_notification = should_show_notification.await?;
if should_show_notification {
- workspace.update(&mut cx, |workspace, cx| {
- let workspace_handle = workspace.weak_handle();
- workspace.show_notification(
+ cx.update(|cx| {
+ show_app_notification(
NotificationId::unique::<UpdateNotification>(),
cx,
- |cx| cx.new(|_| UpdateNotification::new(version, workspace_handle)),
+ move |cx| {
+ let workspace_handle = cx.entity().downgrade();
+ cx.new(|_| UpdateNotification::new(version, workspace_handle))
+ },
);
updater.update(cx, |updater, cx| {
updater
.set_should_show_update_notification(false, cx)
.detach_and_log_err(cx);
- });
+ })
})?;
}
anyhow::Ok(())
})
.detach();
-
- None
}
@@ -1,3 +1,5 @@
+use std::process::Command;
+
fn main() {
if std::env::var("ZED_UPDATE_EXPLANATION").is_ok() {
println!(r#"cargo:rustc-cfg=feature="no-bundled-uninstall""#);
@@ -8,4 +10,18 @@ fn main() {
// Weakly link ScreenCaptureKit to ensure can be used on macOS 10.15+.
println!("cargo:rustc-link-arg=-Wl,-weak_framework,ScreenCaptureKit");
}
+
+ // Populate git sha environment variable if git is available
+ println!("cargo:rerun-if-changed=../../.git/logs/HEAD");
+ if let Some(output) = Command::new("git")
+ .args(["rev-parse", "HEAD"])
+ .output()
+ .ok()
+ .filter(|output| output.status.success())
+ {
+ let git_sha = String::from_utf8_lossy(&output.stdout);
+ let git_sha = git_sha.trim();
+
+ println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}");
+ }
}
@@ -339,13 +339,17 @@ mod linux {
impl InstalledApp for App {
fn zed_version_string(&self) -> String {
format!(
- "Zed {}{} – {}",
+ "Zed {}{}{} – {}",
if *RELEASE_CHANNEL == "stable" {
"".to_string()
} else {
- format!(" {} ", *RELEASE_CHANNEL)
+ format!("{} ", *RELEASE_CHANNEL)
},
option_env!("RELEASE_VERSION").unwrap_or_default(),
+ match option_env!("ZED_COMMIT_SHA") {
+ Some(commit_sha) => format!(" {commit_sha} "),
+ None => "".to_string(),
+ },
self.0.display(),
)
}
@@ -146,6 +146,8 @@ pub fn init_settings(cx: &mut App) {
}
pub fn init(client: &Arc<Client>, cx: &mut App) {
+ let _ = rustls::crypto::aws_lc_rs::default_provider().install_default();
+
let client = Arc::downgrade(client);
cx.on_action({
let client = client.clone();
@@ -1131,15 +1133,8 @@ impl Client {
for error in root_certs.errors {
log::warn!("error loading native certs: {:?}", error);
}
- root_store.add_parsable_certificates(
- &root_certs
- .certs
- .into_iter()
- .map(|cert| cert.as_ref().to_owned())
- .collect::<Vec<_>>(),
- );
+ root_store.add_parsable_certificates(root_certs.certs);
rustls::ClientConfig::builder()
- .with_safe_defaults()
.with_root_certificates(root_store)
.with_no_client_auth()
};
@@ -33,6 +33,7 @@ clock.workspace = true
collections.workspace = true
dashmap.workspace = true
derive_more.workspace = true
+diff.workspace = true
envy = "0.4.2"
futures.workspace = true
google_ai.workspace = true
@@ -130,7 +131,7 @@ worktree = { workspace = true, features = ["test-support"] }
livekit_client_macos = { workspace = true, features = ["test-support"] }
[target.'cfg(not(target_os = "macos"))'.dev-dependencies]
-livekit_client = {workspace = true, features = ["test-support"] }
+livekit_client = { workspace = true, features = ["test-support"] }
[package.metadata.cargo-machete]
ignored = ["async-stripe"]
@@ -100,6 +100,7 @@ CREATE TABLE "worktree_repositories" (
"branch" VARCHAR,
"scan_id" INTEGER NOT NULL,
"is_deleted" BOOL NOT NULL,
+ "current_merge_conflicts" VARCHAR,
PRIMARY KEY(project_id, worktree_id, work_directory_id),
FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
@@ -401,6 +402,15 @@ CREATE TABLE extension_versions (
schema_version INTEGER NOT NULL DEFAULT 0,
wasm_api_version TEXT,
download_count INTEGER NOT NULL DEFAULT 0,
+ provides_themes BOOLEAN NOT NULL DEFAULT FALSE,
+ provides_icon_themes BOOLEAN NOT NULL DEFAULT FALSE,
+ provides_languages BOOLEAN NOT NULL DEFAULT FALSE,
+ provides_grammars BOOLEAN NOT NULL DEFAULT FALSE,
+ provides_language_servers BOOLEAN NOT NULL DEFAULT FALSE,
+ provides_context_servers BOOLEAN NOT NULL DEFAULT FALSE,
+ provides_slash_commands BOOLEAN NOT NULL DEFAULT FALSE,
+ provides_indexed_docs_providers BOOLEAN NOT NULL DEFAULT FALSE,
+ provides_snippets BOOLEAN NOT NULL DEFAULT FALSE,
PRIMARY KEY (extension_id, version)
);
@@ -430,6 +440,7 @@ CREATE TABLE IF NOT EXISTS billing_customers (
id INTEGER PRIMARY KEY AUTOINCREMENT,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
user_id INTEGER NOT NULL REFERENCES users(id),
+ has_overdue_invoices BOOLEAN NOT NULL DEFAULT FALSE,
stripe_customer_id TEXT NOT NULL
);
@@ -0,0 +1,2 @@
+alter table billing_customers
+add column has_overdue_invoices bool not null default false;
@@ -0,0 +1,10 @@
+alter table extension_versions
+add column provides_themes bool not null default false,
+add column provides_icon_themes bool not null default false,
+add column provides_languages bool not null default false,
+add column provides_grammars bool not null default false,
+add column provides_language_servers bool not null default false,
+add column provides_context_servers bool not null default false,
+add column provides_slash_commands bool not null default false,
+add column provides_indexed_docs_providers bool not null default false,
+add column provides_snippets bool not null default false;
@@ -0,0 +1,2 @@
+ALTER TABLE worktree_repositories
+ADD COLUMN current_merge_conflicts VARCHAR NULL;
@@ -249,29 +249,31 @@ async fn create_billing_subscription(
));
}
- if app.db.has_overdue_billing_subscriptions(user.id).await? {
- return Err(Error::http(
- StatusCode::PAYMENT_REQUIRED,
- "user has overdue billing subscriptions".into(),
- ));
+ let existing_billing_customer = app.db.get_billing_customer_by_user_id(user.id).await?;
+ if let Some(existing_billing_customer) = &existing_billing_customer {
+ if existing_billing_customer.has_overdue_invoices {
+ return Err(Error::http(
+ StatusCode::PAYMENT_REQUIRED,
+ "user has overdue invoices".into(),
+ ));
+ }
}
- let customer_id =
- if let Some(existing_customer) = app.db.get_billing_customer_by_user_id(user.id).await? {
- CustomerId::from_str(&existing_customer.stripe_customer_id)
- .context("failed to parse customer ID")?
- } else {
- let customer = Customer::create(
- &stripe_client,
- CreateCustomer {
- email: user.email_address.as_deref(),
- ..Default::default()
- },
- )
- .await?;
+ let customer_id = if let Some(existing_customer) = existing_billing_customer {
+ CustomerId::from_str(&existing_customer.stripe_customer_id)
+ .context("failed to parse customer ID")?
+ } else {
+ let customer = Customer::create(
+ &stripe_client,
+ CreateCustomer {
+ email: user.email_address.as_deref(),
+ ..Default::default()
+ },
+ )
+ .await?;
- customer.id
- };
+ customer.id
+ };
let default_model = llm_db.model(rpc::LanguageModelProvider::Anthropic, "claude-3-5-sonnet")?;
let stripe_model = stripe_billing.register_model(default_model).await?;
@@ -666,6 +668,27 @@ async fn handle_customer_subscription_event(
.await?
.ok_or_else(|| anyhow!("billing customer not found"))?;
+ let was_canceled_due_to_payment_failure = subscription.status == SubscriptionStatus::Canceled
+ && subscription
+ .cancellation_details
+ .as_ref()
+ .and_then(|details| details.reason)
+ .map_or(false, |reason| {
+ reason == CancellationDetailsReason::PaymentFailed
+ });
+
+ if was_canceled_due_to_payment_failure {
+ app.db
+ .update_billing_customer(
+ billing_customer.id,
+ &UpdateBillingCustomerParams {
+ has_overdue_invoices: ActiveValue::set(true),
+ ..Default::default()
+ },
+ )
+ .await?;
+ }
+
if let Some(existing_subscription) = app
.db
.get_billing_subscription_by_stripe_subscription_id(&subscription.id)
@@ -9,10 +9,11 @@ use axum::{
routing::get,
Extension, Json, Router,
};
-use collections::HashMap;
-use rpc::{ExtensionApiManifest, GetExtensionsResponse};
+use collections::{BTreeSet, HashMap};
+use rpc::{ExtensionApiManifest, ExtensionProvides, GetExtensionsResponse};
use semantic_version::SemanticVersion;
use serde::Deserialize;
+use std::str::FromStr;
use std::{sync::Arc, time::Duration};
use time::PrimitiveDateTime;
use util::{maybe, ResultExt};
@@ -35,6 +36,14 @@ pub fn router() -> Router {
#[derive(Debug, Deserialize)]
struct GetExtensionsParams {
filter: Option<String>,
+ /// A comma-delimited list of features that the extension must provide.
+ ///
+ /// For example:
+ /// - `themes`
+ /// - `themes,icon-themes`
+ /// - `languages,language-servers`
+ #[serde(default)]
+ provides: Option<String>,
#[serde(default)]
max_schema_version: i32,
}
@@ -43,9 +52,22 @@ async fn get_extensions(
Extension(app): Extension<Arc<AppState>>,
Query(params): Query<GetExtensionsParams>,
) -> Result<Json<GetExtensionsResponse>> {
+ let provides_filter = params.provides.map(|provides| {
+ provides
+ .split(',')
+ .map(|value| value.trim())
+ .filter_map(|value| ExtensionProvides::from_str(value).ok())
+ .collect::<BTreeSet<_>>()
+ });
+
let mut extensions = app
.db
- .get_extensions(params.filter.as_deref(), params.max_schema_version, 500)
+ .get_extensions(
+ params.filter.as_deref(),
+ provides_filter.as_ref(),
+ params.max_schema_version,
+ 500,
+ )
.await?;
if let Some(filter) = params.filter.as_deref() {
@@ -391,6 +413,7 @@ async fn fetch_extension_manifest(
repository: manifest.repository,
schema_version: manifest.schema_version.unwrap_or(0),
wasm_api_version: manifest.wasm_api_version,
+ provides: manifest.provides,
published_at,
})
}
@@ -6,10 +6,11 @@ pub mod tests;
use crate::{executor::Executor, Error, Result};
use anyhow::anyhow;
-use collections::{BTreeMap, HashMap, HashSet};
+use collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use dashmap::DashMap;
use futures::StreamExt;
use rand::{prelude::StdRng, Rng, SeedableRng};
+use rpc::ExtensionProvides;
use rpc::{
proto::{self},
ConnectionId, ExtensionMetadata,
@@ -781,6 +782,7 @@ pub struct NewExtensionVersion {
pub repository: String,
pub schema_version: i32,
pub wasm_api_version: Option<String>,
+ pub provides: BTreeSet<ExtensionProvides>,
pub published_at: PrimitiveDateTime,
}
@@ -10,6 +10,7 @@ pub struct CreateBillingCustomerParams {
pub struct UpdateBillingCustomerParams {
pub user_id: ActiveValue<UserId>,
pub stripe_customer_id: ActiveValue<String>,
+ pub has_overdue_invoices: ActiveValue<bool>,
}
impl Database {
@@ -43,6 +44,7 @@ impl Database {
id: ActiveValue::set(id),
user_id: params.user_id.clone(),
stripe_customer_id: params.stripe_customer_id.clone(),
+ has_overdue_invoices: params.has_overdue_invoices.clone(),
..Default::default()
})
.exec(&*tx)
@@ -170,40 +170,4 @@ impl Database {
})
.await
}
-
- /// Returns whether the user has any overdue billing subscriptions.
- pub async fn has_overdue_billing_subscriptions(&self, user_id: UserId) -> Result<bool> {
- Ok(self.count_overdue_billing_subscriptions(user_id).await? > 0)
- }
-
- /// Returns the count of the overdue billing subscriptions for the user with the specified ID.
- ///
- /// This includes subscriptions:
- /// - Whose status is `past_due`
- /// - Whose status is `canceled` and the cancellation reason is `payment_failed`
- pub async fn count_overdue_billing_subscriptions(&self, user_id: UserId) -> Result<usize> {
- self.transaction(|tx| async move {
- let past_due = billing_subscription::Column::StripeSubscriptionStatus
- .eq(StripeSubscriptionStatus::PastDue);
- let payment_failed = billing_subscription::Column::StripeSubscriptionStatus
- .eq(StripeSubscriptionStatus::Canceled)
- .and(
- billing_subscription::Column::StripeCancellationReason
- .eq(StripeCancellationReason::PaymentFailed),
- );
-
- let count = billing_subscription::Entity::find()
- .inner_join(billing_customer::Entity)
- .filter(
- billing_customer::Column::UserId
- .eq(user_id)
- .and(past_due.or(payment_failed)),
- )
- .count(&*tx)
- .await?;
-
- Ok(count as usize)
- })
- .await
- }
}
@@ -10,6 +10,7 @@ impl Database {
pub async fn get_extensions(
&self,
filter: Option<&str>,
+ provides_filter: Option<&BTreeSet<ExtensionProvides>>,
max_schema_version: i32,
limit: usize,
) -> Result<Vec<ExtensionMetadata>> {
@@ -26,6 +27,10 @@ impl Database {
condition = condition.add(Expr::cust_with_expr("name ILIKE $1", fuzzy_name_filter));
}
+ if let Some(provides_filter) = provides_filter {
+ condition = apply_provides_filter(condition, provides_filter);
+ }
+
self.get_extensions_where(condition, Some(limit as u64), &tx)
.await
})
@@ -282,6 +287,39 @@ impl Database {
description: ActiveValue::Set(version.description.clone()),
schema_version: ActiveValue::Set(version.schema_version),
wasm_api_version: ActiveValue::Set(version.wasm_api_version.clone()),
+ provides_themes: ActiveValue::Set(
+ version.provides.contains(&ExtensionProvides::Themes),
+ ),
+ provides_icon_themes: ActiveValue::Set(
+ version.provides.contains(&ExtensionProvides::IconThemes),
+ ),
+ provides_languages: ActiveValue::Set(
+ version.provides.contains(&ExtensionProvides::Languages),
+ ),
+ provides_grammars: ActiveValue::Set(
+ version.provides.contains(&ExtensionProvides::Grammars),
+ ),
+ provides_language_servers: ActiveValue::Set(
+ version
+ .provides
+ .contains(&ExtensionProvides::LanguageServers),
+ ),
+ provides_context_servers: ActiveValue::Set(
+ version
+ .provides
+ .contains(&ExtensionProvides::ContextServers),
+ ),
+ provides_slash_commands: ActiveValue::Set(
+ version.provides.contains(&ExtensionProvides::SlashCommands),
+ ),
+ provides_indexed_docs_providers: ActiveValue::Set(
+ version
+ .provides
+ .contains(&ExtensionProvides::IndexedDocsProviders),
+ ),
+ provides_snippets: ActiveValue::Set(
+ version.provides.contains(&ExtensionProvides::Snippets),
+ ),
download_count: ActiveValue::NotSet,
}
}))
@@ -352,10 +390,55 @@ impl Database {
}
}
+fn apply_provides_filter(
+ mut condition: Condition,
+ provides_filter: &BTreeSet<ExtensionProvides>,
+) -> Condition {
+ if provides_filter.contains(&ExtensionProvides::Themes) {
+ condition = condition.add(extension_version::Column::ProvidesThemes.eq(true));
+ }
+
+ if provides_filter.contains(&ExtensionProvides::IconThemes) {
+ condition = condition.add(extension_version::Column::ProvidesIconThemes.eq(true));
+ }
+
+ if provides_filter.contains(&ExtensionProvides::Languages) {
+ condition = condition.add(extension_version::Column::ProvidesLanguages.eq(true));
+ }
+
+ if provides_filter.contains(&ExtensionProvides::Grammars) {
+ condition = condition.add(extension_version::Column::ProvidesGrammars.eq(true));
+ }
+
+ if provides_filter.contains(&ExtensionProvides::LanguageServers) {
+ condition = condition.add(extension_version::Column::ProvidesLanguageServers.eq(true));
+ }
+
+ if provides_filter.contains(&ExtensionProvides::ContextServers) {
+ condition = condition.add(extension_version::Column::ProvidesContextServers.eq(true));
+ }
+
+ if provides_filter.contains(&ExtensionProvides::SlashCommands) {
+ condition = condition.add(extension_version::Column::ProvidesSlashCommands.eq(true));
+ }
+
+ if provides_filter.contains(&ExtensionProvides::IndexedDocsProviders) {
+ condition = condition.add(extension_version::Column::ProvidesIndexedDocsProviders.eq(true));
+ }
+
+ if provides_filter.contains(&ExtensionProvides::Snippets) {
+ condition = condition.add(extension_version::Column::ProvidesSnippets.eq(true));
+ }
+
+ condition
+}
+
fn metadata_from_extension_and_version(
extension: extension::Model,
version: extension_version::Model,
) -> ExtensionMetadata {
+ let provides = version.provides();
+
ExtensionMetadata {
id: extension.external_id.into(),
manifest: rpc::ExtensionApiManifest {
@@ -370,6 +453,7 @@ fn metadata_from_extension_and_version(
repository: version.repository,
schema_version: Some(version.schema_version),
wasm_api_version: version.wasm_api_version,
+ provides,
},
published_at: convert_time_to_chrono(version.published_at),
@@ -333,6 +333,9 @@ impl Database {
scan_id: ActiveValue::set(update.scan_id as i64),
branch: ActiveValue::set(repository.branch.clone()),
is_deleted: ActiveValue::set(false),
+ current_merge_conflicts: ActiveValue::Set(Some(
+ serde_json::to_string(&repository.current_merge_conflicts).unwrap(),
+ )),
},
))
.on_conflict(
@@ -769,6 +772,13 @@ impl Database {
updated_statuses.push(db_status_to_proto(status_entry)?);
}
+ let current_merge_conflicts = db_repository_entry
+ .current_merge_conflicts
+ .as_ref()
+ .map(|conflicts| serde_json::from_str(&conflicts))
+ .transpose()?
+ .unwrap_or_default();
+
worktree.repository_entries.insert(
db_repository_entry.work_directory_id as u64,
proto::RepositoryEntry {
@@ -776,6 +786,7 @@ impl Database {
branch: db_repository_entry.branch,
updated_statuses,
removed_statuses: Vec::new(),
+ current_merge_conflicts,
},
);
}
@@ -736,11 +736,19 @@ impl Database {
}
}
+ let current_merge_conflicts = db_repository
+ .current_merge_conflicts
+ .as_ref()
+ .map(|conflicts| serde_json::from_str(&conflicts))
+ .transpose()?
+ .unwrap_or_default();
+
worktree.updated_repositories.push(proto::RepositoryEntry {
work_directory_id: db_repository.work_directory_id as u64,
branch: db_repository.branch,
updated_statuses,
removed_statuses,
+ current_merge_conflicts,
});
}
}
@@ -9,6 +9,7 @@ pub struct Model {
pub id: BillingCustomerId,
pub user_id: UserId,
pub stripe_customer_id: String,
+ pub has_overdue_invoices: bool,
pub created_at: DateTime,
}
@@ -1,4 +1,6 @@
use crate::db::ExtensionId;
+use collections::BTreeSet;
+use rpc::ExtensionProvides;
use sea_orm::entity::prelude::*;
use time::PrimitiveDateTime;
@@ -16,6 +18,58 @@ pub struct Model {
pub schema_version: i32,
pub wasm_api_version: Option<String>,
pub download_count: i64,
+ pub provides_themes: bool,
+ pub provides_icon_themes: bool,
+ pub provides_languages: bool,
+ pub provides_grammars: bool,
+ pub provides_language_servers: bool,
+ pub provides_context_servers: bool,
+ pub provides_slash_commands: bool,
+ pub provides_indexed_docs_providers: bool,
+ pub provides_snippets: bool,
+}
+
+impl Model {
+ pub fn provides(&self) -> BTreeSet<ExtensionProvides> {
+ let mut provides = BTreeSet::default();
+ if self.provides_themes {
+ provides.insert(ExtensionProvides::Themes);
+ }
+
+ if self.provides_icon_themes {
+ provides.insert(ExtensionProvides::IconThemes);
+ }
+
+ if self.provides_languages {
+ provides.insert(ExtensionProvides::Languages);
+ }
+
+ if self.provides_grammars {
+ provides.insert(ExtensionProvides::Grammars);
+ }
+
+ if self.provides_language_servers {
+ provides.insert(ExtensionProvides::LanguageServers);
+ }
+
+ if self.provides_context_servers {
+ provides.insert(ExtensionProvides::ContextServers);
+ }
+
+ if self.provides_slash_commands {
+ provides.insert(ExtensionProvides::SlashCommands);
+ }
+
+ if self.provides_indexed_docs_providers {
+ provides.insert(ExtensionProvides::IndexedDocsProviders);
+ }
+
+ if self.provides_snippets {
+ provides.insert(ExtensionProvides::Snippets);
+ }
+
+ provides
+ }
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -13,6 +13,8 @@ pub struct Model {
pub scan_id: i64,
pub branch: Option<String>,
pub is_deleted: bool,
+ // JSON-encoded array, stored as a string
+ pub current_merge_conflicts: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -1,6 +1,6 @@
use std::sync::Arc;
-use crate::db::billing_subscription::{StripeCancellationReason, StripeSubscriptionStatus};
+use crate::db::billing_subscription::StripeSubscriptionStatus;
use crate::db::tests::new_test_user;
use crate::db::{CreateBillingCustomerParams, CreateBillingSubscriptionParams};
use crate::test_both_dbs;
@@ -88,113 +88,3 @@ async fn test_get_active_billing_subscriptions(db: &Arc<Database>) {
assert_eq!(subscription_count, 0);
}
}
-
-test_both_dbs!(
- test_count_overdue_billing_subscriptions,
- test_count_overdue_billing_subscriptions_postgres,
- test_count_overdue_billing_subscriptions_sqlite
-);
-
-async fn test_count_overdue_billing_subscriptions(db: &Arc<Database>) {
- // A user with no subscription has no overdue billing subscriptions.
- {
- let user_id = new_test_user(db, "no-subscription-user@example.com").await;
- let subscription_count = db
- .count_overdue_billing_subscriptions(user_id)
- .await
- .unwrap();
-
- assert_eq!(subscription_count, 0);
- }
-
- // A user with a past-due subscription has an overdue billing subscription.
- {
- let user_id = new_test_user(db, "past-due-user@example.com").await;
- let customer = db
- .create_billing_customer(&CreateBillingCustomerParams {
- user_id,
- stripe_customer_id: "cus_past_due_user".into(),
- })
- .await
- .unwrap();
- assert_eq!(customer.stripe_customer_id, "cus_past_due_user".to_string());
-
- db.create_billing_subscription(&CreateBillingSubscriptionParams {
- billing_customer_id: customer.id,
- stripe_subscription_id: "sub_past_due_user".into(),
- stripe_subscription_status: StripeSubscriptionStatus::PastDue,
- stripe_cancellation_reason: None,
- })
- .await
- .unwrap();
-
- let subscription_count = db
- .count_overdue_billing_subscriptions(user_id)
- .await
- .unwrap();
- assert_eq!(subscription_count, 1);
- }
-
- // A user with a canceled subscription with a reason of `payment_failed` has an overdue billing subscription.
- {
- let user_id =
- new_test_user(db, "canceled-subscription-payment-failed-user@example.com").await;
- let customer = db
- .create_billing_customer(&CreateBillingCustomerParams {
- user_id,
- stripe_customer_id: "cus_canceled_subscription_payment_failed_user".into(),
- })
- .await
- .unwrap();
- assert_eq!(
- customer.stripe_customer_id,
- "cus_canceled_subscription_payment_failed_user".to_string()
- );
-
- db.create_billing_subscription(&CreateBillingSubscriptionParams {
- billing_customer_id: customer.id,
- stripe_subscription_id: "sub_canceled_subscription_payment_failed_user".into(),
- stripe_subscription_status: StripeSubscriptionStatus::Canceled,
- stripe_cancellation_reason: Some(StripeCancellationReason::PaymentFailed),
- })
- .await
- .unwrap();
-
- let subscription_count = db
- .count_overdue_billing_subscriptions(user_id)
- .await
- .unwrap();
- assert_eq!(subscription_count, 1);
- }
-
- // A user with a canceled subscription with a reason of `cancellation_requested` has no overdue billing subscriptions.
- {
- let user_id = new_test_user(db, "canceled-subscription-user@example.com").await;
- let customer = db
- .create_billing_customer(&CreateBillingCustomerParams {
- user_id,
- stripe_customer_id: "cus_canceled_subscription_user".into(),
- })
- .await
- .unwrap();
- assert_eq!(
- customer.stripe_customer_id,
- "cus_canceled_subscription_user".to_string()
- );
-
- db.create_billing_subscription(&CreateBillingSubscriptionParams {
- billing_customer_id: customer.id,
- stripe_subscription_id: "sub_canceled_subscription_user".into(),
- stripe_subscription_status: StripeSubscriptionStatus::Canceled,
- stripe_cancellation_reason: Some(StripeCancellationReason::CancellationRequested),
- })
- .await
- .unwrap();
-
- let subscription_count = db
- .count_overdue_billing_subscriptions(user_id)
- .await
- .unwrap();
- assert_eq!(subscription_count, 0);
- }
-}
@@ -1,10 +1,14 @@
+use std::collections::BTreeSet;
+use std::sync::Arc;
+
+use rpc::ExtensionProvides;
+
use super::Database;
use crate::db::ExtensionVersionConstraints;
use crate::{
db::{queries::extensions::convert_time_to_chrono, ExtensionMetadata, NewExtensionVersion},
test_both_dbs,
};
-use std::sync::Arc;
test_both_dbs!(
test_extensions,
@@ -16,7 +20,7 @@ async fn test_extensions(db: &Arc<Database>) {
let versions = db.get_known_extension_versions().await.unwrap();
assert!(versions.is_empty());
- let extensions = db.get_extensions(None, 1, 5).await.unwrap();
+ let extensions = db.get_extensions(None, None, 1, 5).await.unwrap();
assert!(extensions.is_empty());
let t0 = time::OffsetDateTime::from_unix_timestamp_nanos(0).unwrap();
@@ -37,6 +41,7 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: None,
+ provides: BTreeSet::default(),
published_at: t0,
},
NewExtensionVersion {
@@ -47,6 +52,7 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: None,
+ provides: BTreeSet::default(),
published_at: t0,
},
],
@@ -61,6 +67,7 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext2/repo".into(),
schema_version: 0,
wasm_api_version: None,
+ provides: BTreeSet::default(),
published_at: t0,
}],
),
@@ -83,7 +90,7 @@ async fn test_extensions(db: &Arc<Database>) {
);
// The latest version of each extension is returned.
- let extensions = db.get_extensions(None, 1, 5).await.unwrap();
+ let extensions = db.get_extensions(None, None, 1, 5).await.unwrap();
assert_eq!(
extensions,
&[
@@ -97,6 +104,7 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: Some(1),
wasm_api_version: None,
+ provides: BTreeSet::default(),
},
published_at: t0_chrono,
download_count: 0,
@@ -111,6 +119,7 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext2/repo".into(),
schema_version: Some(0),
wasm_api_version: None,
+ provides: BTreeSet::default(),
},
published_at: t0_chrono,
download_count: 0
@@ -119,7 +128,7 @@ async fn test_extensions(db: &Arc<Database>) {
);
// Extensions with too new of a schema version are excluded.
- let extensions = db.get_extensions(None, 0, 5).await.unwrap();
+ let extensions = db.get_extensions(None, None, 0, 5).await.unwrap();
assert_eq!(
extensions,
&[ExtensionMetadata {
@@ -132,6 +141,7 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext2/repo".into(),
schema_version: Some(0),
wasm_api_version: None,
+ provides: BTreeSet::default(),
},
published_at: t0_chrono,
download_count: 0
@@ -158,7 +168,7 @@ async fn test_extensions(db: &Arc<Database>) {
.unwrap());
// Extensions are returned in descending order of total downloads.
- let extensions = db.get_extensions(None, 1, 5).await.unwrap();
+ let extensions = db.get_extensions(None, None, 1, 5).await.unwrap();
assert_eq!(
extensions,
&[
@@ -172,6 +182,7 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext2/repo".into(),
schema_version: Some(0),
wasm_api_version: None,
+ provides: BTreeSet::default(),
},
published_at: t0_chrono,
download_count: 7
@@ -186,6 +197,7 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: Some(1),
wasm_api_version: None,
+ provides: BTreeSet::default(),
},
published_at: t0_chrono,
download_count: 5,
@@ -207,6 +219,7 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: None,
+ provides: BTreeSet::default(),
published_at: t0,
}],
),
@@ -220,6 +233,7 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext2/repo".into(),
schema_version: 0,
wasm_api_version: None,
+ provides: BTreeSet::default(),
published_at: t0,
}],
),
@@ -244,7 +258,7 @@ async fn test_extensions(db: &Arc<Database>) {
.collect()
);
- let extensions = db.get_extensions(None, 1, 5).await.unwrap();
+ let extensions = db.get_extensions(None, None, 1, 5).await.unwrap();
assert_eq!(
extensions,
&[
@@ -258,6 +272,7 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext2/repo".into(),
schema_version: Some(0),
wasm_api_version: None,
+ provides: BTreeSet::default(),
},
published_at: t0_chrono,
download_count: 7
@@ -272,6 +287,7 @@ async fn test_extensions(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: Some(1),
wasm_api_version: None,
+ provides: BTreeSet::default(),
},
published_at: t0_chrono,
download_count: 5,
@@ -290,7 +306,7 @@ async fn test_extensions_by_id(db: &Arc<Database>) {
let versions = db.get_known_extension_versions().await.unwrap();
assert!(versions.is_empty());
- let extensions = db.get_extensions(None, 1, 5).await.unwrap();
+ let extensions = db.get_extensions(None, None, 1, 5).await.unwrap();
assert!(extensions.is_empty());
let t0 = time::OffsetDateTime::from_unix_timestamp_nanos(0).unwrap();
@@ -311,6 +327,10 @@ async fn test_extensions_by_id(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: Some("0.0.4".into()),
+ provides: BTreeSet::from_iter([
+ ExtensionProvides::Grammars,
+ ExtensionProvides::Languages,
+ ]),
published_at: t0,
},
NewExtensionVersion {
@@ -321,6 +341,11 @@ async fn test_extensions_by_id(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: Some("0.0.4".into()),
+ provides: BTreeSet::from_iter([
+ ExtensionProvides::Grammars,
+ ExtensionProvides::Languages,
+ ExtensionProvides::LanguageServers,
+ ]),
published_at: t0,
},
NewExtensionVersion {
@@ -331,6 +356,11 @@ async fn test_extensions_by_id(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: Some("0.0.5".into()),
+ provides: BTreeSet::from_iter([
+ ExtensionProvides::Grammars,
+ ExtensionProvides::Languages,
+ ExtensionProvides::LanguageServers,
+ ]),
published_at: t0,
},
],
@@ -345,6 +375,7 @@ async fn test_extensions_by_id(db: &Arc<Database>) {
repository: "ext2/repo".into(),
schema_version: 0,
wasm_api_version: None,
+ provides: BTreeSet::default(),
published_at: t0,
}],
),
@@ -378,6 +409,11 @@ async fn test_extensions_by_id(db: &Arc<Database>) {
repository: "ext1/repo".into(),
schema_version: Some(1),
wasm_api_version: Some("0.0.4".into()),
+ provides: BTreeSet::from_iter([
+ ExtensionProvides::Grammars,
+ ExtensionProvides::Languages,
+ ExtensionProvides::LanguageServers,
+ ]),
},
published_at: t0_chrono,
download_count: 0,
@@ -309,7 +309,8 @@ impl Server {
.add_request_handler(forward_read_only_project_request::<proto::ResolveInlayHint>)
.add_request_handler(forward_read_only_project_request::<proto::OpenBufferByPath>)
.add_request_handler(forward_read_only_project_request::<proto::GitBranches>)
- .add_request_handler(forward_read_only_project_request::<proto::GetStagedText>)
+ .add_request_handler(forward_read_only_project_request::<proto::OpenUnstagedDiff>)
+ .add_request_handler(forward_read_only_project_request::<proto::OpenUncommittedDiff>)
.add_request_handler(
forward_mutating_project_request::<proto::RegisterBufferWithLanguageServers>,
)
@@ -348,7 +349,7 @@ impl Server {
.add_message_handler(broadcast_project_message_from_host::<proto::UpdateBufferFile>)
.add_message_handler(broadcast_project_message_from_host::<proto::BufferReloaded>)
.add_message_handler(broadcast_project_message_from_host::<proto::BufferSaved>)
- .add_message_handler(broadcast_project_message_from_host::<proto::UpdateDiffBase>)
+ .add_message_handler(broadcast_project_message_from_host::<proto::UpdateDiffBases>)
.add_request_handler(get_users)
.add_request_handler(fuzzy_search_users)
.add_request_handler(request_contact)
@@ -1991,10 +1991,9 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
.collect(),
remote_url: Some("git@github.com:zed-industries/zed.git".to_string()),
};
- client_a.fs().set_blame_for_repo(
- Path::new("/my-repo/.git"),
- vec![(Path::new("file.txt"), blame)],
- );
+ client_a
+ .fs()
+ .set_blame_for_repo(Path::new("/my-repo/.git"), vec![("file.txt".into(), blame)]);
let (project_a, worktree_id) = client_a.build_local_project("/my-repo", cx_a).await;
let project_id = active_call_a
@@ -2558,13 +2558,27 @@ async fn test_git_diff_base_change(
let project_remote = client_b.join_remote_project(project_id, cx_b).await;
- let diff_base = "
+ let staged_text = "
one
three
"
.unindent();
- let new_diff_base = "
+ let committed_text = "
+ one
+ TWO
+ three
+ "
+ .unindent();
+
+ let new_committed_text = "
+ one
+ TWO_HUNDRED
+ three
+ "
+ .unindent();
+
+ let new_staged_text = "
one
two
"
@@ -2572,7 +2586,11 @@ async fn test_git_diff_base_change(
client_a.fs().set_index_for_repo(
Path::new("/dir/.git"),
- &[(Path::new("a.txt"), diff_base.clone())],
+ &[("a.txt".into(), staged_text.clone())],
+ );
+ client_a.fs().set_head_for_repo(
+ Path::new("/dir/.git"),
+ &[("a.txt".into(), committed_text.clone())],
);
// Create the buffer
@@ -2580,25 +2598,25 @@ async fn test_git_diff_base_change(
.update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
.await
.unwrap();
- let change_set_local_a = project_local
+ let local_unstaged_diff_a = project_local
.update(cx_a, |p, cx| {
- p.open_unstaged_changes(buffer_local_a.clone(), cx)
+ p.open_unstaged_diff(buffer_local_a.clone(), cx)
})
.await
.unwrap();
// Wait for it to catch up to the new diff
executor.run_until_parked();
- change_set_local_a.read_with(cx_a, |change_set, cx| {
+ local_unstaged_diff_a.read_with(cx_a, |diff, cx| {
let buffer = buffer_local_a.read(cx);
assert_eq!(
- change_set.base_text_string().as_deref(),
- Some(diff_base.as_str())
+ diff.base_text_string().as_deref(),
+ Some(staged_text.as_str())
);
- git::diff::assert_hunks(
- change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
+ diff::assert_hunks(
+ diff.snapshot.hunks_in_row_range(0..4, buffer),
buffer,
- &diff_base,
+ &diff.base_text_string().unwrap(),
&[(1..2, "", "two\n")],
);
});
@@ -2608,73 +2626,113 @@ async fn test_git_diff_base_change(
.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
.await
.unwrap();
- let change_set_remote_a = project_remote
+ let remote_unstaged_diff_a = project_remote
.update(cx_b, |p, cx| {
- p.open_unstaged_changes(buffer_remote_a.clone(), cx)
+ p.open_unstaged_diff(buffer_remote_a.clone(), cx)
})
.await
.unwrap();
// Wait remote buffer to catch up to the new diff
executor.run_until_parked();
- change_set_remote_a.read_with(cx_b, |change_set, cx| {
+ remote_unstaged_diff_a.read_with(cx_b, |diff, cx| {
let buffer = buffer_remote_a.read(cx);
assert_eq!(
- change_set.base_text_string().as_deref(),
- Some(diff_base.as_str())
+ diff.base_text_string().as_deref(),
+ Some(staged_text.as_str())
);
- git::diff::assert_hunks(
- change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
+ diff::assert_hunks(
+ diff.snapshot.hunks_in_row_range(0..4, buffer),
buffer,
- &diff_base,
+ &diff.base_text_string().unwrap(),
&[(1..2, "", "two\n")],
);
});
- // Update the staged text of the open buffer
+ // Open uncommitted changes on the guest, without opening them on the host first
+ let remote_uncommitted_diff_a = project_remote
+ .update(cx_b, |p, cx| {
+ p.open_uncommitted_diff(buffer_remote_a.clone(), cx)
+ })
+ .await
+ .unwrap();
+ executor.run_until_parked();
+ remote_uncommitted_diff_a.read_with(cx_b, |diff, cx| {
+ let buffer = buffer_remote_a.read(cx);
+ assert_eq!(
+ diff.base_text_string().as_deref(),
+ Some(committed_text.as_str())
+ );
+ diff::assert_hunks(
+ diff.snapshot.hunks_in_row_range(0..4, buffer),
+ buffer,
+ &diff.base_text_string().unwrap(),
+ &[(1..2, "TWO\n", "two\n")],
+ );
+ });
+
+ // Update the index text of the open buffer
client_a.fs().set_index_for_repo(
Path::new("/dir/.git"),
- &[(Path::new("a.txt"), new_diff_base.clone())],
+ &[("a.txt".into(), new_staged_text.clone())],
+ );
+ client_a.fs().set_head_for_repo(
+ Path::new("/dir/.git"),
+ &[("a.txt".into(), new_committed_text.clone())],
);
// Wait for buffer_local_a to receive it
executor.run_until_parked();
- change_set_local_a.read_with(cx_a, |change_set, cx| {
+ local_unstaged_diff_a.read_with(cx_a, |diff, cx| {
let buffer = buffer_local_a.read(cx);
assert_eq!(
- change_set.base_text_string().as_deref(),
- Some(new_diff_base.as_str())
+ diff.base_text_string().as_deref(),
+ Some(new_staged_text.as_str())
);
- git::diff::assert_hunks(
- change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
+ diff::assert_hunks(
+ diff.snapshot.hunks_in_row_range(0..4, buffer),
buffer,
- &new_diff_base,
+ &diff.base_text_string().unwrap(),
&[(2..3, "", "three\n")],
);
});
- change_set_remote_a.read_with(cx_b, |change_set, cx| {
+ remote_unstaged_diff_a.read_with(cx_b, |diff, cx| {
let buffer = buffer_remote_a.read(cx);
assert_eq!(
- change_set.base_text_string().as_deref(),
- Some(new_diff_base.as_str())
+ diff.base_text_string().as_deref(),
+ Some(new_staged_text.as_str())
);
- git::diff::assert_hunks(
- change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
+ diff::assert_hunks(
+ diff.snapshot.hunks_in_row_range(0..4, buffer),
buffer,
- &new_diff_base,
+ &diff.base_text_string().unwrap(),
&[(2..3, "", "three\n")],
);
});
+ remote_uncommitted_diff_a.read_with(cx_b, |diff, cx| {
+ let buffer = buffer_remote_a.read(cx);
+ assert_eq!(
+ diff.base_text_string().as_deref(),
+ Some(new_committed_text.as_str())
+ );
+ diff::assert_hunks(
+ diff.snapshot.hunks_in_row_range(0..4, buffer),
+ buffer,
+ &diff.base_text_string().unwrap(),
+ &[(1..2, "TWO_HUNDRED\n", "two\n")],
+ );
+ });
+
// Nested git dir
- let diff_base = "
+ let staged_text = "
one
three
"
.unindent();
- let new_diff_base = "
+ let new_staged_text = "
one
two
"
@@ -2682,7 +2740,7 @@ async fn test_git_diff_base_change(
client_a.fs().set_index_for_repo(
Path::new("/dir/sub/.git"),
- &[(Path::new("b.txt"), diff_base.clone())],
+ &[("b.txt".into(), staged_text.clone())],
);
// Create the buffer
@@ -2690,25 +2748,25 @@ async fn test_git_diff_base_change(
.update(cx_a, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx))
.await
.unwrap();
- let change_set_local_b = project_local
+ let local_unstaged_diff_b = project_local
.update(cx_a, |p, cx| {
- p.open_unstaged_changes(buffer_local_b.clone(), cx)
+ p.open_unstaged_diff(buffer_local_b.clone(), cx)
})
.await
.unwrap();
// Wait for it to catch up to the new diff
executor.run_until_parked();
- change_set_local_b.read_with(cx_a, |change_set, cx| {
+ local_unstaged_diff_b.read_with(cx_a, |diff, cx| {
let buffer = buffer_local_b.read(cx);
assert_eq!(
- change_set.base_text_string().as_deref(),
- Some(diff_base.as_str())
+ diff.base_text_string().as_deref(),
+ Some(staged_text.as_str())
);
- git::diff::assert_hunks(
- change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
+ diff::assert_hunks(
+ diff.snapshot.hunks_in_row_range(0..4, buffer),
buffer,
- &diff_base,
+ &diff.base_text_string().unwrap(),
&[(1..2, "", "two\n")],
);
});
@@ -2718,60 +2776,60 @@ async fn test_git_diff_base_change(
.update(cx_b, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx))
.await
.unwrap();
- let change_set_remote_b = project_remote
+ let remote_unstaged_diff_b = project_remote
.update(cx_b, |p, cx| {
- p.open_unstaged_changes(buffer_remote_b.clone(), cx)
+ p.open_unstaged_diff(buffer_remote_b.clone(), cx)
})
.await
.unwrap();
executor.run_until_parked();
- change_set_remote_b.read_with(cx_b, |change_set, cx| {
+ remote_unstaged_diff_b.read_with(cx_b, |diff, cx| {
let buffer = buffer_remote_b.read(cx);
assert_eq!(
- change_set.base_text_string().as_deref(),
- Some(diff_base.as_str())
+ diff.base_text_string().as_deref(),
+ Some(staged_text.as_str())
);
- git::diff::assert_hunks(
- change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
+ diff::assert_hunks(
+ diff.snapshot.hunks_in_row_range(0..4, buffer),
buffer,
- &diff_base,
+ &staged_text,
&[(1..2, "", "two\n")],
);
});
- // Update the staged text
+ // Update the staged text
client_a.fs().set_index_for_repo(
Path::new("/dir/sub/.git"),
- &[(Path::new("b.txt"), new_diff_base.clone())],
+ &[("b.txt".into(), new_staged_text.clone())],
);
// Wait for buffer_local_b to receive it
executor.run_until_parked();
- change_set_local_b.read_with(cx_a, |change_set, cx| {
+ local_unstaged_diff_b.read_with(cx_a, |diff, cx| {
let buffer = buffer_local_b.read(cx);
assert_eq!(
- change_set.base_text_string().as_deref(),
- Some(new_diff_base.as_str())
+ diff.base_text_string().as_deref(),
+ Some(new_staged_text.as_str())
);
- git::diff::assert_hunks(
- change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
+ diff::assert_hunks(
+ diff.snapshot.hunks_in_row_range(0..4, buffer),
buffer,
- &new_diff_base,
+ &new_staged_text,
&[(2..3, "", "three\n")],
);
});
- change_set_remote_b.read_with(cx_b, |change_set, cx| {
+ remote_unstaged_diff_b.read_with(cx_b, |diff, cx| {
let buffer = buffer_remote_b.read(cx);
assert_eq!(
- change_set.base_text_string().as_deref(),
- Some(new_diff_base.as_str())
+ diff.base_text_string().as_deref(),
+ Some(new_staged_text.as_str())
);
- git::diff::assert_hunks(
- change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
+ diff::assert_hunks(
+ diff.snapshot.hunks_in_row_range(0..4, buffer),
buffer,
- &new_diff_base,
+ &new_staged_text,
&[(2..3, "", "three\n")],
);
});
@@ -953,8 +953,8 @@ impl RandomizedTest for ProjectCollaborationTest {
let dot_git_dir = repo_path.join(".git");
let contents = contents
- .iter()
- .map(|(path, contents)| (path.as_path(), contents.clone()))
+ .into_iter()
+ .map(|(path, contents)| (path.into(), contents))
.collect::<Vec<_>>();
if client.fs().metadata(&dot_git_dir).await?.is_none() {
client.fs().create_dir(&dot_git_dir).await?;
@@ -1339,7 +1339,7 @@ impl RandomizedTest for ProjectCollaborationTest {
project
.buffer_store()
.read(cx)
- .get_unstaged_changes(host_buffer.read(cx).remote_id())
+ .get_unstaged_diff(host_buffer.read(cx).remote_id(), cx)
.unwrap()
.read(cx)
.base_text_string()
@@ -1348,7 +1348,7 @@ impl RandomizedTest for ProjectCollaborationTest {
project
.buffer_store()
.read(cx)
- .get_unstaged_changes(guest_buffer.read(cx).remote_id())
+ .get_unstaged_diff(guest_buffer.read(cx).remote_id(), cx)
.unwrap()
.read(cx)
.base_text_string()
@@ -59,20 +59,20 @@ workspace.workspace = true
async-std = { version = "1.12.0", features = ["unstable"] }
[dev-dependencies]
-indoc.workspace = true
-serde_json.workspace = true
-clock = { workspace = true, features = ["test-support"] }
client = { workspace = true, features = ["test-support"] }
+clock = { workspace = true, features = ["test-support"] }
collections = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
fs = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
http_client = { workspace = true, features = ["test-support"] }
+indoc.workspace = true
language = { workspace = true, features = ["test-support"] }
lsp = { workspace = true, features = ["test-support"] }
node_runtime = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
rpc = { workspace = true, features = ["test-support"] }
+serde_json.workspace = true
settings = { workspace = true, features = ["test-support"] }
theme = { workspace = true, features = ["test-support"] }
util = { workspace = true, features = ["test-support"] }
@@ -17,7 +17,7 @@ use gpui::{
use http_client::github::get_release_by_tag_name;
use http_client::HttpClient;
use language::{
- language_settings::{all_language_settings, language_settings, InlineCompletionProvider},
+ language_settings::{all_language_settings, language_settings, EditPredictionProvider},
point_from_lsp, point_to_lsp, Anchor, Bias, Buffer, BufferSnapshot, Language, PointUtf16,
ToPointUtf16,
};
@@ -368,8 +368,8 @@ impl Copilot {
let server_id = self.server_id;
let http = self.http.clone();
let node_runtime = self.node_runtime.clone();
- if all_language_settings(None, cx).inline_completions.provider
- == InlineCompletionProvider::Copilot
+ if all_language_settings(None, cx).edit_predictions.provider
+ == EditPredictionProvider::Copilot
{
if matches!(self.server, CopilotServer::Disabled) {
let start_task = cx
@@ -1061,6 +1061,7 @@ async fn get_copilot_lsp(http: Arc<dyn HttpClient>) -> anyhow::Result<PathBuf> {
mod tests {
use super::*;
use gpui::TestAppContext;
+ use util::path;
#[gpui::test(iterations = 10)]
async fn test_buffer_management(cx: &mut TestAppContext) {
@@ -1123,7 +1124,7 @@ mod tests {
buffer_1.update(cx, |buffer, cx| {
buffer.file_updated(
Arc::new(File {
- abs_path: "/root/child/buffer-1".into(),
+ abs_path: path!("/root/child/buffer-1").into(),
path: Path::new("child/buffer-1").into(),
}),
cx,
@@ -1136,7 +1137,7 @@ mod tests {
text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri),
}
);
- let buffer_1_uri = lsp::Url::from_file_path("/root/child/buffer-1").unwrap();
+ let buffer_1_uri = lsp::Url::from_file_path(path!("/root/child/buffer-1")).unwrap();
assert_eq!(
lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
.await,
@@ -89,7 +89,7 @@ impl Model {
Self::Gpt4o => 64000,
Self::Gpt4 => 32768,
Self::Gpt3_5Turbo => 12288,
- Self::O3Mini => 200_000,
+ Self::O3Mini => 20000,
Self::O1 => 20000,
Self::Claude3_5Sonnet => 200_000,
}
@@ -1,11 +1,9 @@
use crate::{Completion, Copilot};
use anyhow::Result;
use gpui::{App, Context, Entity, EntityId, Task};
-use inline_completion::{Direction, InlineCompletion, InlineCompletionProvider};
-use language::{
- language_settings::{all_language_settings, AllLanguageSettings},
- Buffer, OffsetRangeExt, ToOffset,
-};
+use inline_completion::{Direction, EditPredictionProvider, InlineCompletion};
+use language::{language_settings::AllLanguageSettings, Buffer, OffsetRangeExt, ToOffset};
+use project::Project;
use settings::Settings;
use std::{path::Path, time::Duration};
@@ -50,7 +48,7 @@ impl CopilotCompletionProvider {
}
}
-impl InlineCompletionProvider for CopilotCompletionProvider {
+impl EditPredictionProvider for CopilotCompletionProvider {
fn name() -> &'static str {
"copilot"
}
@@ -63,33 +61,22 @@ impl InlineCompletionProvider for CopilotCompletionProvider {
false
}
- fn show_completions_in_normal_mode() -> bool {
- false
- }
-
fn is_refreshing(&self) -> bool {
self.pending_refresh.is_some()
}
fn is_enabled(
&self,
- buffer: &Entity<Buffer>,
- cursor_position: language::Anchor,
+ _buffer: &Entity<Buffer>,
+ _cursor_position: language::Anchor,
cx: &App,
) -> bool {
- if !self.copilot.read(cx).status().is_authorized() {
- return false;
- }
-
- let buffer = buffer.read(cx);
- let file = buffer.file();
- let language = buffer.language_at(cursor_position);
- let settings = all_language_settings(file, cx);
- settings.inline_completions_enabled(language.as_ref(), file.map(|f| f.path().as_ref()), cx)
+ self.copilot.read(cx).status().is_authorized()
}
fn refresh(
&mut self,
+ _project: Option<Entity<Project>>,
buffer: Entity<Buffer>,
cursor_position: language::Anchor,
debounce: bool,
@@ -205,7 +192,7 @@ impl InlineCompletionProvider for CopilotCompletionProvider {
fn discard(&mut self, cx: &mut Context<Self>) {
let settings = AllLanguageSettings::get_global(cx);
- let copilot_enabled = settings.inline_completions_enabled(None, None, cx);
+ let copilot_enabled = settings.show_inline_completions(None, cx);
if !copilot_enabled {
return;
@@ -290,7 +277,10 @@ mod tests {
use serde_json::json;
use settings::SettingsStore;
use std::future::Future;
- use util::test::{marked_text_ranges_by, TextRangeMarker};
+ use util::{
+ path,
+ test::{marked_text_ranges_by, TextRangeMarker},
+ };
#[gpui::test(iterations = 10)]
async fn test_copilot(executor: BackgroundExecutor, cx: &mut TestAppContext) {
@@ -311,7 +301,7 @@ mod tests {
.await;
let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot));
cx.update_editor(|editor, window, cx| {
- editor.set_inline_completion_provider(Some(copilot_provider), window, cx)
+ editor.set_edit_prediction_provider(Some(copilot_provider), window, cx)
});
cx.set_state(indoc! {"
@@ -446,8 +436,8 @@ mod tests {
assert_eq!(editor.display_text(cx), "one.copilot2\ntwo\nthree\n");
assert_eq!(editor.text(cx), "one.co\ntwo\nthree\n");
- // AcceptInlineCompletion when there is an active suggestion inserts it.
- editor.accept_inline_completion(&Default::default(), window, cx);
+ // AcceptEditPrediction when there is an active suggestion inserts it.
+ editor.accept_edit_prediction(&Default::default(), window, cx);
assert!(!editor.has_active_inline_completion());
assert_eq!(editor.display_text(cx), "one.copilot2\ntwo\nthree\n");
assert_eq!(editor.text(cx), "one.copilot2\ntwo\nthree\n");
@@ -492,7 +482,7 @@ mod tests {
);
cx.update_editor(|editor, window, cx| {
- editor.next_inline_completion(&Default::default(), window, cx)
+ editor.next_edit_prediction(&Default::default(), window, cx)
});
executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
cx.update_editor(|editor, window, cx| {
@@ -506,8 +496,8 @@ mod tests {
assert_eq!(editor.text(cx), "fn foo() {\n \n}");
assert_eq!(editor.display_text(cx), "fn foo() {\n let x = 4;\n}");
- // Using AcceptInlineCompletion again accepts the suggestion.
- editor.accept_inline_completion(&Default::default(), window, cx);
+ // Using AcceptEditPrediction again accepts the suggestion.
+ editor.accept_edit_prediction(&Default::default(), window, cx);
assert!(!editor.has_active_inline_completion());
assert_eq!(editor.text(cx), "fn foo() {\n let x = 4;\n}");
assert_eq!(editor.display_text(cx), "fn foo() {\n let x = 4;\n}");
@@ -536,7 +526,7 @@ mod tests {
.await;
let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot));
cx.update_editor(|editor, window, cx| {
- editor.set_inline_completion_provider(Some(copilot_provider), window, cx)
+ editor.set_edit_prediction_provider(Some(copilot_provider), window, cx)
});
// Setup the editor with a completion request.
@@ -660,7 +650,7 @@ mod tests {
.await;
let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot));
cx.update_editor(|editor, window, cx| {
- editor.set_inline_completion_provider(Some(copilot_provider), window, cx)
+ editor.set_edit_prediction_provider(Some(copilot_provider), window, cx)
});
cx.set_state(indoc! {"
@@ -679,7 +669,7 @@ mod tests {
vec![],
);
cx.update_editor(|editor, window, cx| {
- editor.next_inline_completion(&Default::default(), window, cx)
+ editor.next_edit_prediction(&Default::default(), window, cx)
});
executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
cx.update_editor(|editor, window, cx| {
@@ -750,7 +740,7 @@ mod tests {
let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot));
editor
.update(cx, |editor, window, cx| {
- editor.set_inline_completion_provider(Some(copilot_provider), window, cx)
+ editor.set_edit_prediction_provider(Some(copilot_provider), window, cx)
})
.unwrap();
@@ -768,7 +758,7 @@ mod tests {
editor.change_selections(None, window, cx, |s| {
s.select_ranges([Point::new(1, 5)..Point::new(1, 5)])
});
- editor.next_inline_completion(&Default::default(), window, cx);
+ editor.next_edit_prediction(&Default::default(), window, cx);
});
executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
_ = editor.update(cx, |editor, _, cx| {
@@ -844,7 +834,7 @@ mod tests {
.await;
let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot));
cx.update_editor(|editor, window, cx| {
- editor.set_inline_completion_provider(Some(copilot_provider), window, cx)
+ editor.set_edit_prediction_provider(Some(copilot_provider), window, cx)
});
cx.set_state(indoc! {"
@@ -872,7 +862,7 @@ mod tests {
vec![],
);
cx.update_editor(|editor, window, cx| {
- editor.next_inline_completion(&Default::default(), window, cx)
+ editor.next_edit_prediction(&Default::default(), window, cx)
});
executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
cx.update_editor(|editor, _, cx| {
@@ -940,7 +930,7 @@ mod tests {
async fn test_copilot_disabled_globs(executor: BackgroundExecutor, cx: &mut TestAppContext) {
init_test(cx, |settings| {
settings
- .inline_completions
+ .edit_predictions
.get_or_insert(Default::default())
.disabled_globs = Some(vec![".env*".to_string()]);
});
@@ -949,24 +939,24 @@ mod tests {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/test",
+ path!("/test"),
json!({
".env": "SECRET=something\n",
"README.md": "hello\nworld\nhow\nare\nyou\ntoday"
}),
)
.await;
- let project = Project::test(fs, ["/test".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/test").as_ref()], cx).await;
let private_buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer("/test/.env", cx)
+ project.open_local_buffer(path!("/test/.env"), cx)
})
.await
.unwrap();
let public_buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer("/test/README.md", cx)
+ project.open_local_buffer(path!("/test/README.md"), cx)
})
.await
.unwrap();
@@ -1002,7 +992,7 @@ mod tests {
let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot));
editor
.update(cx, |editor, window, cx| {
- editor.set_inline_completion_provider(Some(copilot_provider), window, cx)
+ editor.set_edit_prediction_provider(Some(copilot_provider), window, cx)
})
.unwrap();
@@ -933,7 +933,7 @@ fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock {
.when_some(diagnostic.code.as_ref(), |stack, code| {
stack.child(
div()
- .child(SharedString::from(format!("({code})")))
+ .child(SharedString::from(format!("({code:?})")))
.text_color(color.text_muted),
)
}),
@@ -18,7 +18,7 @@ use std::{
path::{Path, PathBuf},
};
use unindent::Unindent as _;
-use util::{post_inc, RandomCharIter};
+use util::{path, post_inc, RandomCharIter};
#[ctor::ctor]
fn init_logger() {
@@ -33,7 +33,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/test",
+ path!("/test"),
json!({
"consts.rs": "
const a: i32 = 'a';
@@ -59,7 +59,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
.await;
let language_server_id = LanguageServerId(0);
- let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*window, cx);
@@ -70,7 +70,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
lsp_store
.update_diagnostic_entries(
language_server_id,
- PathBuf::from("/test/main.rs"),
+ PathBuf::from(path!("/test/main.rs")),
None,
vec![
DiagnosticEntry {
@@ -234,7 +234,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
lsp_store
.update_diagnostic_entries(
language_server_id,
- PathBuf::from("/test/consts.rs"),
+ PathBuf::from(path!("/test/consts.rs")),
None,
vec![DiagnosticEntry {
range: Unclipped(PointUtf16::new(0, 15))..Unclipped(PointUtf16::new(0, 15)),
@@ -341,7 +341,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
lsp_store
.update_diagnostic_entries(
language_server_id,
- PathBuf::from("/test/consts.rs"),
+ PathBuf::from(path!("/test/consts.rs")),
None,
vec![
DiagnosticEntry {
@@ -464,7 +464,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/test",
+ path!("/test"),
json!({
"main.js": "
a();
@@ -479,7 +479,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
let server_id_1 = LanguageServerId(100);
let server_id_2 = LanguageServerId(101);
- let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*window, cx);
@@ -504,7 +504,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
lsp_store
.update_diagnostic_entries(
server_id_1,
- PathBuf::from("/test/main.js"),
+ PathBuf::from(path!("/test/main.js")),
None,
vec![DiagnosticEntry {
range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 1)),
@@ -557,7 +557,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
lsp_store
.update_diagnostic_entries(
server_id_2,
- PathBuf::from("/test/main.js"),
+ PathBuf::from(path!("/test/main.js")),
None,
vec![DiagnosticEntry {
range: Unclipped(PointUtf16::new(1, 0))..Unclipped(PointUtf16::new(1, 1)),
@@ -619,7 +619,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
lsp_store
.update_diagnostic_entries(
server_id_1,
- PathBuf::from("/test/main.js"),
+ PathBuf::from(path!("/test/main.js")),
None,
vec![DiagnosticEntry {
range: Unclipped(PointUtf16::new(2, 0))..Unclipped(PointUtf16::new(2, 1)),
@@ -638,7 +638,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
lsp_store
.update_diagnostic_entries(
server_id_2,
- PathBuf::from("/test/main.rs"),
+ PathBuf::from(path!("/test/main.rs")),
None,
vec![],
cx,
@@ -689,7 +689,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
lsp_store
.update_diagnostic_entries(
server_id_2,
- PathBuf::from("/test/main.js"),
+ PathBuf::from(path!("/test/main.js")),
None,
vec![DiagnosticEntry {
range: Unclipped(PointUtf16::new(3, 0))..Unclipped(PointUtf16::new(3, 1)),
@@ -755,9 +755,9 @@ async fn test_random_diagnostics(cx: &mut TestAppContext, mut rng: StdRng) {
.unwrap_or(10);
let fs = FakeFs::new(cx.executor());
- fs.insert_tree("/test", json!({})).await;
+ fs.insert_tree(path!("/test"), json!({})).await;
- let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*window, cx);
@@ -817,7 +817,7 @@ async fn test_random_diagnostics(cx: &mut TestAppContext, mut rng: StdRng) {
// insert a set of diagnostics for a new path
_ => {
let path: PathBuf =
- format!("/test/{}.rs", post_inc(&mut next_filename)).into();
+ format!(path!("/test/{}.rs"), post_inc(&mut next_filename)).into();
let len = rng.gen_range(128..256);
let content =
RandomCharIter::new(&mut rng).take(len).collect::<String>();
@@ -891,7 +891,7 @@ async fn test_random_diagnostics(cx: &mut TestAppContext, mut rng: StdRng) {
for diagnostic in diagnostics {
let found_excerpt = reference_excerpts.iter().any(|info| {
let row_range = info.range.context.start.row..info.range.context.end.row;
- info.path == path.strip_prefix("/test").unwrap()
+ info.path == path.strip_prefix(path!("/test")).unwrap()
&& info.language_server == language_server_id
&& row_range.contains(&diagnostic.range.start.0.row)
});
@@ -157,7 +157,7 @@ impl DiagnosticIndicator {
(buffer, cursor_position)
});
let new_diagnostic = buffer
- .diagnostics_in_range::<_, usize>(cursor_position..cursor_position)
+ .diagnostics_in_range::<usize>(cursor_position..cursor_position)
.filter(|entry| !entry.range.is_empty())
.min_by_key(|entry| (entry.diagnostic.severity, entry.range.len()))
.map(|entry| entry.diagnostic);
@@ -0,0 +1,32 @@
+[package]
+name = "diff"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/diff.rs"
+
+[features]
+test-support = []
+
+[dependencies]
+futures.workspace = true
+git2.workspace = true
+gpui.workspace = true
+language.workspace = true
+log.workspace = true
+rope.workspace = true
+sum_tree.workspace = true
+text.workspace = true
+util.workspace = true
+
+[dev-dependencies]
+pretty_assertions.workspace = true
+serde_json.workspace = true
+text = { workspace = true, features = ["test-support"] }
+unindent.workspace = true
@@ -0,0 +1 @@
+../../LICENSE-GPL
@@ -1,10 +1,12 @@
+use futures::{channel::oneshot, future::OptionFuture};
+use git2::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch};
+use gpui::{App, Context, Entity, EventEmitter};
+use language::{Language, LanguageRegistry};
use rope::Rope;
-use std::{cmp, iter, ops::Range};
+use std::{cmp, future::Future, iter, ops::Range, sync::Arc};
use sum_tree::SumTree;
-use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point};
-
-pub use git2 as libgit;
-use libgit::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch};
+use text::{Anchor, BufferId, OffsetRangeExt, Point};
+use util::ResultExt;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum DiffHunkStatus {
@@ -62,57 +64,146 @@ impl sum_tree::Summary for DiffHunkSummary {
}
}
-#[derive(Debug, Clone)]
-pub struct BufferDiff {
- tree: SumTree<InternalDiffHunk>,
+#[derive(Clone)]
+pub struct BufferDiffSnapshot {
+ hunks: SumTree<InternalDiffHunk>,
+ pub base_text: Option<language::BufferSnapshot>,
}
-impl BufferDiff {
- pub fn new(buffer: &BufferSnapshot) -> BufferDiff {
- BufferDiff {
- tree: SumTree::new(buffer),
- }
+impl std::fmt::Debug for BufferDiffSnapshot {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("BufferDiffSnapshot")
+ .field("hunks", &self.hunks)
+ .finish()
}
+}
- pub fn build(diff_base: &str, buffer: &text::BufferSnapshot) -> Self {
- let mut tree = SumTree::new(buffer);
-
- let buffer_text = buffer.as_rope().to_string();
- let patch = Self::diff(diff_base, &buffer_text);
+impl BufferDiffSnapshot {
+ pub fn new(buffer: &text::BufferSnapshot) -> BufferDiffSnapshot {
+ BufferDiffSnapshot {
+ hunks: SumTree::new(buffer),
+ base_text: None,
+ }
+ }
- // A common case in Zed is that the empty buffer is represented as just a newline,
- // but if we just compute a naive diff you get a "preserved" line in the middle,
- // which is a bit odd.
- if buffer_text == "\n" && diff_base.ends_with("\n") && diff_base.len() > 1 {
- tree.push(
+ pub fn new_with_single_insertion(cx: &mut App) -> Self {
+ let base_text = language::Buffer::build_empty_snapshot(cx);
+ Self {
+ hunks: SumTree::from_item(
InternalDiffHunk {
- buffer_range: buffer.anchor_before(0)..buffer.anchor_before(0),
- diff_base_byte_range: 0..diff_base.len() - 1,
+ buffer_range: Anchor::MIN..Anchor::MAX,
+ diff_base_byte_range: 0..0,
},
- buffer,
- );
- return Self { tree };
+ &base_text,
+ ),
+ base_text: Some(base_text),
+ }
+ }
+
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn build_sync(
+ buffer: text::BufferSnapshot,
+ diff_base: String,
+ cx: &mut gpui::TestAppContext,
+ ) -> Self {
+ let snapshot =
+ cx.update(|cx| Self::build(buffer, Some(Arc::new(diff_base)), None, None, cx));
+ cx.executor().block(snapshot)
+ }
+
+ pub fn build(
+ buffer: text::BufferSnapshot,
+ diff_base: Option<Arc<String>>,
+ language: Option<Arc<Language>>,
+ language_registry: Option<Arc<LanguageRegistry>>,
+ cx: &mut App,
+ ) -> impl Future<Output = Self> {
+ let base_text_snapshot = diff_base.as_ref().map(|base_text| {
+ language::Buffer::build_snapshot(
+ Rope::from(base_text.as_str()),
+ language.clone(),
+ language_registry.clone(),
+ cx,
+ )
+ });
+ let base_text_snapshot = cx
+ .background_executor()
+ .spawn(OptionFuture::from(base_text_snapshot));
+
+ let hunks = cx.background_executor().spawn({
+ let buffer = buffer.clone();
+ async move { Self::recalculate_hunks(diff_base, buffer) }
+ });
+
+ async move {
+ let (base_text, hunks) = futures::join!(base_text_snapshot, hunks);
+ Self { base_text, hunks }
}
+ }
- if let Some(patch) = patch {
- let mut divergence = 0;
- for hunk_index in 0..patch.num_hunks() {
- let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence);
- tree.push(hunk, buffer);
+ pub fn build_with_base_buffer(
+ buffer: text::BufferSnapshot,
+ diff_base: Option<Arc<String>>,
+ diff_base_buffer: Option<language::BufferSnapshot>,
+ cx: &App,
+ ) -> impl Future<Output = Self> {
+ cx.background_executor().spawn({
+ let buffer = buffer.clone();
+ async move {
+ let hunks = Self::recalculate_hunks(diff_base, buffer);
+ Self {
+ hunks,
+ base_text: diff_base_buffer,
+ }
+ }
+ })
+ }
+
+ fn recalculate_hunks(
+ diff_base: Option<Arc<String>>,
+ buffer: text::BufferSnapshot,
+ ) -> SumTree<InternalDiffHunk> {
+ let mut tree = SumTree::new(&buffer);
+
+ if let Some(diff_base) = diff_base {
+ let buffer_text = buffer.as_rope().to_string();
+ let patch = Self::diff(&diff_base, &buffer_text);
+
+ // A common case in Zed is that the empty buffer is represented as just a newline,
+ // but if we just compute a naive diff you get a "preserved" line in the middle,
+ // which is a bit odd.
+ if buffer_text == "\n" && diff_base.ends_with("\n") && diff_base.len() > 1 {
+ tree.push(
+ InternalDiffHunk {
+ buffer_range: buffer.anchor_before(0)..buffer.anchor_before(0),
+ diff_base_byte_range: 0..diff_base.len() - 1,
+ },
+ &buffer,
+ );
+ return tree;
+ }
+
+ if let Some(patch) = patch {
+ let mut divergence = 0;
+ for hunk_index in 0..patch.num_hunks() {
+ let hunk =
+ Self::process_patch_hunk(&patch, hunk_index, &buffer, &mut divergence);
+ tree.push(hunk, &buffer);
+ }
}
}
- Self { tree }
+ tree
}
pub fn is_empty(&self) -> bool {
- self.tree.is_empty()
+ self.hunks.is_empty()
}
pub fn hunks_in_row_range<'a>(
&'a self,
range: Range<u32>,
- buffer: &'a BufferSnapshot,
+ buffer: &'a text::BufferSnapshot,
) -> impl 'a + Iterator<Item = DiffHunk> {
let start = buffer.anchor_before(Point::new(range.start, 0));
let end = buffer.anchor_after(Point::new(range.end, 0));
@@ -123,13 +214,16 @@ impl BufferDiff {
pub fn hunks_intersecting_range<'a>(
&'a self,
range: Range<Anchor>,
- buffer: &'a BufferSnapshot,
+ buffer: &'a text::BufferSnapshot,
) -> impl 'a + Iterator<Item = DiffHunk> {
+ let range = range.to_offset(buffer);
+
let mut cursor = self
- .tree
+ .hunks
.filter::<_, DiffHunkSummary>(buffer, move |summary| {
- let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt();
- let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt();
+ let summary_range = summary.buffer_range.to_offset(buffer);
+ let before_start = summary_range.end < range.start;
+ let after_end = summary_range.start > range.end;
!before_start && !after_end
});
@@ -151,31 +245,35 @@ impl BufferDiff {
});
let mut summaries = buffer.summaries_for_anchors_with_payload::<Point, _, _>(anchor_iter);
- iter::from_fn(move || {
+ iter::from_fn(move || loop {
let (start_point, (start_anchor, start_base)) = summaries.next()?;
let (mut end_point, (mut end_anchor, end_base)) = summaries.next()?;
+ if !start_anchor.is_valid(buffer) {
+ continue;
+ }
+
if end_point.column > 0 {
end_point.row += 1;
end_point.column = 0;
end_anchor = buffer.anchor_before(end_point);
}
- Some(DiffHunk {
+ return Some(DiffHunk {
row_range: start_point.row..end_point.row,
diff_base_byte_range: start_base..end_base,
buffer_range: start_anchor..end_anchor,
- })
+ });
})
}
pub fn hunks_intersecting_range_rev<'a>(
&'a self,
range: Range<Anchor>,
- buffer: &'a BufferSnapshot,
+ buffer: &'a text::BufferSnapshot,
) -> impl 'a + Iterator<Item = DiffHunk> {
let mut cursor = self
- .tree
+ .hunks
.filter::<_, DiffHunkSummary>(buffer, move |summary| {
let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt();
let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt();
@@ -201,9 +299,13 @@ impl BufferDiff {
})
}
- pub fn compare(&self, old: &Self, new_snapshot: &BufferSnapshot) -> Option<Range<Anchor>> {
- let mut new_cursor = self.tree.cursor::<()>(new_snapshot);
- let mut old_cursor = old.tree.cursor::<()>(new_snapshot);
+ pub fn compare(
+ &self,
+ old: &Self,
+ new_snapshot: &text::BufferSnapshot,
+ ) -> Option<Range<Anchor>> {
+ let mut new_cursor = self.hunks.cursor::<()>(new_snapshot);
+ let mut old_cursor = old.hunks.cursor::<()>(new_snapshot);
old_cursor.next(new_snapshot);
new_cursor.next(new_snapshot);
let mut start = None;
@@ -266,15 +368,11 @@ impl BufferDiff {
#[cfg(test)]
fn clear(&mut self, buffer: &text::BufferSnapshot) {
- self.tree = SumTree::new(buffer);
- }
-
- pub fn update(&mut self, diff_base: &Rope, buffer: &text::BufferSnapshot) {
- *self = Self::build(&diff_base.to_string(), buffer);
+ self.hunks = SumTree::new(buffer);
}
#[cfg(test)]
- fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator<Item = DiffHunk> {
+ fn hunks<'a>(&'a self, text: &'a text::BufferSnapshot) -> impl 'a + Iterator<Item = DiffHunk> {
let start = text.anchor_before(Point::new(0, 0));
let end = text.anchor_after(Point::new(u32::MAX, u32::MAX));
self.hunks_intersecting_range(start..end, text)
@@ -369,12 +467,171 @@ impl BufferDiff {
}
}
+pub struct BufferDiff {
+ pub buffer_id: BufferId,
+ pub snapshot: BufferDiffSnapshot,
+ pub unstaged_diff: Option<Entity<BufferDiff>>,
+}
+
+impl std::fmt::Debug for BufferDiff {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("BufferChangeSet")
+ .field("buffer_id", &self.buffer_id)
+ .field("snapshot", &self.snapshot)
+ .finish()
+ }
+}
+
+pub enum BufferDiffEvent {
+ DiffChanged { changed_range: Range<text::Anchor> },
+ LanguageChanged,
+}
+
+impl EventEmitter<BufferDiffEvent> for BufferDiff {}
+
+impl BufferDiff {
+ pub fn set_state(
+ &mut self,
+ snapshot: BufferDiffSnapshot,
+ buffer: &text::BufferSnapshot,
+ cx: &mut Context<Self>,
+ ) {
+ if let Some(base_text) = snapshot.base_text.as_ref() {
+ let changed_range = if Some(base_text.remote_id())
+ != self
+ .snapshot
+ .base_text
+ .as_ref()
+ .map(|buffer| buffer.remote_id())
+ {
+ Some(text::Anchor::MIN..text::Anchor::MAX)
+ } else {
+ snapshot.compare(&self.snapshot, buffer)
+ };
+ if let Some(changed_range) = changed_range {
+ cx.emit(BufferDiffEvent::DiffChanged { changed_range });
+ }
+ }
+ self.snapshot = snapshot;
+ }
+
+ pub fn diff_hunks_intersecting_range<'a>(
+ &'a self,
+ range: Range<text::Anchor>,
+ buffer_snapshot: &'a text::BufferSnapshot,
+ ) -> impl 'a + Iterator<Item = DiffHunk> {
+ self.snapshot
+ .hunks_intersecting_range(range, buffer_snapshot)
+ }
+
+ pub fn diff_hunks_intersecting_range_rev<'a>(
+ &'a self,
+ range: Range<text::Anchor>,
+ buffer_snapshot: &'a text::BufferSnapshot,
+ ) -> impl 'a + Iterator<Item = DiffHunk> {
+ self.snapshot
+ .hunks_intersecting_range_rev(range, buffer_snapshot)
+ }
+
+ /// Used in cases where the change set isn't derived from git.
+ pub fn set_base_text(
+ &mut self,
+ base_buffer: Entity<language::Buffer>,
+ buffer: text::BufferSnapshot,
+ cx: &mut Context<Self>,
+ ) -> oneshot::Receiver<()> {
+ let (tx, rx) = oneshot::channel();
+ let this = cx.weak_entity();
+ let base_buffer = base_buffer.read(cx);
+ let language_registry = base_buffer.language_registry();
+ let base_buffer = base_buffer.snapshot();
+ let base_text = Arc::new(base_buffer.text());
+
+ let snapshot = BufferDiffSnapshot::build(
+ buffer.clone(),
+ Some(base_text),
+ base_buffer.language().cloned(),
+ language_registry,
+ cx,
+ );
+ let complete_on_drop = util::defer(|| {
+ tx.send(()).ok();
+ });
+ cx.spawn(|_, mut cx| async move {
+ let snapshot = snapshot.await;
+ let Some(this) = this.upgrade() else {
+ return;
+ };
+ this.update(&mut cx, |this, cx| {
+ this.set_state(snapshot, &buffer, cx);
+ })
+ .log_err();
+ drop(complete_on_drop)
+ })
+ .detach();
+ rx
+ }
+
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn base_text_string(&self) -> Option<String> {
+ self.snapshot.base_text.as_ref().map(|buffer| buffer.text())
+ }
+
+ pub fn new(buffer: &Entity<language::Buffer>, cx: &mut App) -> Self {
+ BufferDiff {
+ buffer_id: buffer.read(cx).remote_id(),
+ snapshot: BufferDiffSnapshot::new(&buffer.read(cx)),
+ unstaged_diff: None,
+ }
+ }
+
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn new_with_base_text(
+ base_text: &str,
+ buffer: &Entity<language::Buffer>,
+ cx: &mut App,
+ ) -> Self {
+ let mut base_text = base_text.to_owned();
+ text::LineEnding::normalize(&mut base_text);
+ let snapshot = BufferDiffSnapshot::build(
+ buffer.read(cx).text_snapshot(),
+ Some(base_text.into()),
+ None,
+ None,
+ cx,
+ );
+ let snapshot = cx.background_executor().block(snapshot);
+ BufferDiff {
+ buffer_id: buffer.read(cx).remote_id(),
+ snapshot,
+ unstaged_diff: None,
+ }
+ }
+
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn recalculate_diff_sync(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
+ let base_text = self
+ .snapshot
+ .base_text
+ .as_ref()
+ .map(|base_text| base_text.text());
+ let snapshot = BufferDiffSnapshot::build_with_base_buffer(
+ buffer.clone(),
+ base_text.clone().map(Arc::new),
+ self.snapshot.base_text.clone(),
+ cx,
+ );
+ let snapshot = cx.background_executor().block(snapshot);
+ self.set_state(snapshot, &buffer, cx);
+ }
+}
+
/// Range (crossing new lines), old, new
#[cfg(any(test, feature = "test-support"))]
#[track_caller]
pub fn assert_hunks<Iter>(
diff_hunks: Iter,
- buffer: &BufferSnapshot,
+ buffer: &text::BufferSnapshot,
diff_base: &str,
expected_hunks: &[(Range<u32>, &str, &str)],
) where
@@ -407,18 +664,18 @@ mod tests {
use std::assert_eq;
use super::*;
+ use gpui::TestAppContext;
use text::{Buffer, BufferId};
use unindent::Unindent as _;
- #[test]
- fn test_buffer_diff_simple() {
+ #[gpui::test]
+ async fn test_buffer_diff_simple(cx: &mut gpui::TestAppContext) {
let diff_base = "
one
two
three
"
.unindent();
- let diff_base_rope = Rope::from(diff_base.clone());
let buffer_text = "
one
@@ -428,8 +685,7 @@ mod tests {
.unindent();
let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text);
- let mut diff = BufferDiff::new(&buffer);
- diff.update(&diff_base_rope, &buffer);
+ let mut diff = BufferDiffSnapshot::build_sync(buffer.clone(), diff_base.clone(), cx);
assert_hunks(
diff.hunks(&buffer),
&buffer,
@@ -438,7 +694,7 @@ mod tests {
);
buffer.edit([(0..0, "point five\n")]);
- diff.update(&diff_base_rope, &buffer);
+ diff = BufferDiffSnapshot::build_sync(buffer.clone(), diff_base.clone(), cx);
assert_hunks(
diff.hunks(&buffer),
&buffer,
@@ -450,9 +706,10 @@ mod tests {
assert_hunks(diff.hunks(&buffer), &buffer, &diff_base, &[]);
}
- #[test]
- fn test_buffer_diff_range() {
- let diff_base = "
+ #[gpui::test]
+ async fn test_buffer_diff_range(cx: &mut TestAppContext) {
+ let diff_base = Arc::new(
+ "
one
two
three
@@ -464,8 +721,8 @@ mod tests {
nine
ten
"
- .unindent();
- let diff_base_rope = Rope::from(diff_base.clone());
+ .unindent(),
+ );
let buffer_text = "
A
@@ -489,8 +746,17 @@ mod tests {
.unindent();
let buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text);
- let mut diff = BufferDiff::new(&buffer);
- diff.update(&diff_base_rope, &buffer);
+ let diff = cx
+ .update(|cx| {
+ BufferDiffSnapshot::build(
+ buffer.snapshot(),
+ Some(diff_base.clone()),
+ None,
+ None,
+ cx,
+ )
+ })
+ .await;
assert_eq!(diff.hunks(&buffer).count(), 8);
assert_hunks(
@@ -505,8 +771,8 @@ mod tests {
);
}
- #[test]
- fn test_buffer_diff_compare() {
+ #[gpui::test]
+ async fn test_buffer_diff_compare(cx: &mut TestAppContext) {
let base_text = "
zero
one
@@ -535,8 +801,8 @@ mod tests {
let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text_1);
- let empty_diff = BufferDiff::new(&buffer);
- let diff_1 = BufferDiff::build(&base_text, &buffer);
+ let empty_diff = BufferDiffSnapshot::new(&buffer);
+ let diff_1 = BufferDiffSnapshot::build_sync(buffer.clone(), base_text.clone(), cx);
let range = diff_1.compare(&empty_diff, &buffer).unwrap();
assert_eq!(range.to_point(&buffer), Point::new(0, 0)..Point::new(8, 0));
@@ -554,7 +820,7 @@ mod tests {
"
.unindent(),
);
- let diff_2 = BufferDiff::build(&base_text, &buffer);
+ let diff_2 = BufferDiffSnapshot::build_sync(buffer.clone(), base_text.clone(), cx);
assert_eq!(None, diff_2.compare(&diff_1, &buffer));
// Edit turns a deletion hunk into a modification.
@@ -571,7 +837,7 @@ mod tests {
"
.unindent(),
);
- let diff_3 = BufferDiff::build(&base_text, &buffer);
+ let diff_3 = BufferDiffSnapshot::build_sync(buffer.clone(), base_text.clone(), cx);
let range = diff_3.compare(&diff_2, &buffer).unwrap();
assert_eq!(range.to_point(&buffer), Point::new(1, 0)..Point::new(2, 0));
@@ -588,7 +854,7 @@ mod tests {
"
.unindent(),
);
- let diff_4 = BufferDiff::build(&base_text, &buffer);
+ let diff_4 = BufferDiffSnapshot::build_sync(buffer.clone(), base_text.clone(), cx);
let range = diff_4.compare(&diff_3, &buffer).unwrap();
assert_eq!(range.to_point(&buffer), Point::new(3, 4)..Point::new(4, 0));
@@ -606,7 +872,7 @@ mod tests {
"
.unindent(),
);
- let diff_5 = BufferDiff::build(&base_text, &buffer);
+ let diff_5 = BufferDiffSnapshot::build_sync(buffer.snapshot(), base_text.clone(), cx);
let range = diff_5.compare(&diff_4, &buffer).unwrap();
assert_eq!(range.to_point(&buffer), Point::new(3, 0)..Point::new(4, 0));
@@ -624,7 +890,7 @@ mod tests {
"
.unindent(),
);
- let diff_6 = BufferDiff::build(&base_text, &buffer);
+ let diff_6 = BufferDiffSnapshot::build_sync(buffer.snapshot(), base_text, cx);
let range = diff_6.compare(&diff_5, &buffer).unwrap();
assert_eq!(range.to_point(&buffer), Point::new(7, 0)..Point::new(8, 0));
}
@@ -38,6 +38,7 @@ clock.workspace = true
collections.workspace = true
convert_case.workspace = true
db.workspace = true
+diff.workspace = true
emojis.workspace = true
file_icons.workspace = true
futures.workspace = true
@@ -3,56 +3,64 @@ use super::*;
use gpui::{action_as, action_with_deprecated_aliases};
use schemars::JsonSchema;
use util::serde::default_true;
-
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct SelectNext {
#[serde(default)]
pub replace_newest: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct SelectPrevious {
#[serde(default)]
pub replace_newest: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct MoveToBeginningOfLine {
#[serde(default = "default_true")]
pub stop_at_soft_wraps: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct SelectToBeginningOfLine {
#[serde(default)]
pub(super) stop_at_soft_wraps: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct MovePageUp {
#[serde(default)]
pub(super) center_cursor: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct MovePageDown {
#[serde(default)]
pub(super) center_cursor: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct MoveToEndOfLine {
#[serde(default = "default_true")]
pub stop_at_soft_wraps: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct SelectToEndOfLine {
#[serde(default)]
pub(super) stop_at_soft_wraps: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct ToggleCodeActions {
// Display row from which the action was deployed.
#[serde(default)]
@@ -61,24 +69,28 @@ pub struct ToggleCodeActions {
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct ConfirmCompletion {
#[serde(default)]
pub item_ix: Option<usize>,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct ComposeCompletion {
#[serde(default)]
pub item_ix: Option<usize>,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct ConfirmCodeAction {
#[serde(default)]
pub item_ix: Option<usize>,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct ToggleComments {
#[serde(default)]
pub advance_downwards: bool,
@@ -87,60 +99,70 @@ pub struct ToggleComments {
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct FoldAt {
#[serde(skip)]
pub buffer_row: MultiBufferRow,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct UnfoldAt {
#[serde(skip)]
pub buffer_row: MultiBufferRow,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct MoveUpByLines {
#[serde(default)]
pub(super) lines: u32,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct MoveDownByLines {
#[serde(default)]
pub(super) lines: u32,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct SelectUpByLines {
#[serde(default)]
pub(super) lines: u32,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct SelectDownByLines {
#[serde(default)]
pub(super) lines: u32,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct ExpandExcerpts {
#[serde(default)]
pub(super) lines: u32,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct ExpandExcerptsUp {
#[serde(default)]
pub(super) lines: u32,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct ExpandExcerptsDown {
#[serde(default)]
pub(super) lines: u32,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct ShowCompletions {
#[serde(default)]
pub(super) trigger: Option<String>,
@@ -150,23 +172,24 @@ pub struct ShowCompletions {
pub struct HandleInput(pub String);
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct DeleteToNextWordEnd {
#[serde(default)]
pub ignore_newlines: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct DeleteToPreviousWordStart {
#[serde(default)]
pub ignore_newlines: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
-pub struct FoldAtLevel {
- pub level: u32,
-}
+pub struct FoldAtLevel(pub u32);
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct SpawnNearestTask {
#[serde(default)]
pub reveal: task::RevealStrategy,
@@ -216,9 +239,9 @@ impl_actions!(
gpui::actions!(
editor,
[
- AcceptInlineCompletion,
+ AcceptEditPrediction,
AcceptPartialCopilotSuggestion,
- AcceptPartialInlineCompletion,
+ AcceptPartialEditPrediction,
AddSelectionAbove,
AddSelectionBelow,
ApplyAllDiffHunks,
@@ -310,7 +333,7 @@ gpui::actions!(
Newline,
NewlineAbove,
NewlineBelow,
- NextInlineCompletion,
+ NextEditPrediction,
NextScreen,
OpenContextMenu,
OpenExcerpts,
@@ -325,7 +348,7 @@ gpui::actions!(
PageDown,
PageUp,
Paste,
- PreviousInlineCompletion,
+ PreviousEditPrediction,
Redo,
RedoSelection,
Rename,
@@ -361,7 +384,7 @@ gpui::actions!(
SelectToStartOfParagraph,
SelectUp,
ShowCharacterPalette,
- ShowInlineCompletion,
+ ShowEditPrediction,
ShowSignatureHelp,
ShuffleLines,
SortLinesCaseInsensitive,
@@ -375,7 +398,7 @@ gpui::actions!(
ToggleGitBlameInline,
ToggleIndentGuides,
ToggleInlayHints,
- ToggleInlineCompletions,
+ ToggleEditPrediction,
ToggleLineNumbers,
SwapSelectionEnds,
SetMark,
@@ -169,7 +169,6 @@ pub struct CompletionsMenu {
resolve_completions: bool,
show_completion_documentation: bool,
last_rendered_range: Rc<RefCell<Option<Range<usize>>>>,
- pub previewing_inline_completion: bool,
}
impl CompletionsMenu {
@@ -200,7 +199,6 @@ impl CompletionsMenu {
scroll_handle: UniformListScrollHandle::new(),
resolve_completions: true,
last_rendered_range: RefCell::new(None).into(),
- previewing_inline_completion: false,
}
}
@@ -257,7 +255,6 @@ impl CompletionsMenu {
resolve_completions: false,
show_completion_documentation: false,
last_rendered_range: RefCell::new(None).into(),
- previewing_inline_completion: false,
}
}
@@ -410,12 +407,8 @@ impl CompletionsMenu {
.detach();
}
- pub fn is_empty(&self) -> bool {
- self.entries.borrow().is_empty()
- }
-
pub fn visible(&self) -> bool {
- !self.is_empty() && !self.previewing_inline_completion
+ !self.entries.borrow().is_empty()
}
fn origin(&self) -> ContextMenuOrigin {
@@ -524,7 +517,6 @@ impl CompletionsMenu {
} else {
None
};
-
let color_swatch = completion
.color()
.map(|color| div().size_4().bg(color).rounded_sm());
@@ -709,10 +701,6 @@ impl CompletionsMenu {
// This keeps the display consistent when y_flipped.
self.scroll_handle.scroll_to_item(0, ScrollStrategy::Top);
}
-
- pub fn set_previewing_inline_completion(&mut self, value: bool) {
- self.previewing_inline_completion = value;
- }
}
#[derive(Clone)]
@@ -1142,12 +1142,7 @@ impl DisplaySnapshot {
}
pub fn line_indent_for_buffer_row(&self, buffer_row: MultiBufferRow) -> LineIndent {
- let (buffer, range) = self
- .buffer_snapshot
- .buffer_line_for_row(buffer_row)
- .unwrap();
-
- buffer.line_indent_for_row(range.start.row)
+ self.buffer_snapshot.line_indent_for_row(buffer_row)
}
pub fn line_len(&self, row: DisplayRow) -> u32 {
@@ -1438,7 +1433,10 @@ impl ToDisplayPoint for Anchor {
#[cfg(test)]
pub mod tests {
use super::*;
- use crate::{movement, test::marked_display_snapshot};
+ use crate::{
+ movement,
+ test::{marked_display_snapshot, test_font},
+ };
use block_map::BlockPlacement;
use gpui::{
div, font, observe, px, App, AppContext as _, BorrowAppContext, Element, Hsla, Rgba,
@@ -1497,10 +1495,11 @@ pub mod tests {
}
});
+ let font = test_font();
let map = cx.new(|cx| {
DisplayMap::new(
buffer.clone(),
- font("Helvetica"),
+ font,
font_size,
wrap_width,
true,
@@ -1992,8 +1992,9 @@ fn offset_for_row(s: &str, target: u32) -> (u32, usize) {
#[cfg(test)]
mod tests {
use super::*;
- use crate::display_map::{
- fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap, wrap_map::WrapMap,
+ use crate::{
+ display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap, wrap_map::WrapMap},
+ test::test_font,
};
use gpui::{div, font, px, App, AppContext as _, Element};
use itertools::Itertools;
@@ -2227,7 +2228,7 @@ mod tests {
multi_buffer
});
- let font = font("Helvetica");
+ let font = test_font();
let font_size = px(14.);
let font_id = cx.text_system().resolve_font(&font);
let mut wrap_width = px(0.);
@@ -3069,8 +3070,9 @@ mod tests {
let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
- let (wrap_map, wraps_snapshot) = cx
- .update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), font_size, wrap_width, cx));
+ let font = test_font();
+ let (wrap_map, wraps_snapshot) =
+ cx.update(|cx| WrapMap::new(tab_snapshot, font, font_size, wrap_width, cx));
let mut block_map = BlockMap::new(
wraps_snapshot,
true,
@@ -979,6 +979,7 @@ impl<'a> Iterator for WrapRows<'a> {
Some(if soft_wrapped {
RowInfo {
+ buffer_id: None,
buffer_row: None,
multibuffer_row: None,
diff_status,
@@ -1168,9 +1169,10 @@ mod tests {
use super::*;
use crate::{
display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap},
+ test::test_font,
MultiBuffer,
};
- use gpui::{font, px, test::observe};
+ use gpui::{px, test::observe};
use rand::prelude::*;
use settings::SettingsStore;
use smol::stream::StreamExt;
@@ -1195,7 +1197,8 @@ mod tests {
Some(px(rng.gen_range(0.0..=1000.0)))
};
let tab_size = NonZeroU32::new(rng.gen_range(1..=4)).unwrap();
- let font = font("Helvetica");
+
+ let font = test_font();
let _font_id = text_system.font_id(&font);
let font_size = px(14.0);
@@ -47,7 +47,6 @@ mod signature_help;
#[cfg(any(test, feature = "test-support"))]
pub mod test;
-use ::git::diff::DiffHunkStatus;
pub(crate) use actions::*;
pub use actions::{OpenExcerpts, OpenExcerptsSplit};
use aho_corasick::AhoCorasick;
@@ -63,10 +62,10 @@ pub use editor_settings::{
CurrentLineHighlight, EditorSettings, ScrollBeyondLastLine, SearchSettings, ShowScrollbar,
};
pub use editor_settings_controls::*;
-use element::LineWithInvisibles;
pub use element::{
CursorLayout, EditorElement, HighlightedRange, HighlightedRangeLine, PointForPosition,
};
+use element::{LineWithInvisibles, PositionMap};
use futures::{future, FutureExt};
use fuzzy::StringMatchCandidate;
@@ -74,6 +73,7 @@ use code_context_menus::{
AvailableCodeAction, CodeActionContents, CodeActionsItem, CodeActionsMenu, CodeContextMenu,
CompletionsMenu, ContextMenuOrigin,
};
+use diff::DiffHunkStatus;
use git::blame::GitBlame;
use gpui::{
div, impl_actions, linear_color_stop, linear_gradient, point, prelude::*, pulsating_between,
@@ -82,23 +82,23 @@ use gpui::{
Entity, EntityInputHandler, EventEmitter, FocusHandle, FocusOutEvent, Focusable, FontId,
FontWeight, Global, HighlightStyle, Hsla, InteractiveText, KeyContext, Modifiers, MouseButton,
MouseDownEvent, PaintQuad, ParentElement, Pixels, Render, SharedString, Size, Styled,
- StyledText, Subscription, Task, TextStyle, TextStyleRefinement, UTF16Selection, UnderlineStyle,
- UniformListScrollHandle, WeakEntity, WeakFocusHandle, Window,
+ StyledText, Subscription, Task, TextRun, TextStyle, TextStyleRefinement, UTF16Selection,
+ UnderlineStyle, UniformListScrollHandle, WeakEntity, WeakFocusHandle, Window,
};
use highlight_matching_bracket::refresh_matching_bracket_highlights;
use hover_popover::{hide_hover, HoverState};
use indent_guides::ActiveIndentGuidesState;
use inlay_hint_cache::{InlayHintCache, InlaySplice, InvalidationStrategy};
pub use inline_completion::Direction;
-use inline_completion::{InlineCompletionProvider, InlineCompletionProviderHandle};
+use inline_completion::{EditPredictionProvider, InlineCompletionProviderHandle};
pub use items::MAX_TAB_TITLE_LEN;
use itertools::Itertools;
use language::{
language_settings::{self, all_language_settings, language_settings, InlayHintSettings},
markdown, point_from_lsp, AutoindentMode, BracketPair, Buffer, Capability, CharKind, CodeLabel,
CompletionDocumentation, CursorShape, Diagnostic, EditPreview, HighlightedText, IndentKind,
- IndentSize, Language, OffsetRangeExt, Point, Selection, SelectionGoal, TextObject,
- TransactionId, TreeSitterOptions,
+ IndentSize, InlineCompletionPreviewMode, Language, OffsetRangeExt, Point, Selection,
+ SelectionGoal, TextObject, TransactionId, TreeSitterOptions,
};
use language::{point_to_lsp, BufferRow, CharClassifier, Runnable, RunnableRange};
use linked_editing_ranges::refresh_linked_ranges;
@@ -124,7 +124,8 @@ pub use multi_buffer::{
ToOffset, ToPoint,
};
use multi_buffer::{
- ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow, ToOffsetUtf16,
+ ExcerptInfo, ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow,
+ ToOffsetUtf16,
};
use project::{
lsp_store::{FormatTrigger, LspFormatTarget, OpenLspBufferHandle},
@@ -306,6 +307,7 @@ pub fn init(cx: &mut App) {
workspace.register_action(Editor::new_file);
workspace.register_action(Editor::new_file_vertical);
workspace.register_action(Editor::new_file_horizontal);
+ workspace.register_action(Editor::cancel_language_server_work);
},
)
.detach();
@@ -466,7 +468,7 @@ pub fn make_suggestion_styles(cx: &mut App) -> InlineCompletionStyles {
type CompletionId = usize;
pub(crate) enum EditDisplayMode {
- TabAccept(bool),
+ TabAccept,
DiffPopover,
Inline,
}
@@ -491,15 +493,6 @@ struct InlineCompletionState {
invalidation_range: Range<Anchor>,
}
-impl InlineCompletionState {
- pub fn is_move(&self) -> bool {
- match &self.completion {
- InlineCompletion::Move { .. } => true,
- _ => false,
- }
- }
-}
-
enum InlineCompletionHighlight {}
pub enum MenuInlineCompletionsPolicy {
@@ -579,6 +572,15 @@ struct BufferOffset(usize);
pub trait Addon: 'static {
fn extend_key_context(&self, _: &mut KeyContext, _: &App) {}
+ fn render_buffer_header_controls(
+ &self,
+ _: &ExcerptInfo,
+ _: &Window,
+ _: &App,
+ ) -> Option<AnyElement> {
+ None
+ }
+
fn to_any(&self) -> &dyn std::any::Any;
}
@@ -672,16 +674,15 @@ pub struct Editor {
pending_mouse_down: Option<Rc<RefCell<Option<MouseDownEvent>>>>,
gutter_hovered: bool,
hovered_link_state: Option<HoveredLinkState>,
- inline_completion_provider: Option<RegisteredInlineCompletionProvider>,
+ edit_prediction_provider: Option<RegisteredInlineCompletionProvider>,
code_action_providers: Vec<Rc<dyn CodeActionProvider>>,
active_inline_completion: Option<InlineCompletionState>,
/// Used to prevent flickering as the user types while the menu is open
stale_inline_completion_in_menu: Option<InlineCompletionState>,
- // enable_inline_completions is a switch that Vim can use to disable
- // edit predictions based on its mode.
- enable_inline_completions: bool,
+ inline_completions_hidden_for_vim_mode: bool,
show_inline_completions_override: Option<bool>,
menu_inline_completions_policy: MenuInlineCompletionsPolicy,
+ previewing_inline_completion: bool,
inlay_hint_cache: InlayHintCache,
next_inlay_id: usize,
_subscriptions: Vec<Subscription>,
@@ -715,6 +716,7 @@ pub struct Editor {
>,
>,
last_bounds: Option<Bounds<Pixels>>,
+ last_position_map: Option<Rc<PositionMap>>,
expect_bounds_change: Option<Bounds<Pixels>>,
tasks: BTreeMap<(BufferId, BufferRow), RunnableTasks>,
tasks_update_task: Option<Task<()>>,
@@ -1283,7 +1285,7 @@ impl Editor {
let mut code_action_providers = Vec::new();
if let Some(project) = project.clone() {
- get_unstaged_changes_for_buffers(
+ get_uncommitted_diff_for_buffer(
&project,
buffer.read(cx).all_buffers(),
buffer.clone(),
@@ -1369,14 +1371,16 @@ impl Editor {
hover_state: Default::default(),
pending_mouse_down: None,
hovered_link_state: Default::default(),
- inline_completion_provider: None,
+ edit_prediction_provider: None,
active_inline_completion: None,
stale_inline_completion_in_menu: None,
+ previewing_inline_completion: false,
inlay_hint_cache: InlayHintCache::new(inlay_hint_settings),
gutter_hovered: false,
pixel_position_of_newest_cursor: None,
last_bounds: None,
+ last_position_map: None,
expect_bounds_change: None,
gutter_dimensions: GutterDimensions::default(),
style: None,
@@ -1384,8 +1388,8 @@ impl Editor {
hovered_cursors: Default::default(),
next_editor_action_id: EditorActionId::default(),
editor_actions: Rc::default(),
+ inline_completions_hidden_for_vim_mode: false,
show_inline_completions_override: None,
- enable_inline_completions: true,
menu_inline_completions_policy: MenuInlineCompletionsPolicy::ByProvider,
custom_context_menu: None,
show_git_blame_gutter: false,
@@ -1484,10 +1488,14 @@ impl Editor {
if self.pending_rename.is_some() {
key_context.add("renaming");
}
+
+ let mut showing_completions = false;
+
match self.context_menu.borrow().as_ref() {
Some(CodeContextMenu::Completions(_)) => {
key_context.add("menu");
key_context.add("showing_completions");
+ showing_completions = true;
}
Some(CodeContextMenu::CodeActions(_)) => {
key_context.add("menu");
@@ -1516,7 +1524,11 @@ impl Editor {
if self.has_active_inline_completion() {
key_context.add("copilot_suggestion");
- key_context.add("inline_completion");
+ key_context.add("edit_prediction");
+
+ if showing_completions || self.edit_prediction_requires_modifier(cx) {
+ key_context.add("edit_prediction_requires_modifier");
+ }
}
if self.selection_mark_mode {
@@ -1725,15 +1737,15 @@ impl Editor {
self.semantics_provider = provider;
}
- pub fn set_inline_completion_provider<T>(
+ pub fn set_edit_prediction_provider<T>(
&mut self,
provider: Option<Entity<T>>,
window: &mut Window,
cx: &mut Context<Self>,
) where
- T: InlineCompletionProvider,
+ T: EditPredictionProvider,
{
- self.inline_completion_provider =
+ self.edit_prediction_provider =
provider.map(|provider| RegisteredInlineCompletionProvider {
_subscription: cx.observe_in(&provider, window, |this, _, window, cx| {
if this.focus_handle.is_focused(window) {
@@ -1781,7 +1793,7 @@ impl Editor {
self.collapse_matches = collapse_matches;
}
- pub fn register_buffers_with_language_servers(&mut self, cx: &mut Context<Self>) {
+ fn register_buffers_with_language_servers(&mut self, cx: &mut Context<Self>) {
let buffers = self.buffer.read(cx).all_buffers();
let Some(lsp_store) = self.lsp_store(cx) else {
return;
@@ -1815,11 +1827,19 @@ impl Editor {
self.input_enabled = input_enabled;
}
- pub fn set_inline_completions_enabled(&mut self, enabled: bool, cx: &mut Context<Self>) {
- self.enable_inline_completions = enabled;
- if !self.enable_inline_completions {
- self.take_active_inline_completion(cx);
- cx.notify();
+ pub fn set_inline_completions_hidden_for_vim_mode(
+ &mut self,
+ hidden: bool,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if hidden != self.inline_completions_hidden_for_vim_mode {
+ self.inline_completions_hidden_for_vim_mode = hidden;
+ if hidden {
+ self.update_visible_inline_completion(window, cx);
+ } else {
+ self.refresh_inline_completion(true, false, window, cx);
+ }
}
}
@@ -1857,7 +1877,7 @@ impl Editor {
pub fn toggle_inline_completions(
&mut self,
- _: &ToggleInlineCompletions,
+ _: &ToggleEditPrediction,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -1868,8 +1888,11 @@ impl Editor {
if let Some((buffer, cursor_buffer_position)) =
self.buffer.read(cx).text_anchor_for_position(cursor, cx)
{
- let show_inline_completions =
- !self.should_show_inline_completions(&buffer, cursor_buffer_position, cx);
+ let show_inline_completions = !self.should_show_inline_completions_in_buffer(
+ &buffer,
+ cursor_buffer_position,
+ cx,
+ );
self.set_show_inline_completions(Some(show_inline_completions), window, cx);
}
}
@@ -1877,48 +1900,21 @@ impl Editor {
pub fn set_show_inline_completions(
&mut self,
- show_inline_completions: Option<bool>,
+ show_edit_predictions: Option<bool>,
window: &mut Window,
cx: &mut Context<Self>,
) {
- self.show_inline_completions_override = show_inline_completions;
+ self.show_inline_completions_override = show_edit_predictions;
self.refresh_inline_completion(false, true, window, cx);
}
- pub fn inline_completions_enabled(&self, cx: &App) -> bool {
- let cursor = self.selections.newest_anchor().head();
- if let Some((buffer, buffer_position)) =
- self.buffer.read(cx).text_anchor_for_position(cursor, cx)
- {
- self.should_show_inline_completions(&buffer, buffer_position, cx)
- } else {
- false
- }
- }
-
- fn should_show_inline_completions(
- &self,
- buffer: &Entity<Buffer>,
- buffer_position: language::Anchor,
- cx: &App,
- ) -> bool {
- if !self.snippet_stack.is_empty() {
- return false;
- }
-
- if self.inline_completions_disabled_in_scope(buffer, buffer_position, cx) {
- return false;
- }
-
- if let Some(provider) = self.inline_completion_provider() {
- if let Some(show_inline_completions) = self.show_inline_completions_override {
- show_inline_completions
- } else {
- self.mode == EditorMode::Full && provider.is_enabled(buffer, buffer_position, cx)
- }
- } else {
- false
- }
+ pub fn inline_completion_start_anchor(&self) -> Option<Anchor> {
+ let active_completion = self.active_inline_completion.as_ref()?;
+ let result = match &active_completion.completion {
+ InlineCompletion::Edit { edits, .. } => edits.first()?.0.start,
+ InlineCompletion::Move { target, .. } => *target,
+ };
+ Some(result)
}
fn inline_completions_disabled_in_scope(
@@ -1936,7 +1932,7 @@ impl Editor {
scope.override_name().map_or(false, |scope_name| {
settings
- .inline_completions_disabled_in
+ .edit_predictions_disabled_in
.iter()
.any(|s| s == scope_name)
})
@@ -2024,6 +2020,21 @@ impl Editor {
None
}
};
+ if let Some(buffer_id) = new_cursor_position.buffer_id {
+ if !self.registered_buffers.contains_key(&buffer_id) {
+ if let Some(lsp_store) = self.lsp_store(cx) {
+ lsp_store.update(cx, |lsp_store, cx| {
+ let Some(buffer) = self.buffer.read(cx).buffer(buffer_id) else {
+ return;
+ };
+ self.registered_buffers.insert(
+ buffer_id,
+ lsp_store.register_buffer_with_language_servers(&buffer, cx),
+ );
+ })
+ }
+ }
+ }
if let Some(completion_menu) = completion_menu {
let cursor_position = new_cursor_position.to_offset(buffer);
@@ -2585,7 +2596,7 @@ impl Editor {
pub fn dismiss_menus_and_popups(
&mut self,
- should_report_inline_completion_event: bool,
+ is_user_requested: bool,
window: &mut Window,
cx: &mut Context<Self>,
) -> bool {
@@ -2609,7 +2620,7 @@ impl Editor {
return true;
}
- if self.discard_inline_completion(should_report_inline_completion_event, cx) {
+ if is_user_requested && self.discard_inline_completion(true, cx) {
return true;
}
@@ -3004,7 +3015,7 @@ impl Editor {
}
let trigger_in_words =
- this.show_inline_completions_in_menu(cx) || !had_active_inline_completion;
+ this.show_edit_predictions_in_menu(cx) || !had_active_inline_completion;
this.trigger_completion_on_input(&text, trigger_in_words, window, cx);
linked_editing_ranges::refresh_linked_ranges(this, window, cx);
this.refresh_inline_completion(true, false, window, cx);
@@ -3897,7 +3908,7 @@ impl Editor {
*editor.context_menu.borrow_mut() =
Some(CodeContextMenu::Completions(menu));
- if editor.show_inline_completions_in_menu(cx) {
+ if editor.show_edit_predictions_in_menu(cx) {
editor.update_visible_inline_completion(window, cx);
} else {
editor.discard_inline_completion(false, cx);
@@ -3911,7 +3922,7 @@ impl Editor {
// If it was already hidden and we don't show inline
// completions in the menu, we should also show the
// inline-completion when available.
- if was_hidden && editor.show_inline_completions_in_menu(cx) {
+ if was_hidden && editor.show_edit_predictions_in_menu(cx) {
editor.update_visible_inline_completion(window, cx);
}
}
@@ -3943,10 +3954,6 @@ impl Editor {
self.do_completion(action.item_ix, CompletionIntent::Compose, window, cx)
}
- fn toggle_zed_predict_onboarding(&mut self, window: &mut Window, cx: &mut Context<Self>) {
- window.dispatch_action(zed_actions::OpenZedPredictOnboarding.boxed_clone(), cx);
- }
-
fn do_completion(
&mut self,
item_ix: Option<usize>,
@@ -3965,7 +3972,7 @@ impl Editor {
let entries = completions_menu.entries.borrow();
let mat = entries.get(item_ix.unwrap_or(completions_menu.selected_item))?;
- if self.show_inline_completions_in_menu(cx) {
+ if self.show_edit_predictions_in_menu(cx) {
self.discard_inline_completion(true, cx);
}
let candidate_id = mat.candidate_id;
@@ -4646,14 +4653,18 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) -> Option<()> {
- let provider = self.inline_completion_provider()?;
+ let provider = self.edit_prediction_provider()?;
let cursor = self.selections.newest_anchor().head();
let (buffer, cursor_buffer_position) =
self.buffer.read(cx).text_anchor_for_position(cursor, cx)?;
+ if !self.inline_completions_enabled_in_buffer(&buffer, cursor_buffer_position, cx) {
+ self.discard_inline_completion(false, cx);
+ return None;
+ }
+
if !user_requested
- && (!self.enable_inline_completions
- || !self.should_show_inline_completions(&buffer, cursor_buffer_position, cx)
+ && (!self.should_show_inline_completions_in_buffer(&buffer, cursor_buffer_position, cx)
|| !self.is_focused(window)
|| buffer.read(cx).is_empty())
{
@@ -4662,22 +4673,112 @@ impl Editor {
}
self.update_visible_inline_completion(window, cx);
- provider.refresh(buffer, cursor_buffer_position, debounce, cx);
+ provider.refresh(
+ self.project.clone(),
+ buffer,
+ cursor_buffer_position,
+ debounce,
+ cx,
+ );
Some(())
}
+ pub fn should_show_inline_completions(&self, cx: &App) -> bool {
+ let cursor = self.selections.newest_anchor().head();
+ if let Some((buffer, cursor_position)) =
+ self.buffer.read(cx).text_anchor_for_position(cursor, cx)
+ {
+ self.should_show_inline_completions_in_buffer(&buffer, cursor_position, cx)
+ } else {
+ false
+ }
+ }
+
+ fn edit_prediction_requires_modifier(&self, cx: &App) -> bool {
+ let cursor = self.selections.newest_anchor().head();
+
+ self.buffer
+ .read(cx)
+ .text_anchor_for_position(cursor, cx)
+ .map(|(buffer, _)| {
+ all_language_settings(buffer.read(cx).file(), cx).inline_completions_preview_mode()
+ == InlineCompletionPreviewMode::WhenHoldingModifier
+ })
+ .unwrap_or(false)
+ }
+
+ fn should_show_inline_completions_in_buffer(
+ &self,
+ buffer: &Entity<Buffer>,
+ buffer_position: language::Anchor,
+ cx: &App,
+ ) -> bool {
+ if !self.snippet_stack.is_empty() {
+ return false;
+ }
+
+ if self.inline_completions_disabled_in_scope(buffer, buffer_position, cx) {
+ return false;
+ }
+
+ if let Some(show_inline_completions) = self.show_inline_completions_override {
+ show_inline_completions
+ } else {
+ let buffer = buffer.read(cx);
+ self.mode == EditorMode::Full
+ && language_settings(
+ buffer.language_at(buffer_position).map(|l| l.name()),
+ buffer.file(),
+ cx,
+ )
+ .show_edit_predictions
+ }
+ }
+
+ pub fn inline_completions_enabled(&self, cx: &App) -> bool {
+ let cursor = self.selections.newest_anchor().head();
+ if let Some((buffer, cursor_position)) =
+ self.buffer.read(cx).text_anchor_for_position(cursor, cx)
+ {
+ self.inline_completions_enabled_in_buffer(&buffer, cursor_position, cx)
+ } else {
+ false
+ }
+ }
+
+ fn inline_completions_enabled_in_buffer(
+ &self,
+ buffer: &Entity<Buffer>,
+ buffer_position: language::Anchor,
+ cx: &App,
+ ) -> bool {
+ maybe!({
+ let provider = self.edit_prediction_provider()?;
+ if !provider.is_enabled(&buffer, buffer_position, cx) {
+ return Some(false);
+ }
+ let buffer = buffer.read(cx);
+ let Some(file) = buffer.file() else {
+ return Some(true);
+ };
+ let settings = all_language_settings(Some(file), cx);
+ Some(settings.inline_completions_enabled_for_path(file.path()))
+ })
+ .unwrap_or(false)
+ }
+
fn cycle_inline_completion(
&mut self,
direction: Direction,
window: &mut Window,
cx: &mut Context<Self>,
) -> Option<()> {
- let provider = self.inline_completion_provider()?;
+ let provider = self.edit_prediction_provider()?;
let cursor = self.selections.newest_anchor().head();
let (buffer, cursor_buffer_position) =
self.buffer.read(cx).text_anchor_for_position(cursor, cx)?;
- if !self.enable_inline_completions
- || !self.should_show_inline_completions(&buffer, cursor_buffer_position, cx)
+ if self.inline_completions_hidden_for_vim_mode
+ || !self.should_show_inline_completions_in_buffer(&buffer, cursor_buffer_position, cx)
{
return None;
}
@@ -4690,7 +4791,7 @@ impl Editor {
pub fn show_inline_completion(
&mut self,
- _: &ShowInlineCompletion,
+ _: &ShowEditPrediction,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -4725,9 +4826,9 @@ impl Editor {
.detach();
}
- pub fn next_inline_completion(
+ pub fn next_edit_prediction(
&mut self,
- _: &NextInlineCompletion,
+ _: &NextEditPrediction,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -4743,9 +4844,9 @@ impl Editor {
}
}
- pub fn previous_inline_completion(
+ pub fn previous_edit_prediction(
&mut self,
- _: &PreviousInlineCompletion,
+ _: &PreviousEditPrediction,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -4761,9 +4862,9 @@ impl Editor {
}
}
- pub fn accept_inline_completion(
+ pub fn accept_edit_prediction(
&mut self,
- _: &AcceptInlineCompletion,
+ _: &AcceptEditPrediction,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -4784,7 +4885,7 @@ impl Editor {
}
}
- if self.show_inline_completions_in_menu(cx) {
+ if self.show_edit_predictions_in_menu(cx) {
self.hide_context_menu(window, cx);
}
@@ -4797,12 +4898,13 @@ impl Editor {
match &active_inline_completion.completion {
InlineCompletion::Move { target, .. } => {
let target = *target;
+ // Note that this is also done in vim's handler of the Tab action.
self.change_selections(Some(Autoscroll::newest()), window, cx, |selections| {
selections.select_anchor_ranges([target..target]);
});
}
InlineCompletion::Edit { edits, .. } => {
- if let Some(provider) = self.inline_completion_provider() {
+ if let Some(provider) = self.edit_prediction_provider() {
provider.accept(cx);
}
@@ -4829,7 +4931,7 @@ impl Editor {
pub fn accept_partial_inline_completion(
&mut self,
- _: &AcceptPartialInlineCompletion,
+ _: &AcceptPartialEditPrediction,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -4886,7 +4988,7 @@ impl Editor {
self.refresh_inline_completion(true, true, window, cx);
cx.notify();
} else {
- self.accept_inline_completion(&Default::default(), window, cx);
+ self.accept_edit_prediction(&Default::default(), window, cx);
}
}
}
@@ -4901,7 +5003,7 @@ impl Editor {
self.report_inline_completion_event(false, cx);
}
- if let Some(provider) = self.inline_completion_provider() {
+ if let Some(provider) = self.edit_prediction_provider() {
provider.discard(cx);
}
@@ -4909,7 +5011,7 @@ impl Editor {
}
fn report_inline_completion_event(&self, accepted: bool, cx: &App) {
- let Some(provider) = self.inline_completion_provider() else {
+ let Some(provider) = self.edit_prediction_provider() else {
return;
};
@@ -4953,11 +5055,26 @@ impl Editor {
true
}
- pub fn is_previewing_inline_completion(&self) -> bool {
- matches!(
- self.context_menu.borrow().as_ref(),
- Some(CodeContextMenu::Completions(menu)) if !menu.is_empty() && menu.previewing_inline_completion
- )
+    /// Returns true when we're displaying the inline completion popover below the cursor,
+    /// i.e. when we are not previewing and either the LSP autocomplete menu is visible
+    /// or we are in `when_holding_modifier` mode.
+ pub fn inline_completion_visible_in_cursor_popover(
+ &self,
+ has_completion: bool,
+ cx: &App,
+ ) -> bool {
+ if self.previewing_inline_completion
+ || !self.show_edit_predictions_in_menu(cx)
+ || !self.should_show_inline_completions(cx)
+ {
+ return false;
+ }
+
+ if self.has_visible_completions_menu() {
+ return true;
+ }
+
+ has_completion && self.edit_prediction_requires_modifier(cx)
}
fn update_inline_completion_preview(
@@ -4966,36 +5083,13 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
- // Moves jump directly with a preview step
-
- if self
- .active_inline_completion
- .as_ref()
- .map_or(true, |c| c.is_move())
- {
- cx.notify();
- return;
- }
-
- if !self.show_inline_completions_in_menu(cx) {
- return;
- }
-
- let mut menu_borrow = self.context_menu.borrow_mut();
-
- let Some(CodeContextMenu::Completions(completions_menu)) = menu_borrow.as_mut() else {
- return;
- };
-
- if completions_menu.is_empty()
- || completions_menu.previewing_inline_completion == modifiers.alt
- {
+ if !self.show_edit_predictions_in_menu(cx) {
return;
}
- completions_menu.set_previewing_inline_completion(modifiers.alt);
- drop(menu_borrow);
+ self.previewing_inline_completion = modifiers.alt;
self.update_visible_inline_completion(window, cx);
+ cx.notify();
}
fn update_visible_inline_completion(
@@ -5009,13 +5103,12 @@ impl Editor {
let offset_selection = selection.map(|endpoint| endpoint.to_offset(&multibuffer));
let excerpt_id = cursor.excerpt_id;
- let show_in_menu = self.show_inline_completions_in_menu(cx);
+ let show_in_menu = self.show_edit_predictions_in_menu(cx);
let completions_menu_has_precedence = !show_in_menu
&& (self.context_menu.borrow().is_some()
|| (!self.completion_tasks.is_empty() && !self.has_active_inline_completion()));
if completions_menu_has_precedence
|| !offset_selection.is_empty()
- || !self.enable_inline_completions
|| self
.active_inline_completion
.as_ref()
@@ -5030,7 +5123,7 @@ impl Editor {
}
self.take_active_inline_completion(cx);
- let provider = self.inline_completion_provider()?;
+ let provider = self.edit_prediction_provider()?;
let (buffer, cursor_buffer_position) =
self.buffer.read(cx).text_anchor_for_position(cursor, cx)?;
@@ -5070,8 +5163,11 @@ impl Editor {
} else {
None
};
- let completion = if let Some(move_invalidation_row_range) = move_invalidation_row_range {
- invalidation_row_range = move_invalidation_row_range;
+ let is_move =
+ move_invalidation_row_range.is_some() || self.inline_completions_hidden_for_vim_mode;
+ let completion = if is_move {
+ invalidation_row_range =
+ move_invalidation_row_range.unwrap_or(edit_start_row..edit_end_row);
let target = first_edit_start;
let target_point = text::ToPoint::to_point(&target.text_anchor, &snapshot);
// TODO: Base this off of TreeSitter or word boundaries?
@@ -5090,7 +5186,10 @@ impl Editor {
snapshot,
}
} else {
- if !show_in_menu || !self.has_active_completions_menu() {
+ let show_completions_in_buffer = !self
+ .inline_completion_visible_in_cursor_popover(true, cx)
+ && !self.inline_completions_hidden_for_vim_mode;
+ if show_completions_in_buffer {
if edits
.iter()
.all(|(range, _)| range.to_offset(&multibuffer).is_empty())
@@ -5124,7 +5223,7 @@ impl Editor {
let display_mode = if all_edits_insertions_or_deletions(&edits, &multibuffer) {
if provider.show_tab_accept_marker() {
- EditDisplayMode::TabAccept(self.is_previewing_inline_completion())
+ EditDisplayMode::TabAccept
} else {
EditDisplayMode::Inline
}
@@ -5159,20 +5258,20 @@ impl Editor {
Some(())
}
- pub fn inline_completion_provider(&self) -> Option<Arc<dyn InlineCompletionProviderHandle>> {
- Some(self.inline_completion_provider.as_ref()?.provider.clone())
+ pub fn edit_prediction_provider(&self) -> Option<Arc<dyn InlineCompletionProviderHandle>> {
+ Some(self.edit_prediction_provider.as_ref()?.provider.clone())
}
- fn show_inline_completions_in_menu(&self, cx: &App) -> bool {
+ fn show_edit_predictions_in_menu(&self, cx: &App) -> bool {
let by_provider = matches!(
self.menu_inline_completions_policy,
MenuInlineCompletionsPolicy::ByProvider
);
by_provider
- && EditorSettings::get_global(cx).show_inline_completions_in_menu
+ && EditorSettings::get_global(cx).show_edit_predictions_in_menu
&& self
- .inline_completion_provider()
+ .edit_prediction_provider()
.map_or(false, |provider| provider.show_completions_in_menu())
}
@@ -5387,10 +5486,12 @@ impl Editor {
}
pub fn context_menu_visible(&self) -> bool {
- self.context_menu
- .borrow()
- .as_ref()
- .map_or(false, |menu| menu.visible())
+ !self.previewing_inline_completion
+ && self
+ .context_menu
+ .borrow()
+ .as_ref()
+ .map_or(false, |menu| menu.visible())
}
fn context_menu_origin(&self) -> Option<ContextMenuOrigin> {
@@ -5401,7 +5502,7 @@ impl Editor {
}
fn edit_prediction_cursor_popover_height(&self) -> Pixels {
- px(32.)
+ px(30.)
}
fn current_user_player_color(&self, cx: &mut App) -> PlayerColor {
@@ -5418,13 +5519,12 @@ impl Editor {
min_width: Pixels,
max_width: Pixels,
cursor_point: Point,
- line_layouts: &[LineWithInvisibles],
style: &EditorStyle,
accept_keystroke: &gpui::Keystroke,
window: &Window,
cx: &mut Context<Editor>,
) -> Option<AnyElement> {
- let provider = self.inline_completion_provider.as_ref()?;
+ let provider = self.edit_prediction_provider.as_ref()?;
if provider.provider.needs_terms_acceptance(cx) {
return Some(
@@ -5441,16 +5541,24 @@ impl Editor {
.on_mouse_down(MouseButton::Left, |_, window, _| window.prevent_default())
.on_click(cx.listener(|this, _event, window, cx| {
cx.stop_propagation();
- this.toggle_zed_predict_onboarding(window, cx)
+ this.report_editor_event("Edit Prediction Provider ToS Clicked", None, cx);
+ window.dispatch_action(
+ zed_actions::OpenZedPredictOnboarding.boxed_clone(),
+ cx,
+ );
}))
.child(
h_flex()
- .w_full()
+ .flex_1()
.gap_2()
.child(Icon::new(IconName::ZedPredict))
.child(Label::new("Accept Terms of Service"))
.child(div().w_full())
- .child(Icon::new(IconName::ArrowUpRight))
+ .child(
+ Icon::new(IconName::ArrowUpRight)
+ .color(Color::Muted)
+ .size(IconSize::Small),
+ )
.into_any_element(),
)
.into_any(),
@@ -5461,8 +5569,9 @@ impl Editor {
fn pending_completion_container() -> Div {
h_flex()
+ .h_full()
.flex_1()
- .gap_3()
+ .gap_2()
.child(Icon::new(IconName::ZedPredict))
}
@@ -5470,8 +5579,8 @@ impl Editor {
Some(completion) => self.render_edit_prediction_cursor_popover_preview(
completion,
cursor_point,
- line_layouts,
style,
+ window,
cx,
)?,
@@ -5479,8 +5588,8 @@ impl Editor {
Some(stale_completion) => self.render_edit_prediction_cursor_popover_preview(
stale_completion,
cursor_point,
- line_layouts,
style,
+ window,
cx,
)?,
@@ -5511,11 +5620,6 @@ impl Editor {
let has_completion = self.active_inline_completion.is_some();
- let is_move = self
- .active_inline_completion
- .as_ref()
- .map_or(false, |c| c.is_move());
-
Some(
h_flex()
.h(self.edit_prediction_cursor_popover_height())
@@ -5523,39 +5627,29 @@ impl Editor {
.max_w(max_width)
.flex_1()
.px_2()
- .gap_3()
.elevation_2(cx)
.child(completion)
+ .child(ui::Divider::vertical())
.child(
h_flex()
- .border_l_1()
- .border_color(cx.theme().colors().border_variant)
+ .h_full()
+ .gap_1()
.pl_2()
- .child(
- h_flex()
- .font(buffer_font.clone())
- .p_1()
- .rounded_sm()
- .children(ui::render_modifiers(
- &accept_keystroke.modifiers,
- PlatformStyle::platform(),
- if window.modifiers() == accept_keystroke.modifiers {
- Some(Color::Accent)
- } else {
- None
- },
- !is_move,
- )),
- )
- .opacity(if has_completion { 1.0 } else { 0.1 })
- .child(if is_move {
- div()
- .child(ui::Key::new(&accept_keystroke.key, None))
- .font(buffer_font.clone())
- .into_any()
- } else {
- Label::new("Preview").color(Color::Muted).into_any_element()
- }),
+ .child(h_flex().font(buffer_font.clone()).gap_1().children(
+ ui::render_modifiers(
+ &accept_keystroke.modifiers,
+ PlatformStyle::platform(),
+ Some(if !has_completion {
+ Color::Muted
+ } else {
+ Color::Default
+ }),
+ None,
+ true,
+ ),
+ ))
+ .child(Label::new("Preview").into_any_element())
+ .opacity(if has_completion { 1.0 } else { 0.4 }),
)
.into_any(),
)
@@ -35,7 +35,7 @@ pub struct EditorSettings {
pub auto_signature_help: bool,
pub show_signature_help_after_edits: bool,
pub jupyter: Jupyter,
- pub show_inline_completions_in_menu: bool,
+ pub show_edit_predictions_in_menu: bool,
}
#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
@@ -372,7 +372,7 @@ pub struct EditorSettingsContent {
/// Only has an effect if edit prediction provider supports it.
///
/// Default: true
- pub show_inline_completions_in_menu: Option<bool>,
+ pub show_edit_predictions_in_menu: Option<bool>,
/// Jupyter REPL settings.
pub jupyter: Option<JupyterContent>,
@@ -7,6 +7,7 @@ use crate::{
},
JoinLines,
};
+use diff::{BufferDiff, DiffHunkStatus};
use futures::StreamExt;
use gpui::{
div, BackgroundExecutor, SemanticVersion, TestAppContext, UpdateGlobal, VisualTestContext,
@@ -26,7 +27,7 @@ use language_settings::{Formatter, FormatterList, IndentGuideSettings};
use multi_buffer::IndentGuide;
use parking_lot::Mutex;
use pretty_assertions::{assert_eq, assert_ne};
-use project::{buffer_store::BufferChangeSet, FakeFs};
+use project::FakeFs;
use project::{
lsp_command::SIGNATURE_HELP_HIGHLIGHT_CURRENT,
project_settings::{LspSettings, ProjectSettings},
@@ -40,8 +41,9 @@ use std::{
use test::{build_editor_with_project, editor_lsp_test_context::rust_lang};
use unindent::Unindent;
use util::{
- assert_set_eq,
+ assert_set_eq, path,
test::{marked_text_ranges, marked_text_ranges_by, sample_text, TextRangeMarker},
+ uri,
};
use workspace::{
item::{FollowEvent, FollowableItem, Item, ItemHandle},
@@ -1157,7 +1159,7 @@ fn test_fold_at_level(cx: &mut TestAppContext) {
});
_ = editor.update(cx, |editor, window, cx| {
- editor.fold_at_level(&FoldAtLevel { level: 2 }, window, cx);
+ editor.fold_at_level(&FoldAtLevel(2), window, cx);
assert_eq!(
editor.display_text(cx),
"
@@ -1181,7 +1183,7 @@ fn test_fold_at_level(cx: &mut TestAppContext) {
.unindent(),
);
- editor.fold_at_level(&FoldAtLevel { level: 1 }, window, cx);
+ editor.fold_at_level(&FoldAtLevel(1), window, cx);
assert_eq!(
editor.display_text(cx),
"
@@ -1196,7 +1198,7 @@ fn test_fold_at_level(cx: &mut TestAppContext) {
);
editor.unfold_all(&UnfoldAll, window, cx);
- editor.fold_at_level(&FoldAtLevel { level: 0 }, window, cx);
+ editor.fold_at_level(&FoldAtLevel(0), window, cx);
assert_eq!(
editor.display_text(cx),
"
@@ -5619,13 +5621,13 @@ async fn test_fold_function_bodies(cx: &mut gpui::TestAppContext) {
let base_text = r#"
impl A {
- // this is an unstaged comment
+ // this is an uncommitted comment
fn b() {
c();
}
- // this is another unstaged comment
+ // this is another uncommitted comment
fn d() {
// e
@@ -5668,13 +5670,13 @@ async fn test_fold_function_bodies(cx: &mut gpui::TestAppContext) {
cx.assert_state_with_diff(
"
ˇimpl A {
- - // this is an unstaged comment
+ - // this is an uncommitted comment
fn b() {
c();
}
- - // this is another unstaged comment
+ - // this is another uncommitted comment
-
fn d() {
// e
@@ -5691,13 +5693,13 @@ async fn test_fold_function_bodies(cx: &mut gpui::TestAppContext) {
let expected_display_text = "
impl A {
- // this is an unstaged comment
+ // this is an uncommitted comment
fn b() {
⋯
}
- // this is another unstaged comment
+ // this is another uncommitted comment
fn d() {
⋯
@@ -7074,9 +7076,9 @@ async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let fs = FakeFs::new(cx.executor());
- fs.insert_file("/file.rs", Default::default()).await;
+ fs.insert_file(path!("/file.rs"), Default::default()).await;
- let project = Project::test(fs, ["/file.rs".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/file.rs").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
@@ -7092,7 +7094,9 @@ async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) {
);
let buffer = project
- .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx))
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/file.rs"), cx)
+ })
.await
.unwrap();
@@ -7117,7 +7121,7 @@ async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) {
.handle_request::<lsp::request::Formatting, _, _>(move |params, _| async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path("/file.rs").unwrap()
+ lsp::Url::from_file_path(path!("/file.rs")).unwrap()
);
assert_eq!(params.options.tab_size, 4);
Ok(Some(vec![lsp::TextEdit::new(
@@ -7145,7 +7149,7 @@ async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) {
fake_server.handle_request::<lsp::request::Formatting, _, _>(move |params, _| async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path("/file.rs").unwrap()
+ lsp::Url::from_file_path(path!("/file.rs")).unwrap()
);
futures::future::pending::<()>().await;
unreachable!()
@@ -7202,7 +7206,7 @@ async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) {
.handle_request::<lsp::request::Formatting, _, _>(move |params, _| async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path("/file.rs").unwrap()
+ lsp::Url::from_file_path(path!("/file.rs")).unwrap()
);
assert_eq!(params.options.tab_size, 8);
Ok(Some(vec![]))
@@ -7237,7 +7241,7 @@ async fn test_multibuffer_format_during_save(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/a",
+ path!("/a"),
json!({
"main.rs": sample_text_1,
"other.rs": sample_text_2,
@@ -7246,7 +7250,7 @@ async fn test_multibuffer_format_during_save(cx: &mut gpui::TestAppContext) {
)
.await;
- let project = Project::test(fs, ["/a".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx);
@@ -7421,20 +7425,20 @@ async fn test_multibuffer_format_during_save(cx: &mut gpui::TestAppContext) {
assert!(cx.read(|cx| !multi_buffer_editor.is_dirty(cx)));
assert_eq!(
multi_buffer_editor.update(cx, |editor, cx| editor.text(cx)),
- "a|o[file:///a/main.rs formatted]bbbb\ncccc\n\nffff\ngggg\n\njjjj\n\nlll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|\nr\n\nuuuu\n\nvvvv\nwwww\nxxxx\n\n{{{{\n||||\n\n\u{7f}\u{7f}\u{7f}\u{7f}",
+ uri!("a|o[file:///a/main.rs formatted]bbbb\ncccc\n\nffff\ngggg\n\njjjj\n\nlll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|\nr\n\nuuuu\n\nvvvv\nwwww\nxxxx\n\n{{{{\n||||\n\n\u{7f}\u{7f}\u{7f}\u{7f}"),
);
buffer_1.update(cx, |buffer, _| {
assert!(!buffer.is_dirty());
assert_eq!(
buffer.text(),
- "a|o[file:///a/main.rs formatted]bbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n",
+ uri!("a|o[file:///a/main.rs formatted]bbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n"),
)
});
buffer_2.update(cx, |buffer, _| {
assert!(!buffer.is_dirty());
assert_eq!(
buffer.text(),
- "lll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|oooo\npppp\nr\nssss\ntttt\nuuuu\n",
+ uri!("lll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|oooo\npppp\nr\nssss\ntttt\nuuuu\n"),
)
});
buffer_3.update(cx, |buffer, _| {
@@ -7448,9 +7452,9 @@ async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let fs = FakeFs::new(cx.executor());
- fs.insert_file("/file.rs", Default::default()).await;
+ fs.insert_file(path!("/file.rs"), Default::default()).await;
- let project = Project::test(fs, ["/file.rs".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
@@ -7466,7 +7470,9 @@ async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) {
);
let buffer = project
- .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx))
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/file.rs"), cx)
+ })
.await
.unwrap();
@@ -7491,7 +7497,7 @@ async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) {
.handle_request::<lsp::request::RangeFormatting, _, _>(move |params, _| async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path("/file.rs").unwrap()
+ lsp::Url::from_file_path(path!("/file.rs")).unwrap()
);
assert_eq!(params.options.tab_size, 4);
Ok(Some(vec![lsp::TextEdit::new(
@@ -7519,7 +7525,7 @@ async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) {
move |params, _| async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path("/file.rs").unwrap()
+ lsp::Url::from_file_path(path!("/file.rs")).unwrap()
);
futures::future::pending::<()>().await;
unreachable!()
@@ -7577,7 +7583,7 @@ async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) {
.handle_request::<lsp::request::RangeFormatting, _, _>(move |params, _| async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path("/file.rs").unwrap()
+ lsp::Url::from_file_path(path!("/file.rs")).unwrap()
);
assert_eq!(params.options.tab_size, 8);
Ok(Some(vec![]))
@@ -7597,9 +7603,9 @@ async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) {
});
let fs = FakeFs::new(cx.executor());
- fs.insert_file("/file.rs", Default::default()).await;
+ fs.insert_file(path!("/file.rs"), Default::default()).await;
- let project = Project::test(fs, ["/file.rs".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(Arc::new(Language::new(
@@ -7633,7 +7639,9 @@ async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) {
);
let buffer = project
- .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx))
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/file.rs"), cx)
+ })
.await
.unwrap();
@@ -7663,7 +7671,7 @@ async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) {
.handle_request::<lsp::request::Formatting, _, _>(move |params, _| async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path("/file.rs").unwrap()
+ lsp::Url::from_file_path(path!("/file.rs")).unwrap()
);
assert_eq!(params.options.tab_size, 4);
Ok(Some(vec![lsp::TextEdit::new(
@@ -7687,7 +7695,7 @@ async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) {
fake_server.handle_request::<lsp::request::Formatting, _, _>(move |params, _| async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path("/file.rs").unwrap()
+ lsp::Url::from_file_path(path!("/file.rs")).unwrap()
);
futures::future::pending::<()>().await;
unreachable!()
@@ -8727,14 +8735,14 @@ async fn test_multiline_completion(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/a",
+ path!("/a"),
json!({
"main.ts": "a",
}),
)
.await;
- let project = Project::test(fs, ["/a".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
let typescript_language = Arc::new(Language::new(
LanguageConfig {
@@ -8794,7 +8802,7 @@ async fn test_multiline_completion(cx: &mut gpui::TestAppContext) {
.unwrap();
let _buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/a/main.ts", cx)
+ project.open_local_buffer_with_lsp(path!("/a/main.ts"), cx)
})
.await
.unwrap();
@@ -10570,7 +10578,7 @@ async fn go_to_prev_overlapping_diagnostic(
.update_diagnostics(
LanguageServerId(0),
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path("/root/file").unwrap(),
+ uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(),
version: None,
diagnostics: vec![
lsp::Diagnostic {
@@ -10645,6 +10653,176 @@ async fn go_to_prev_overlapping_diagnostic(
"});
}
+#[gpui::test]
+async fn cycle_through_same_place_diagnostics(
+ executor: BackgroundExecutor,
+ cx: &mut gpui::TestAppContext,
+) {
+ init_test(cx, |_| {});
+
+ let mut cx = EditorTestContext::new(cx).await;
+ let lsp_store =
+ cx.update_editor(|editor, _, cx| editor.project.as_ref().unwrap().read(cx).lsp_store());
+
+ cx.set_state(indoc! {"
+ ˇfn func(abc def: i32) -> u32 {
+ }
+ "});
+
+ cx.update(|_, cx| {
+ lsp_store.update(cx, |lsp_store, cx| {
+ lsp_store
+ .update_diagnostics(
+ LanguageServerId(0),
+ lsp::PublishDiagnosticsParams {
+ uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(),
+ version: None,
+ diagnostics: vec![
+ lsp::Diagnostic {
+ range: lsp::Range::new(
+ lsp::Position::new(0, 11),
+ lsp::Position::new(0, 12),
+ ),
+ severity: Some(lsp::DiagnosticSeverity::ERROR),
+ ..Default::default()
+ },
+ lsp::Diagnostic {
+ range: lsp::Range::new(
+ lsp::Position::new(0, 12),
+ lsp::Position::new(0, 15),
+ ),
+ severity: Some(lsp::DiagnosticSeverity::ERROR),
+ ..Default::default()
+ },
+ lsp::Diagnostic {
+ range: lsp::Range::new(
+ lsp::Position::new(0, 12),
+ lsp::Position::new(0, 15),
+ ),
+ severity: Some(lsp::DiagnosticSeverity::ERROR),
+ ..Default::default()
+ },
+ lsp::Diagnostic {
+ range: lsp::Range::new(
+ lsp::Position::new(0, 25),
+ lsp::Position::new(0, 28),
+ ),
+ severity: Some(lsp::DiagnosticSeverity::ERROR),
+ ..Default::default()
+ },
+ ],
+ },
+ &[],
+ cx,
+ )
+ .unwrap()
+ });
+ });
+ executor.run_until_parked();
+
+ //// Backward
+
+ // Fourth diagnostic
+ cx.update_editor(|editor, window, cx| {
+ editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, window, cx);
+ });
+ cx.assert_editor_state(indoc! {"
+ fn func(abc def: i32) -> ˇu32 {
+ }
+ "});
+
+ // Third diagnostic
+ cx.update_editor(|editor, window, cx| {
+ editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, window, cx);
+ });
+ cx.assert_editor_state(indoc! {"
+ fn func(abc ˇdef: i32) -> u32 {
+ }
+ "});
+
+ // Second diagnostic, same place
+ cx.update_editor(|editor, window, cx| {
+ editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, window, cx);
+ });
+ cx.assert_editor_state(indoc! {"
+ fn func(abc ˇdef: i32) -> u32 {
+ }
+ "});
+
+ // First diagnostic
+ cx.update_editor(|editor, window, cx| {
+ editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, window, cx);
+ });
+ cx.assert_editor_state(indoc! {"
+ fn func(abcˇ def: i32) -> u32 {
+ }
+ "});
+
+ // Wrapped over, fourth diagnostic
+ cx.update_editor(|editor, window, cx| {
+ editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, window, cx);
+ });
+ cx.assert_editor_state(indoc! {"
+ fn func(abc def: i32) -> ˇu32 {
+ }
+ "});
+
+ cx.update_editor(|editor, window, cx| {
+ editor.move_to_beginning(&MoveToBeginning, window, cx);
+ });
+ cx.assert_editor_state(indoc! {"
+ ˇfn func(abc def: i32) -> u32 {
+ }
+ "});
+
+ //// Forward
+
+ // First diagnostic
+ cx.update_editor(|editor, window, cx| {
+ editor.go_to_diagnostic(&GoToDiagnostic, window, cx);
+ });
+ cx.assert_editor_state(indoc! {"
+ fn func(abcˇ def: i32) -> u32 {
+ }
+ "});
+
+ // Second diagnostic
+ cx.update_editor(|editor, window, cx| {
+ editor.go_to_diagnostic(&GoToDiagnostic, window, cx);
+ });
+ cx.assert_editor_state(indoc! {"
+ fn func(abc ˇdef: i32) -> u32 {
+ }
+ "});
+
+ // Third diagnostic, same place
+ cx.update_editor(|editor, window, cx| {
+ editor.go_to_diagnostic(&GoToDiagnostic, window, cx);
+ });
+ cx.assert_editor_state(indoc! {"
+ fn func(abc ˇdef: i32) -> u32 {
+ }
+ "});
+
+ // Fourth diagnostic
+ cx.update_editor(|editor, window, cx| {
+ editor.go_to_diagnostic(&GoToDiagnostic, window, cx);
+ });
+ cx.assert_editor_state(indoc! {"
+ fn func(abc def: i32) -> ˇu32 {
+ }
+ "});
+
+ // Wrapped around, first diagnostic
+ cx.update_editor(|editor, window, cx| {
+ editor.go_to_diagnostic(&GoToDiagnostic, window, cx);
+ });
+ cx.assert_editor_state(indoc! {"
+ fn func(abcˇ def: i32) -> u32 {
+ }
+ "});
+}
+
#[gpui::test]
async fn test_diagnostics_with_links(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -10663,7 +10841,7 @@ async fn test_diagnostics_with_links(cx: &mut TestAppContext) {
lsp_store.update_diagnostics(
LanguageServerId(0),
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path("/root/file").unwrap(),
+ uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 12)),
@@ -10923,14 +11101,14 @@ async fn test_on_type_formatting_not_triggered(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/a",
+ path!("/a"),
json!({
"main.rs": "fn main() { let a = 5; }",
"other.rs": "// Test file",
}),
)
.await;
- let project = Project::test(fs, ["/a".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(Arc::new(Language::new(
@@ -10982,7 +11160,7 @@ async fn test_on_type_formatting_not_triggered(cx: &mut gpui::TestAppContext) {
let buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer("/a/main.rs", cx)
+ project.open_local_buffer(path!("/a/main.rs"), cx)
})
.await
.unwrap();
@@ -11002,7 +11180,7 @@ async fn test_on_type_formatting_not_triggered(cx: &mut gpui::TestAppContext) {
fake_server.handle_request::<lsp::request::OnTypeFormatting, _, _>(|params, _| async move {
assert_eq!(
params.text_document_position.text_document.uri,
- lsp::Url::from_file_path("/a/main.rs").unwrap(),
+ lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
);
assert_eq!(
params.text_document_position.position,
@@ -11040,7 +11218,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut gpui::Test
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/a",
+ path!("/a"),
json!({
"main.rs": "fn main() { let a = 5; }",
"other.rs": "// Test file",
@@ -11048,7 +11226,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut gpui::Test
)
.await;
- let project = Project::test(fs, ["/a".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let server_restarts = Arc::new(AtomicUsize::new(0));
let closure_restarts = Arc::clone(&server_restarts);
@@ -11088,7 +11266,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut gpui::Test
let _window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let _buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/a/main.rs", cx)
+ project.open_local_buffer_with_lsp(path!("/a/main.rs"), cx)
})
.await
.unwrap();
@@ -11861,9 +12039,9 @@ async fn test_document_format_with_prettier(cx: &mut gpui::TestAppContext) {
});
let fs = FakeFs::new(cx.executor());
- fs.insert_file("/file.ts", Default::default()).await;
+ fs.insert_file(path!("/file.ts"), Default::default()).await;
- let project = Project::test(fs, ["/file.ts".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/file.ts").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(Arc::new(Language::new(
@@ -11895,7 +12073,9 @@ async fn test_document_format_with_prettier(cx: &mut gpui::TestAppContext) {
let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX;
let buffer = project
- .update(cx, |project, cx| project.open_local_buffer("/file.ts", cx))
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/file.ts"), cx)
+ })
.await
.unwrap();
@@ -12431,11 +12611,10 @@ async fn test_multibuffer_reverts(cx: &mut gpui::TestAppContext) {
(buffer_2.clone(), base_text_2),
(buffer_3.clone(), base_text_3),
] {
- let change_set = cx
- .new(|cx| BufferChangeSet::new_with_base_text(diff_base.to_string(), &buffer, cx));
+ let diff = cx.new(|cx| BufferDiff::new_with_base_text(&diff_base, &buffer, cx));
editor
.buffer
- .update(cx, |buffer, cx| buffer.add_change_set(change_set, cx));
+ .update(cx, |buffer, cx| buffer.add_diff(diff, cx));
}
});
cx.executor().run_until_parked();
@@ -13125,12 +13304,10 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
(buffer_2.clone(), file_2_old),
(buffer_3.clone(), file_3_old),
] {
- let change_set = cx.new(|cx| {
- BufferChangeSet::new_with_base_text(diff_base.to_string(), &buffer, cx)
- });
+ let diff = cx.new(|cx| BufferDiff::new_with_base_text(&diff_base, &buffer, cx));
editor
.buffer
- .update(cx, |buffer, cx| buffer.add_change_set(change_set, cx));
+ .update(cx, |buffer, cx| buffer.add_diff(diff, cx));
}
})
.unwrap();
@@ -13212,7 +13389,7 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext
init_test(cx, |_| {});
let base = "aaa\nbbb\nccc\nddd\neee\nfff\nggg\n";
- let text = "aaa\nBBB\nBB2\nccc\nDDD\nEEE\nfff\nggg\n";
+ let text = "aaa\nBBB\nBB2\nccc\nDDD\nEEE\nfff\nggg\nhhh\niii\n";
let buffer = cx.new(|cx| Buffer::local(text.to_string(), cx));
let multi_buffer = cx.new(|cx| {
@@ -13225,7 +13402,11 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext
primary: None,
},
ExcerptRange {
- context: Point::new(5, 0)..Point::new(7, 0),
+ context: Point::new(4, 0)..Point::new(7, 0),
+ primary: None,
+ },
+ ExcerptRange {
+ context: Point::new(9, 0)..Point::new(10, 0),
primary: None,
},
],
@@ -13239,11 +13420,10 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext
});
editor
.update(cx, |editor, _window, cx| {
- let change_set =
- cx.new(|cx| BufferChangeSet::new_with_base_text(base.to_string(), &buffer, cx));
+ let diff = cx.new(|cx| BufferDiff::new_with_base_text(base, &buffer, cx));
editor
.buffer
- .update(cx, |buffer, cx| buffer.add_change_set(change_set, cx))
+ .update(cx, |buffer, cx| buffer.add_diff(diff, cx))
})
.unwrap();
@@ -13255,14 +13435,22 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext
});
cx.executor().run_until_parked();
+ // When the start of a hunk coincides with the start of its excerpt,
+    // the hunk is expanded. When the start of a hunk is earlier than
+ // the start of its excerpt, the hunk is not expanded.
cx.assert_state_with_diff(
"
ˇaaa
- bbb
+ BBB
+ - ddd
+ - eee
+ + DDD
+ EEE
fff
+
+ iii
"
.unindent(),
);
@@ -13500,8 +13688,8 @@ async fn test_toggling_adjacent_diff_hunks(cx: &mut TestAppContext) {
cx.set_state(indoc! { "
one
- TWO
- ˇthree
+ ˇTWO
+ three
four
five
"});
@@ -13514,15 +13702,14 @@ async fn test_toggling_adjacent_diff_hunks(cx: &mut TestAppContext) {
indoc! { "
one
- two
- + TWO
- ˇthree
+ + ˇTWO
+ three
four
five
"}
.to_string(),
);
cx.update_editor(|editor, window, cx| {
- editor.move_up(&Default::default(), window, cx);
editor.move_up(&Default::default(), window, cx);
editor.toggle_selected_diff_hunks(&Default::default(), window, cx);
});
@@ -14402,15 +14589,10 @@ async fn test_indent_guide_with_expanded_diff_hunks(cx: &mut gpui::TestAppContex
editor.buffer().update(cx, |multibuffer, cx| {
let buffer = multibuffer.as_singleton().unwrap();
- let change_set = cx.new(|cx| {
- let mut change_set = BufferChangeSet::new(&buffer, cx);
- let _ =
- change_set.set_base_text(base_text.into(), buffer.read(cx).text_snapshot(), cx);
- change_set
- });
+ let diff = cx.new(|cx| BufferDiff::new_with_base_text(base_text, &buffer, cx));
multibuffer.set_all_diff_hunks_expanded(cx);
- multibuffer.add_change_set(change_set, cx);
+ multibuffer.add_diff(diff, cx);
buffer.read(cx).remote_id()
})
@@ -14863,7 +15045,7 @@ async fn test_multi_buffer_folding(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/a",
+ path!("/a"),
json!({
"first.rs": sample_text_1,
"second.rs": sample_text_2,
@@ -14871,7 +15053,7 @@ async fn test_multi_buffer_folding(cx: &mut gpui::TestAppContext) {
}),
)
.await;
- let project = Project::test(fs, ["/a".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx);
let worktree = project.update(cx, |project, cx| {
@@ -15047,7 +15229,7 @@ async fn test_multi_buffer_single_excerpts_folding(cx: &mut gpui::TestAppContext
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/a",
+ path!("/a"),
json!({
"first.rs": sample_text_1,
"second.rs": sample_text_2,
@@ -15055,7 +15237,7 @@ async fn test_multi_buffer_single_excerpts_folding(cx: &mut gpui::TestAppContext
}),
)
.await;
- let project = Project::test(fs, ["/a".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx);
let worktree = project.update(cx, |project, cx| {
@@ -15194,13 +15376,13 @@ async fn test_multi_buffer_with_single_excerpt_folding(cx: &mut gpui::TestAppCon
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/a",
+ path!("/a"),
json!({
"main.rs": sample_text,
}),
)
.await;
- let project = Project::test(fs, ["/a".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx);
let worktree = project.update(cx, |project, cx| {
@@ -26,8 +26,9 @@ use crate::{
};
use client::ParticipantIndex;
use collections::{BTreeMap, HashMap, HashSet};
+use diff::DiffHunkStatus;
use file_icons::FileIcons;
-use git::{blame::BlameEntry, diff::DiffHunkStatus, Oid};
+use git::{blame::BlameEntry, Oid};
use gpui::{
anchored, deferred, div, fill, linear_color_stop, linear_gradient, outline, point, px, quad,
relative, size, svg, transparent_black, Action, AnyElement, App, AvailableSpace, Axis, Bounds,
@@ -430,7 +431,6 @@ impl EditorElement {
}
});
register_action(editor, window, Editor::restart_language_server);
- register_action(editor, window, Editor::cancel_language_server_work);
register_action(editor, window, Editor::show_character_palette);
register_action(editor, window, |editor, action, window, cx| {
if let Some(task) = editor.confirm_completion(action, window, cx) {
@@ -475,8 +475,8 @@ impl EditorElement {
}
});
register_action(editor, window, Editor::show_signature_help);
- register_action(editor, window, Editor::next_inline_completion);
- register_action(editor, window, Editor::previous_inline_completion);
+ register_action(editor, window, Editor::next_edit_prediction);
+ register_action(editor, window, Editor::previous_edit_prediction);
register_action(editor, window, Editor::show_inline_completion);
register_action(editor, window, Editor::context_menu_first);
register_action(editor, window, Editor::context_menu_prev);
@@ -486,7 +486,7 @@ impl EditorElement {
register_action(editor, window, Editor::unique_lines_case_insensitive);
register_action(editor, window, Editor::unique_lines_case_sensitive);
register_action(editor, window, Editor::accept_partial_inline_completion);
- register_action(editor, window, Editor::accept_inline_completion);
+ register_action(editor, window, Editor::accept_edit_prediction);
register_action(editor, window, Editor::revert_file);
register_action(editor, window, Editor::revert_selected_hunks);
register_action(editor, window, Editor::apply_all_diff_hunks);
@@ -503,7 +503,6 @@ impl EditorElement {
let position_map = layout.position_map.clone();
window.on_key_event({
let editor = self.editor.clone();
- let text_hitbox = layout.text_hitbox.clone();
move |event: &ModifiersChangedEvent, phase, window, cx| {
if phase != DispatchPhase::Bubble {
return;
@@ -512,7 +511,7 @@ impl EditorElement {
if editor.hover_state.focused(window, cx) {
return;
}
- Self::modifiers_changed(editor, event, &position_map, &text_hitbox, window, cx)
+ Self::modifiers_changed(editor, event, &position_map, window, cx)
})
}
});
@@ -522,19 +521,18 @@ impl EditorElement {
editor: &mut Editor,
event: &ModifiersChangedEvent,
position_map: &PositionMap,
- text_hitbox: &Hitbox,
window: &mut Window,
cx: &mut Context<Editor>,
) {
editor.update_inline_completion_preview(&event.modifiers, window, cx);
let mouse_position = window.mouse_position();
- if !text_hitbox.is_hovered(window) {
+ if !position_map.text_hitbox.is_hovered(window) {
return;
}
editor.update_hovered_link(
- position_map.point_for_position(text_hitbox.bounds, mouse_position),
+ position_map.point_for_position(mouse_position),
&position_map.snapshot,
event.modifiers,
window,
@@ -542,14 +540,11 @@ impl EditorElement {
)
}
- #[allow(clippy::too_many_arguments)]
fn mouse_left_down(
editor: &mut Editor,
event: &MouseDownEvent,
hovered_hunk: Option<Range<Anchor>>,
position_map: &PositionMap,
- text_hitbox: &Hitbox,
- gutter_hitbox: &Hitbox,
line_numbers: &HashMap<MultiBufferRow, LineNumberLayout>,
window: &mut Window,
cx: &mut Context<Editor>,
@@ -558,11 +553,13 @@ impl EditorElement {
return;
}
+ let text_hitbox = &position_map.text_hitbox;
+ let gutter_hitbox = &position_map.gutter_hitbox;
let mut click_count = event.click_count;
let mut modifiers = event.modifiers;
if let Some(hovered_hunk) = hovered_hunk {
- editor.toggle_diff_hunks_in_ranges(vec![hovered_hunk], cx);
+ editor.toggle_diff_hunks_in_ranges_narrow(vec![hovered_hunk], cx);
cx.notify();
return;
} else if gutter_hitbox.is_hovered(window) {
@@ -614,8 +611,7 @@ impl EditorElement {
}
}
- let point_for_position =
- position_map.point_for_position(text_hitbox.bounds, event.position);
+ let point_for_position = position_map.point_for_position(event.position);
let position = point_for_position.previous_valid;
if modifiers.shift && modifiers.alt {
editor.select(
@@ -690,15 +686,13 @@ impl EditorElement {
editor: &mut Editor,
event: &MouseDownEvent,
position_map: &PositionMap,
- text_hitbox: &Hitbox,
window: &mut Window,
cx: &mut Context<Editor>,
) {
- if !text_hitbox.is_hovered(window) {
+ if !position_map.text_hitbox.is_hovered(window) {
return;
}
- let point_for_position =
- position_map.point_for_position(text_hitbox.bounds, event.position);
+ let point_for_position = position_map.point_for_position(event.position);
mouse_context_menu::deploy_context_menu(
editor,
Some(event.position),
@@ -713,16 +707,14 @@ impl EditorElement {
editor: &mut Editor,
event: &MouseDownEvent,
position_map: &PositionMap,
- text_hitbox: &Hitbox,
window: &mut Window,
cx: &mut Context<Editor>,
) {
- if !text_hitbox.is_hovered(window) || window.default_prevented() {
+ if !position_map.text_hitbox.is_hovered(window) || window.default_prevented() {
return;
}
- let point_for_position =
- position_map.point_for_position(text_hitbox.bounds, event.position);
+ let point_for_position = position_map.point_for_position(event.position);
let position = point_for_position.previous_valid;
editor.select(
@@ -739,15 +731,11 @@ impl EditorElement {
fn mouse_up(
editor: &mut Editor,
event: &MouseUpEvent,
- #[cfg_attr(
- not(any(target_os = "linux", target_os = "freebsd")),
- allow(unused_variables)
- )]
position_map: &PositionMap,
- text_hitbox: &Hitbox,
window: &mut Window,
cx: &mut Context<Editor>,
) {
+ let text_hitbox = &position_map.text_hitbox;
let end_selection = editor.has_pending_selection();
let pending_nonempty_selections = editor.has_pending_nonempty_selection();
@@ -767,8 +755,7 @@ impl EditorElement {
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
if EditorSettings::get_global(cx).middle_click_paste {
if let Some(text) = cx.read_from_primary().and_then(|item| item.text()) {
- let point_for_position =
- position_map.point_for_position(text_hitbox.bounds, event.position);
+ let point_for_position = position_map.point_for_position(event.position);
let position = point_for_position.previous_valid;
editor.select(
@@ -791,10 +778,10 @@ impl EditorElement {
editor: &mut Editor,
event: &ClickEvent,
position_map: &PositionMap,
- text_hitbox: &Hitbox,
window: &mut Window,
cx: &mut Context<Editor>,
) {
+ let text_hitbox = &position_map.text_hitbox;
let pending_nonempty_selections = editor.has_pending_nonempty_selection();
let multi_cursor_setting = EditorSettings::get_global(cx).multi_cursor_modifier;
@@ -804,7 +791,7 @@ impl EditorElement {
};
if !pending_nonempty_selections && multi_cursor_modifier && text_hitbox.is_hovered(window) {
- let point = position_map.point_for_position(text_hitbox.bounds, event.up.position);
+ let point = position_map.point_for_position(event.up.position);
editor.handle_click_hovered_link(point, event.modifiers(), window, cx);
cx.stop_propagation();
@@ -815,7 +802,6 @@ impl EditorElement {
editor: &mut Editor,
event: &MouseMoveEvent,
position_map: &PositionMap,
- text_bounds: Bounds<Pixels>,
window: &mut Window,
cx: &mut Context<Editor>,
) {
@@ -823,7 +809,8 @@ impl EditorElement {
return;
}
- let point_for_position = position_map.point_for_position(text_bounds, event.position);
+ let text_bounds = position_map.text_hitbox.bounds;
+ let point_for_position = position_map.point_for_position(event.position);
let mut scroll_delta = gpui::Point::<f32>::default();
let vertical_margin = position_map.line_height.min(text_bounds.size.height / 3.0);
let top = text_bounds.origin.y + vertical_margin;
@@ -870,19 +857,18 @@ impl EditorElement {
editor: &mut Editor,
event: &MouseMoveEvent,
position_map: &PositionMap,
- text_hitbox: &Hitbox,
- gutter_hitbox: &Hitbox,
window: &mut Window,
cx: &mut Context<Editor>,
) {
+ let text_hitbox = &position_map.text_hitbox;
+ let gutter_hitbox = &position_map.gutter_hitbox;
let modifiers = event.modifiers;
let gutter_hovered = gutter_hitbox.is_hovered(window);
editor.set_gutter_hovered(gutter_hovered, cx);
// Don't trigger hover popover if mouse is hovering over context menu
if text_hitbox.is_hovered(window) {
- let point_for_position =
- position_map.point_for_position(text_hitbox.bounds, event.position);
+ let point_for_position = position_map.point_for_position(event.position);
editor.update_hovered_link(
point_for_position,
@@ -1668,7 +1654,7 @@ impl EditorElement {
if let Some(inline_completion) = editor.active_inline_completion.as_ref() {
match &inline_completion.completion {
InlineCompletion::Edit {
- display_mode: EditDisplayMode::TabAccept(_),
+ display_mode: EditDisplayMode::TabAccept,
..
} => padding += INLINE_ACCEPT_SUGGESTION_EM_WIDTHS,
_ => {}
@@ -2418,35 +2404,18 @@ impl EditorElement {
height,
} => {
let selected = selected_buffer_ids.contains(&first_excerpt.buffer_id);
- let icon_offset = gutter_dimensions.width
- - (gutter_dimensions.left_padding + gutter_dimensions.margin);
let mut result = v_flex().id(block_id).w_full();
if let Some(prev_excerpt) = prev_excerpt {
if *show_excerpt_controls {
- result =
- result.child(
- h_flex()
- .id("expand_down_hit_area")
- .w(icon_offset)
- .h(MULTI_BUFFER_EXCERPT_HEADER_HEIGHT as f32
- * window.line_height())
- .flex_none()
- .justify_end()
- .child(self.render_expand_excerpt_button(
- IconName::ArrowDownFromLine,
- None,
- cx,
- ))
- .on_click(window.listener_for(&self.editor, {
- let excerpt_id = prev_excerpt.id;
- let direction = ExpandExcerptDirection::Down;
- move |editor, _, _, cx| {
- editor.expand_excerpt(excerpt_id, direction, cx);
- cx.stop_propagation();
- }
- })),
- );
+ result = result.child(self.render_expand_excerpt_control(
+ block_id,
+ ExpandExcerptDirection::Down,
+ prev_excerpt.id,
+ gutter_dimensions,
+ window,
+ cx,
+ ));
}
}
@@ -2470,65 +2439,19 @@ impl EditorElement {
height,
starts_new_buffer,
} => {
- let icon_offset = gutter_dimensions.width
- - (gutter_dimensions.left_padding + gutter_dimensions.margin);
- let header_height =
- MULTI_BUFFER_EXCERPT_HEADER_HEIGHT as f32 * window.line_height();
let color = cx.theme().colors().clone();
- let hover_color = color.border_variant.opacity(0.5);
- let focus_handle = self.editor.focus_handle(cx).clone();
-
let mut result = v_flex().id(block_id).w_full();
- let expand_area = |id: SharedString| {
- h_flex()
- .id(id)
- .w_full()
- .cursor_pointer()
- .block_mouse_down()
- .on_mouse_move(|_, _, cx| cx.stop_propagation())
- .hover(|style| style.bg(hover_color))
- .tooltip({
- let focus_handle = focus_handle.clone();
- move |window, cx| {
- Tooltip::for_action_in(
- "Expand Excerpt",
- &ExpandExcerpts { lines: 0 },
- &focus_handle,
- window,
- cx,
- )
- }
- })
- };
if let Some(prev_excerpt) = prev_excerpt {
if *show_excerpt_controls {
- let group_name = "expand-down";
-
- result = result.child(
- expand_area(format!("block-{}-down", block_id).into())
- .group(group_name)
- .child(
- h_flex()
- .w(icon_offset)
- .h(header_height)
- .flex_none()
- .justify_end()
- .child(self.render_expand_excerpt_button(
- IconName::ArrowDownFromLine,
- Some(group_name.to_string()),
- cx,
- )),
- )
- .on_click(window.listener_for(&self.editor, {
- let excerpt_id = prev_excerpt.id;
- let direction = ExpandExcerptDirection::Down;
- move |editor, _, _, cx| {
- editor.expand_excerpt(excerpt_id, direction, cx);
- cx.stop_propagation();
- }
- })),
- );
+ result = result.child(self.render_expand_excerpt_control(
+ block_id,
+ ExpandExcerptDirection::Down,
+ prev_excerpt.id,
+ gutter_dimensions,
+ window,
+ cx,
+ ));
}
}
@@ -2554,43 +2477,20 @@ impl EditorElement {
}
if *show_excerpt_controls {
- let group_name = "expand-up-first";
-
- result = result.child(
- h_flex().group(group_name).child(
- expand_area(format!("block-{}-up-first", block_id).into())
- .h(header_height)
- .child(
- h_flex()
- .w(icon_offset)
- .h(header_height)
- .flex_none()
- .justify_end()
- .child(self.render_expand_excerpt_button(
- IconName::ArrowUpFromLine,
- Some(group_name.to_string()),
- cx,
- )),
- )
- .on_click(window.listener_for(&self.editor, {
- let excerpt_id = next_excerpt.id;
- let direction = ExpandExcerptDirection::Up;
- move |editor, _, _, cx| {
- editor.expand_excerpt(excerpt_id, direction, cx);
- cx.stop_propagation();
- }
- })),
- ),
- );
+ result = result.child(self.render_expand_excerpt_control(
+ block_id,
+ ExpandExcerptDirection::Up,
+ next_excerpt.id,
+ gutter_dimensions,
+ window,
+ cx,
+ ));
}
} else {
- let group_name = "expand-up-subsequent";
-
if *show_excerpt_controls {
result = result.child(
h_flex()
.relative()
- .group(group_name)
.child(
div()
.top(px(0.))
@@ -2599,55 +2499,14 @@ impl EditorElement {
.h_px()
.bg(color.border_variant),
)
- .child(
- expand_area(format!("block-{}-up", block_id).into())
- .h(header_height)
- .child(
- h_flex()
- .w(icon_offset)
- .h(header_height)
- .flex_none()
- .justify_end()
- .child(if *show_excerpt_controls {
- self.render_expand_excerpt_button(
- IconName::ArrowUpFromLine,
- Some(group_name.to_string()),
- cx,
- )
- } else {
- ButtonLike::new("jump-icon")
- .style(ButtonStyle::Transparent)
- .child(
- svg()
- .path(
- IconName::ArrowUpRight
- .path(),
- )
- .size(IconSize::XSmall.rems())
- .text_color(
- color.border_variant,
- )
- .group_hover(
- group_name,
- |style| {
- style.text_color(
- color.border,
- )
- },
- ),
- )
- }),
- )
- .on_click(window.listener_for(&self.editor, {
- let excerpt_id = next_excerpt.id;
- let direction = ExpandExcerptDirection::Up;
- move |editor, _, _, cx| {
- editor
- .expand_excerpt(excerpt_id, direction, cx);
- cx.stop_propagation();
- }
- })),
- ),
+ .child(self.render_expand_excerpt_control(
+ block_id,
+ ExpandExcerptDirection::Up,
+ next_excerpt.id,
+ gutter_dimensions,
+ window,
+ cx,
+ )),
);
}
};
@@ -2775,6 +2634,16 @@ impl EditorElement {
),
)
})
+ .children(
+ self.editor
+ .read(cx)
+ .addons
+ .values()
+ .filter_map(|addon| {
+ addon.render_buffer_header_controls(for_excerpt, window, cx)
+ })
+ .take(1),
+ )
.child(
h_flex()
.cursor_pointer()
@@ -2824,26 +2693,93 @@ impl EditorElement {
)
}
- fn render_expand_excerpt_button(
+ fn render_expand_excerpt_control(
&self,
- icon: IconName,
- group_name: impl Into<Option<String>>,
+ block_id: BlockId,
+ direction: ExpandExcerptDirection,
+ excerpt_id: ExcerptId,
+ gutter_dimensions: &GutterDimensions,
+ window: &Window,
cx: &mut App,
- ) -> ButtonLike {
- let group_name = group_name.into();
- ButtonLike::new("expand-icon")
- .style(ButtonStyle::Transparent)
- .child(
- svg()
- .path(icon.path())
- .size(IconSize::XSmall.rems())
- .text_color(cx.theme().colors().editor_line_number)
- .when_some(group_name, |svg, group_name| {
- svg.group_hover(group_name, |style| {
- style.text_color(cx.theme().colors().editor_active_line_number)
- })
- }),
+ ) -> impl IntoElement {
+ let color = cx.theme().colors().clone();
+ let hover_color = color.border_variant.opacity(0.5);
+ let focus_handle = self.editor.focus_handle(cx).clone();
+
+ let icon_offset =
+ gutter_dimensions.width - (gutter_dimensions.left_padding + gutter_dimensions.margin);
+ let header_height = MULTI_BUFFER_EXCERPT_HEADER_HEIGHT as f32 * window.line_height();
+ let group_name = if direction == ExpandExcerptDirection::Down {
+ "expand-down"
+ } else {
+ "expand-up"
+ };
+
+ let expand_area = |id: SharedString| {
+ h_flex()
+ .id(id)
+ .w_full()
+ .cursor_pointer()
+ .block_mouse_down()
+ .on_mouse_move(|_, _, cx| cx.stop_propagation())
+ .hover(|style| style.bg(hover_color))
+ .tooltip({
+ let focus_handle = focus_handle.clone();
+ move |window, cx| {
+ Tooltip::for_action_in(
+ "Expand Excerpt",
+ &ExpandExcerpts { lines: 0 },
+ &focus_handle,
+ window,
+ cx,
+ )
+ }
+ })
+ };
+
+ expand_area(
+ format!(
+ "block-{}-{}",
+ block_id,
+ if direction == ExpandExcerptDirection::Down {
+ "down"
+ } else {
+ "up"
+ }
)
+ .into(),
+ )
+ .group(group_name)
+ .child(
+ h_flex()
+ .w(icon_offset)
+ .h(header_height)
+ .flex_none()
+ .justify_end()
+ .child(
+ ButtonLike::new("expand-icon")
+ .style(ButtonStyle::Transparent)
+ .child(
+ svg()
+ .path(if direction == ExpandExcerptDirection::Down {
+ IconName::ArrowDownFromLine.path()
+ } else {
+ IconName::ArrowUpFromLine.path()
+ })
+ .size(IconSize::XSmall.rems())
+ .text_color(cx.theme().colors().editor_line_number)
+ .group_hover(group_name, |style| {
+ style.text_color(cx.theme().colors().editor_active_line_number)
+ }),
+ ),
+ ),
+ )
+ .on_click(window.listener_for(&self.editor, {
+ move |editor, _, _, cx| {
+ editor.expand_excerpt(excerpt_id, direction, cx);
+ cx.stop_propagation();
+ }
+ }))
}
#[allow(clippy::too_many_arguments)]
@@ -3174,7 +3110,10 @@ impl EditorElement {
{
let editor = self.editor.read(cx);
- if editor.has_active_completions_menu() && editor.show_inline_completions_in_menu(cx) {
+ if editor.inline_completion_visible_in_cursor_popover(
+ editor.has_active_inline_completion(),
+ cx,
+ ) {
height_above_menu +=
editor.edit_prediction_cursor_popover_height() + POPOVER_Y_PADDING;
edit_prediction_popover_visible = true;
@@ -3235,7 +3174,7 @@ impl EditorElement {
};
let mut element = self
.render_context_menu(line_height, menu_height, y_flipped, window, cx)
- .unwrap();
+ .expect("Visible context menu should always render.");
let size = element.layout_as_root(AvailableSpace::min_size(), window, cx);
Some((CursorPopoverType::CodeContextMenu, element, size))
} else {
@@ -3258,7 +3197,7 @@ impl EditorElement {
#[cfg(target_os = "macos")]
{
// let bindings = window.bindings_for_action_in(
- // &crate::AcceptInlineCompletion,
+ // &crate::AcceptEditPrediction,
// &self.editor.focus_handle(cx),
// );
@@ -3300,7 +3239,6 @@ impl EditorElement {
min_width,
max_width,
cursor_point,
- &line_layouts,
style,
accept_keystroke.as_ref()?,
window,
@@ -3392,8 +3330,12 @@ impl EditorElement {
window: &mut Window,
cx: &mut App,
) {
+ let editor = self.editor.read(cx);
+ if !editor.context_menu_visible() {
+ return;
+ }
let Some(crate::ContextMenuOrigin::GutterIndicator(gutter_row)) =
- self.editor.read(cx).context_menu_origin()
+ editor.context_menu_origin()
else {
return;
};
@@ -3421,11 +3363,9 @@ impl EditorElement {
window,
cx,
move |height, _max_width_for_stable_x, y_flipped, window, cx| {
- let Some(mut element) =
- self.render_context_menu(line_height, height, y_flipped, window, cx)
- else {
- return vec![];
- };
+ let mut element = self
+ .render_context_menu(line_height, height, y_flipped, window, cx)
+ .expect("Visible context menu should always render.");
let size = element.layout_as_root(AvailableSpace::min_size(), window, cx);
vec![(CursorPopoverType::CodeContextMenu, element, size)]
},
@@ -3677,7 +3617,12 @@ impl EditorElement {
const PADDING_X: Pixels = Pixels(24.);
const PADDING_Y: Pixels = Pixels(2.);
- let active_inline_completion = self.editor.read(cx).active_inline_completion.as_ref()?;
+ let editor = self.editor.read(cx);
+ let active_inline_completion = editor.active_inline_completion.as_ref()?;
+
+ if editor.inline_completion_visible_in_cursor_popover(true, cx) {
+ return None;
+ }
match &active_inline_completion.completion {
InlineCompletion::Move { target, .. } => {
@@ -3691,7 +3636,7 @@ impl EditorElement {
self.editor.focus_handle(cx),
window,
cx,
- );
+ )?;
let size = element.layout_as_root(AvailableSpace::min_size(), window, cx);
let offset = point((text_bounds.size.width - size.width) / 2., PADDING_Y);
element.prepaint_at(text_bounds.origin + offset, window, cx);
@@ -3704,7 +3649,7 @@ impl EditorElement {
self.editor.focus_handle(cx),
window,
cx,
- );
+ )?;
let size = element.layout_as_root(AvailableSpace::min_size(), window, cx);
let offset = point(
(text_bounds.size.width - size.width) / 2.,
@@ -3720,7 +3665,7 @@ impl EditorElement {
self.editor.focus_handle(cx),
window,
cx,
- );
+ )?;
let target_line_end = DisplayPoint::new(
target_display_point.row(),
@@ -3744,7 +3689,7 @@ impl EditorElement {
display_mode,
snapshot,
} => {
- if self.editor.read(cx).has_active_completions_menu() {
+ if self.editor.read(cx).has_visible_completions_menu() {
return None;
}
@@ -3768,8 +3713,7 @@ impl EditorElement {
}
match display_mode {
- EditDisplayMode::TabAccept(previewing) => {
- let previewing = *previewing;
+ EditDisplayMode::TabAccept => {
let range = &edits.first()?.0;
let target_display_point = range.end.to_display_point(editor_snapshot);
@@ -3777,18 +3721,26 @@ impl EditorElement {
target_display_point.row(),
editor_snapshot.line_len(target_display_point.row()),
);
- let origin = self.editor.update(cx, |editor, _cx| {
- editor.display_to_pixel_point(target_line_end, editor_snapshot, window)
- })?;
+ let (previewing_inline_completion, origin) =
+ self.editor.update(cx, |editor, _cx| {
+ Some((
+ editor.previewing_inline_completion,
+ editor.display_to_pixel_point(
+ target_line_end,
+ editor_snapshot,
+ window,
+ )?,
+ ))
+ })?;
let mut element = inline_completion_accept_indicator(
"Accept",
None,
- previewing,
+ previewing_inline_completion,
self.editor.focus_handle(cx),
window,
cx,
- );
+ )?;
element.prepaint_as_root(
text_bounds.origin + origin + point(PADDING_X, px(0.)),
@@ -4107,8 +4059,7 @@ impl EditorElement {
window: &mut Window,
cx: &mut App,
) -> Vec<AnyElement> {
- let point_for_position =
- position_map.point_for_position(text_hitbox.bounds, window.mouse_position());
+ let point_for_position = position_map.point_for_position(window.mouse_position());
let mut controls = vec![];
@@ -4245,7 +4196,10 @@ impl EditorElement {
let scroll_top = layout.position_map.snapshot.scroll_position().y;
let gutter_bg = cx.theme().colors().editor_gutter_background;
window.paint_quad(fill(layout.gutter_hitbox.bounds, gutter_bg));
- window.paint_quad(fill(layout.text_hitbox.bounds, self.style.background));
+ window.paint_quad(fill(
+ layout.position_map.text_hitbox.bounds,
+ self.style.background,
+ ));
if let EditorMode::Full = layout.mode {
let mut active_rows = layout.active_rows.iter().peekable();
@@ -4270,8 +4224,8 @@ impl EditorElement {
end: layout.gutter_hitbox.right(),
}),
CurrentLineHighlight::Line => Some(Range {
- start: layout.text_hitbox.bounds.left(),
- end: layout.text_hitbox.bounds.right(),
+ start: layout.position_map.text_hitbox.bounds.left(),
+ end: layout.position_map.text_hitbox.bounds.right(),
}),
CurrentLineHighlight::All => Some(Range {
start: layout.hitbox.left(),
@@ -4345,7 +4299,7 @@ impl EditorElement {
layout.position_map.snapshot.scroll_position().x * layout.position_map.em_width;
for (wrap_position, active) in layout.wrap_guides.iter() {
- let x = (layout.text_hitbox.origin.x
+ let x = (layout.position_map.text_hitbox.origin.x
+ *wrap_position
+ layout.position_map.em_width / 2.)
- scroll_left;
@@ -4357,7 +4311,7 @@ impl EditorElement {
|| scrollbar_y.as_ref().map_or(false, |sy| sy.visible)
};
- if x < layout.text_hitbox.origin.x
+ if x < layout.position_map.text_hitbox.origin.x
|| (show_scrollbars && x > self.scrollbar_left(&layout.hitbox.bounds))
{
continue;
@@ -4370,8 +4324,8 @@ impl EditorElement {
};
window.paint_quad(fill(
Bounds {
- origin: point(x, layout.text_hitbox.origin.y),
- size: size(px(1.), layout.text_hitbox.size.height),
+ origin: point(x, layout.position_map.text_hitbox.origin.y),
+ size: size(px(1.), layout.position_map.text_hitbox.size.height),
},
color,
));
@@ -4746,7 +4700,7 @@ impl EditorElement {
fn paint_text(&mut self, layout: &mut EditorLayout, window: &mut Window, cx: &mut App) {
window.with_content_mask(
Some(ContentMask {
- bounds: layout.text_hitbox.bounds,
+ bounds: layout.position_map.text_hitbox.bounds,
}),
|window| {
let cursor_style = if self
@@ -4760,7 +4714,7 @@ impl EditorElement {
} else {
CursorStyle::IBeam
};
- window.set_cursor_style(cursor_style, &layout.text_hitbox);
+ window.set_cursor_style(cursor_style, &layout.position_map.text_hitbox);
let invisible_display_ranges = self.paint_highlights(layout, window);
self.paint_lines(&invisible_display_ranges, layout, window, cx);
@@ -4782,7 +4736,7 @@ impl EditorElement {
layout: &mut EditorLayout,
window: &mut Window,
) -> SmallVec<[Range<DisplayPoint>; 32]> {
- window.paint_layer(layout.text_hitbox.bounds, |window| {
+ window.paint_layer(layout.position_map.text_hitbox.bounds, |window| {
let mut invisible_display_ranges = SmallVec::<[Range<DisplayPoint>; 32]>::new();
let line_end_overshoot = 0.15 * layout.position_map.line_height;
for (range, color) in &layout.highlighted_ranges {
@@ -4861,7 +4815,7 @@ impl EditorElement {
// A softer than perfect black
let redaction_color = gpui::rgb(0x0e1111);
- window.paint_layer(layout.text_hitbox.bounds, |window| {
+ window.paint_layer(layout.position_map.text_hitbox.bounds, |window| {
for range in layout.redacted_ranges.iter() {
self.paint_highlighted_range(
range.clone(),
@@ -5307,7 +5261,7 @@ impl EditorElement {
if scrollbar_settings.diagnostics != ScrollbarDiagnostics::None {
let diagnostics = snapshot
.buffer_snapshot
- .diagnostics_in_range::<_, Point>(Point::zero()..max_point)
+ .diagnostics_in_range::<Point>(Point::zero()..max_point)
// Don't show diagnostics the user doesn't care about
.filter(|diagnostic| {
match (
@@ -5435,13 +5389,13 @@ impl EditorElement {
.collect(),
};
- highlighted_range.paint(layout.text_hitbox.bounds, window);
+ highlighted_range.paint(layout.position_map.text_hitbox.bounds, window);
}
}
fn paint_inline_blame(&mut self, layout: &mut EditorLayout, window: &mut Window, cx: &mut App) {
if let Some(mut inline_blame) = layout.inline_blame.take() {
- window.paint_layer(layout.text_hitbox.bounds, |window| {
+ window.paint_layer(layout.position_map.text_hitbox.bounds, |window| {
inline_blame.paint(window, cx);
})
}
@@ -5560,8 +5514,6 @@ impl EditorElement {
window.on_mouse_event({
let position_map = layout.position_map.clone();
let editor = self.editor.clone();
- let text_hitbox = layout.text_hitbox.clone();
- let gutter_hitbox = layout.gutter_hitbox.clone();
let multi_buffer_range =
layout
.display_hunks
@@ -5600,32 +5552,16 @@ impl EditorElement {
event,
multi_buffer_range.clone(),
&position_map,
- &text_hitbox,
- &gutter_hitbox,
line_numbers.as_ref(),
window,
cx,
);
}),
MouseButton::Right => editor.update(cx, |editor, cx| {
- Self::mouse_right_down(
- editor,
- event,
- &position_map,
- &text_hitbox,
- window,
- cx,
- );
+ Self::mouse_right_down(editor, event, &position_map, window, cx);
}),
MouseButton::Middle => editor.update(cx, |editor, cx| {
- Self::mouse_middle_down(
- editor,
- event,
- &position_map,
- &text_hitbox,
- window,
- cx,
- );
+ Self::mouse_middle_down(editor, event, &position_map, window, cx);
}),
_ => {}
};
@@ -5636,12 +5572,11 @@ impl EditorElement {
window.on_mouse_event({
let editor = self.editor.clone();
let position_map = layout.position_map.clone();
- let text_hitbox = layout.text_hitbox.clone();
move |event: &MouseUpEvent, phase, window, cx| {
if phase == DispatchPhase::Bubble {
editor.update(cx, |editor, cx| {
- Self::mouse_up(editor, event, &position_map, &text_hitbox, window, cx)
+ Self::mouse_up(editor, event, &position_map, window, cx)
});
}
}
@@ -5650,8 +5585,6 @@ impl EditorElement {
window.on_mouse_event({
let editor = self.editor.clone();
let position_map = layout.position_map.clone();
- let text_hitbox = layout.text_hitbox.clone();
-
let mut captured_mouse_down = None;
move |event: &MouseUpEvent, phase, window, cx| match phase {
@@ -5665,7 +5598,7 @@ impl EditorElement {
.clone();
let mut pending_mouse_down = pending_mouse_down.borrow_mut();
- if pending_mouse_down.is_some() && text_hitbox.is_hovered(window) {
+ if pending_mouse_down.is_some() && position_map.text_hitbox.is_hovered(window) {
captured_mouse_down = pending_mouse_down.take();
window.refresh();
}
@@ -5677,7 +5610,7 @@ impl EditorElement {
down: mouse_down,
up: event.clone(),
};
- Self::click(editor, &event, &position_map, &text_hitbox, window, cx);
+ Self::click(editor, &event, &position_map, window, cx);
}
}),
}
@@ -5686,8 +5619,6 @@ impl EditorElement {
window.on_mouse_event({
let position_map = layout.position_map.clone();
let editor = self.editor.clone();
- let text_hitbox = layout.text_hitbox.clone();
- let gutter_hitbox = layout.gutter_hitbox.clone();
move |event: &MouseMoveEvent, phase, window, cx| {
if phase == DispatchPhase::Bubble {
@@ -5698,25 +5629,10 @@ impl EditorElement {
if event.pressed_button == Some(MouseButton::Left)
|| event.pressed_button == Some(MouseButton::Middle)
{
- Self::mouse_dragged(
- editor,
- event,
- &position_map,
- text_hitbox.bounds,
- window,
- cx,
- )
+ Self::mouse_dragged(editor, event, &position_map, window, cx)
}
- Self::mouse_moved(
- editor,
- event,
- &position_map,
- &text_hitbox,
- &gutter_hitbox,
- window,
- cx,
- )
+ Self::mouse_moved(editor, event, &position_map, window, cx)
});
}
}
@@ -560,7 +560,7 @@ mod tests {
use settings::SettingsStore;
use std::{cmp, env, ops::Range, path::Path};
use unindent::Unindent as _;
- use util::RandomCharIter;
+ use util::{path, RandomCharIter};
// macro_rules! assert_blame_rows {
// ($blame:expr, $rows:expr, $expected:expr, $cx:expr) => {
@@ -697,7 +697,7 @@ mod tests {
fs.set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(
- Path::new("file.txt"),
+ "file.txt".into(),
Blame {
entries: vec![
blame_entry("1b1b1b", 0..1),
@@ -793,7 +793,7 @@ mod tests {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/my-repo",
+ path!("/my-repo"),
json!({
".git": {},
"file.txt": r#"
@@ -807,9 +807,9 @@ mod tests {
.await;
fs.set_blame_for_repo(
- Path::new("/my-repo/.git"),
+ Path::new(path!("/my-repo/.git")),
vec![(
- Path::new("file.txt"),
+ "file.txt".into(),
Blame {
entries: vec![blame_entry("1b1b1b", 0..4)],
..Default::default()
@@ -817,10 +817,10 @@ mod tests {
)],
);
- let project = Project::test(fs, ["/my-repo".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/my-repo").as_ref()], cx).await;
let buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer("/my-repo/file.txt", cx)
+ project.open_local_buffer(path!("/my-repo/file.txt"), cx)
})
.await
.unwrap();
@@ -945,7 +945,7 @@ mod tests {
log::info!("initial buffer text: {:?}", buffer_initial_text);
fs.insert_tree(
- "/my-repo",
+ path!("/my-repo"),
json!({
".git": {},
"file.txt": buffer_initial_text.to_string()
@@ -956,9 +956,9 @@ mod tests {
let blame_entries = gen_blame_entries(buffer_initial_text.max_point().row, &mut rng);
log::info!("initial blame entries: {:?}", blame_entries);
fs.set_blame_for_repo(
- Path::new("/my-repo/.git"),
+ Path::new(path!("/my-repo/.git")),
vec![(
- Path::new("file.txt"),
+ "file.txt".into(),
Blame {
entries: blame_entries,
..Default::default()
@@ -966,10 +966,10 @@ mod tests {
)],
);
- let project = Project::test(fs.clone(), ["/my-repo".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/my-repo").as_ref()], cx).await;
let buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer("/my-repo/file.txt", cx)
+ project.open_local_buffer(path!("/my-repo/file.txt"), cx)
})
.await
.unwrap();
@@ -998,9 +998,9 @@ mod tests {
log::info!("regenerating blame entries: {:?}", blame_entries);
fs.set_blame_for_repo(
- Path::new("/my-repo/.git"),
+ Path::new(path!("/my-repo/.git")),
vec![(
- Path::new("file.txt"),
+ "file.txt".into(),
Blame {
entries: blame_entries,
..Default::default()
@@ -921,7 +921,7 @@ mod tests {
use indoc::indoc;
use language::language_settings::InlayHintSettings;
use lsp::request::{GotoDefinition, GotoTypeDefinition};
- use util::assert_set_eq;
+ use util::{assert_set_eq, path};
use workspace::item::Item;
#[gpui::test]
@@ -1574,18 +1574,31 @@ mod tests {
// Insert a new file
let fs = cx.update_workspace(|workspace, _, cx| workspace.project().read(cx).fs().clone());
fs.as_fake()
- .insert_file("/root/dir/file2.rs", "This is file2.rs".as_bytes().to_vec())
+ .insert_file(
+ path!("/root/dir/file2.rs"),
+ "This is file2.rs".as_bytes().to_vec(),
+ )
.await;
+ #[cfg(not(target_os = "windows"))]
cx.set_state(indoc! {"
You can't go to a file that does_not_exist.txt.
Go to file2.rs if you want.
Or go to ../dir/file2.rs if you want.
Or go to /root/dir/file2.rs if project is local.
Or go to /root/dir/file2 if this is a Rust file.ˇ
+ "});
+ #[cfg(target_os = "windows")]
+ cx.set_state(indoc! {"
+ You can't go to a file that does_not_exist.txt.
+ Go to file2.rs if you want.
+ Or go to ../dir/file2.rs if you want.
+ Or go to C:/root/dir/file2.rs if project is local.
+ Or go to C:/root/dir/file2 if this is a Rust file.ˇ
"});
// File does not exist
+ #[cfg(not(target_os = "windows"))]
let screen_coord = cx.pixel_position(indoc! {"
You can't go to a file that dˇoes_not_exist.txt.
Go to file2.rs if you want.
@@ -1593,6 +1606,14 @@ mod tests {
Or go to /root/dir/file2.rs if project is local.
Or go to /root/dir/file2 if this is a Rust file.
"});
+ #[cfg(target_os = "windows")]
+ let screen_coord = cx.pixel_position(indoc! {"
+ You can't go to a file that dˇoes_not_exist.txt.
+ Go to file2.rs if you want.
+ Or go to ../dir/file2.rs if you want.
+ Or go to C:/root/dir/file2.rs if project is local.
+ Or go to C:/root/dir/file2 if this is a Rust file.
+ "});
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
// No highlight
cx.update_editor(|editor, window, cx| {
@@ -1605,6 +1626,7 @@ mod tests {
});
// Moving the mouse over a file that does exist should highlight it.
+ #[cfg(not(target_os = "windows"))]
let screen_coord = cx.pixel_position(indoc! {"
You can't go to a file that does_not_exist.txt.
Go to fˇile2.rs if you want.
@@ -1612,8 +1634,17 @@ mod tests {
Or go to /root/dir/file2.rs if project is local.
Or go to /root/dir/file2 if this is a Rust file.
"});
+ #[cfg(target_os = "windows")]
+ let screen_coord = cx.pixel_position(indoc! {"
+ You can't go to a file that does_not_exist.txt.
+ Go to fˇile2.rs if you want.
+ Or go to ../dir/file2.rs if you want.
+ Or go to C:/root/dir/file2.rs if project is local.
+ Or go to C:/root/dir/file2 if this is a Rust file.
+ "});
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
+ #[cfg(not(target_os = "windows"))]
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
You can't go to a file that does_not_exist.txt.
Go to «file2.rsˇ» if you want.
@@ -1621,8 +1652,17 @@ mod tests {
Or go to /root/dir/file2.rs if project is local.
Or go to /root/dir/file2 if this is a Rust file.
"});
+ #[cfg(target_os = "windows")]
+ cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ You can't go to a file that does_not_exist.txt.
+ Go to «file2.rsˇ» if you want.
+ Or go to ../dir/file2.rs if you want.
+ Or go to C:/root/dir/file2.rs if project is local.
+ Or go to C:/root/dir/file2 if this is a Rust file.
+ "});
// Moving the mouse over a relative path that does exist should highlight it
+ #[cfg(not(target_os = "windows"))]
let screen_coord = cx.pixel_position(indoc! {"
You can't go to a file that does_not_exist.txt.
Go to file2.rs if you want.
@@ -1630,8 +1670,17 @@ mod tests {
Or go to /root/dir/file2.rs if project is local.
Or go to /root/dir/file2 if this is a Rust file.
"});
+ #[cfg(target_os = "windows")]
+ let screen_coord = cx.pixel_position(indoc! {"
+ You can't go to a file that does_not_exist.txt.
+ Go to file2.rs if you want.
+ Or go to ../dir/fˇile2.rs if you want.
+ Or go to C:/root/dir/file2.rs if project is local.
+ Or go to C:/root/dir/file2 if this is a Rust file.
+ "});
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
+ #[cfg(not(target_os = "windows"))]
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
You can't go to a file that does_not_exist.txt.
Go to file2.rs if you want.
@@ -1639,8 +1688,17 @@ mod tests {
Or go to /root/dir/file2.rs if project is local.
Or go to /root/dir/file2 if this is a Rust file.
"});
+ #[cfg(target_os = "windows")]
+ cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ You can't go to a file that does_not_exist.txt.
+ Go to file2.rs if you want.
+ Or go to «../dir/file2.rsˇ» if you want.
+ Or go to C:/root/dir/file2.rs if project is local.
+ Or go to C:/root/dir/file2 if this is a Rust file.
+ "});
// Moving the mouse over an absolute path that does exist should highlight it
+ #[cfg(not(target_os = "windows"))]
let screen_coord = cx.pixel_position(indoc! {"
You can't go to a file that does_not_exist.txt.
Go to file2.rs if you want.
@@ -1649,7 +1707,17 @@ mod tests {
Or go to /root/dir/file2 if this is a Rust file.
"});
+ #[cfg(target_os = "windows")]
+ let screen_coord = cx.pixel_position(indoc! {"
+ You can't go to a file that does_not_exist.txt.
+ Go to file2.rs if you want.
+ Or go to ../dir/file2.rs if you want.
+ Or go to C:/root/diˇr/file2.rs if project is local.
+ Or go to C:/root/dir/file2 if this is a Rust file.
+ "});
+
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
+ #[cfg(not(target_os = "windows"))]
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
You can't go to a file that does_not_exist.txt.
Go to file2.rs if you want.
@@ -1657,8 +1725,17 @@ mod tests {
Or go to «/root/dir/file2.rsˇ» if project is local.
Or go to /root/dir/file2 if this is a Rust file.
"});
+ #[cfg(target_os = "windows")]
+ cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ You can't go to a file that does_not_exist.txt.
+ Go to file2.rs if you want.
+ Or go to ../dir/file2.rs if you want.
+ Or go to «C:/root/dir/file2.rsˇ» if project is local.
+ Or go to C:/root/dir/file2 if this is a Rust file.
+ "});
// Moving the mouse over a path that exists, if we add the language-specific suffix, it should highlight it
+ #[cfg(not(target_os = "windows"))]
let screen_coord = cx.pixel_position(indoc! {"
You can't go to a file that does_not_exist.txt.
Go to file2.rs if you want.
@@ -1666,8 +1743,17 @@ mod tests {
Or go to /root/dir/file2.rs if project is local.
Or go to /root/diˇr/file2 if this is a Rust file.
"});
+ #[cfg(target_os = "windows")]
+ let screen_coord = cx.pixel_position(indoc! {"
+ You can't go to a file that does_not_exist.txt.
+ Go to file2.rs if you want.
+ Or go to ../dir/file2.rs if you want.
+ Or go to C:/root/dir/file2.rs if project is local.
+ Or go to C:/root/diˇr/file2 if this is a Rust file.
+ "});
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
+ #[cfg(not(target_os = "windows"))]
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
You can't go to a file that does_not_exist.txt.
Go to file2.rs if you want.
@@ -1675,6 +1761,14 @@ mod tests {
Or go to /root/dir/file2.rs if project is local.
Or go to «/root/dir/file2ˇ» if this is a Rust file.
"});
+ #[cfg(target_os = "windows")]
+ cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ You can't go to a file that does_not_exist.txt.
+ Go to file2.rs if you want.
+ Or go to ../dir/file2.rs if you want.
+ Or go to C:/root/dir/file2.rs if project is local.
+ Or go to «C:/root/dir/file2ˇ» if this is a Rust file.
+ "});
cx.simulate_click(screen_coord, Modifiers::secondary_key());
@@ -1692,7 +1786,10 @@ mod tests {
let file = buffer.read(cx).file().unwrap();
let file_path = file.as_local().unwrap().abs_path(cx);
- assert_eq!(file_path.to_str().unwrap(), "/root/dir/file2.rs");
+ assert_eq!(
+ file_path,
+ std::path::PathBuf::from(path!("/root/dir/file2.rs"))
+ );
});
}
@@ -279,9 +279,10 @@ fn show_hover(
delay.await;
}
+ let offset = anchor.to_offset(&snapshot.buffer_snapshot);
let local_diagnostic = snapshot
.buffer_snapshot
- .diagnostics_in_range::<_, usize>(anchor..anchor)
+ .diagnostics_in_range::<usize>(offset..offset)
// Find the entry with the most specific range
.min_by_key(|entry| entry.range.len());
@@ -597,7 +598,7 @@ async fn parse_blocks(
},
syntax: cx.theme().syntax().clone(),
selection_background_color: { cx.theme().players().local().selection },
- break_style: Default::default(),
+
heading: StyleRefinement::default()
.font_weight(FontWeight::BOLD)
.text_base()
@@ -884,8 +885,10 @@ mod tests {
let slice = data;
for (range, event) in slice.iter() {
- if [MarkdownEvent::Text, MarkdownEvent::Code].contains(event) {
- rendered_text.push_str(&text[range.clone()])
+ match event {
+ MarkdownEvent::Text(parsed) => rendered_text.push_str(parsed),
+ MarkdownEvent::Code => rendered_text.push_str(&text[range.clone()]),
+ _ => {}
}
}
}
@@ -56,7 +56,7 @@ pub(super) struct ExpandedHunk {
pub(crate) struct DiffMapSnapshot(TreeMap<BufferId, git::diff::BufferDiff>);
pub(crate) struct DiffBaseState {
- pub(crate) change_set: Model<BufferChangeSet>,
+ pub(crate) diff: Model<BufferChangeSet>,
pub(crate) last_version: Option<usize>,
_subscription: Subscription,
}
@@ -80,38 +80,29 @@ impl DiffMap {
self.snapshot.clone()
}
- pub fn add_change_set(
+ pub fn add_diff(
&mut self,
- change_set: Model<BufferChangeSet>,
+ diff: Model<BufferChangeSet>,
window: &mut Window,
cx: &mut Context<Editor>,
) {
- let buffer_id = change_set.read(cx).buffer_id;
+ let buffer_id = diff.read(cx).buffer_id;
self.snapshot
.0
- .insert(buffer_id, change_set.read(cx).diff_to_buffer.clone());
+ .insert(buffer_id, diff.read(cx).diff_to_buffer.clone());
self.diff_bases.insert(
buffer_id,
DiffBaseState {
last_version: None,
- _subscription: cx.observe_in(
- &change_set,
- window,
- move |editor, change_set, window, cx| {
- editor
- .diff_map
- .snapshot
- .0
- .insert(buffer_id, change_set.read(cx).diff_to_buffer.clone());
- Editor::sync_expanded_diff_hunks(
- &mut editor.diff_map,
- buffer_id,
- window,
- cx,
- );
- },
- ),
- change_set,
+ _subscription: cx.observe_in(&diff, window, move |editor, diff, window, cx| {
+ editor
+ .diff_map
+ .snapshot
+ .0
+ .insert(buffer_id, diff.read(cx).diff_to_buffer.clone());
+ Editor::sync_expanded_diff_hunks(&mut editor.diff_map, buffer_id, window, cx);
+ }),
+ diff,
},
);
Editor::sync_expanded_diff_hunks(self, buffer_id, window, cx);
@@ -399,7 +390,7 @@ impl Editor {
self.diff_map
.diff_bases
.get(&buffer_id)?
- .change_set
+ .diff
.read(cx)
.base_text
.clone()
@@ -953,12 +944,12 @@ impl Editor {
let mut diff_base_buffer = None;
let mut diff_base_buffer_unchanged = true;
if let Some(diff_base_state) = diff_base_state {
- diff_base_state.change_set.update(cx, |change_set, _| {
- if diff_base_state.last_version != Some(change_set.base_text_version) {
- diff_base_state.last_version = Some(change_set.base_text_version);
+ diff_base_state.diff.update(cx, |diff, _| {
+ if diff_base_state.last_version != Some(diff.base_text_version) {
+ diff_base_state.last_version = Some(diff.base_text_version);
diff_base_buffer_unchanged = false;
}
- diff_base_buffer = change_set.base_text.clone();
+ diff_base_buffer = diff.base_text.clone();
})
}
@@ -1498,14 +1489,14 @@ mod tests {
(buffer_1.clone(), diff_base_1),
(buffer_2.clone(), diff_base_2),
] {
- let change_set = cx.new(|cx| {
+ let diff = cx.new(|cx| {
BufferChangeSet::new_with_base_text(
diff_base.to_string(),
buffer.read(cx).text_snapshot(),
cx,
)
});
- editor.diff_map.add_change_set(change_set, window, cx)
+ editor.diff_map.add_diff(diff, window, cx)
}
})
.unwrap();
@@ -1274,6 +1274,7 @@ pub mod tests {
use settings::SettingsStore;
use std::sync::atomic::{AtomicBool, AtomicU32, AtomicUsize, Ordering};
use text::Point;
+ use util::path;
use super::*;
@@ -1499,7 +1500,7 @@ pub mod tests {
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/a",
+ path!("/a"),
json!({
"main.rs": "fn main() { a } // and some long comment to ensure inlays are not trimmed out",
"other.md": "Test md file with some text",
@@ -1507,7 +1508,7 @@ pub mod tests {
)
.await;
- let project = Project::test(fs, ["/a".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
let mut rs_fake_servers = None;
@@ -1542,14 +1543,16 @@ pub mod tests {
"Rust" => {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path("/a/main.rs").unwrap(),
+ lsp::Url::from_file_path(path!("/a/main.rs"))
+ .unwrap(),
);
rs_lsp_request_count.fetch_add(1, Ordering::Release) + 1
}
"Markdown" => {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path("/a/other.md").unwrap(),
+ lsp::Url::from_file_path(path!("/a/other.md"))
+ .unwrap(),
);
md_lsp_request_count.fetch_add(1, Ordering::Release) + 1
}
@@ -1585,7 +1588,7 @@ pub mod tests {
let rs_buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer("/a/main.rs", cx)
+ project.open_local_buffer(path!("/a/main.rs"), cx)
})
.await
.unwrap();
@@ -1611,7 +1614,7 @@ pub mod tests {
cx.executor().run_until_parked();
let md_buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer("/a/other.md", cx)
+ project.open_local_buffer(path!("/a/other.md"), cx)
})
.await
.unwrap();
@@ -2173,7 +2176,7 @@ pub mod tests {
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/a",
+ path!("/a"),
json!({
"main.rs": format!("fn main() {{\n{}\n}}", "let i = 5;\n".repeat(500)),
"other.rs": "// Test file",
@@ -2181,7 +2184,7 @@ pub mod tests {
)
.await;
- let project = Project::test(fs, ["/a".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
@@ -2209,7 +2212,7 @@ pub mod tests {
async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path("/a/main.rs").unwrap(),
+ lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
);
task_lsp_request_ranges.lock().push(params.range);
@@ -2237,7 +2240,7 @@ pub mod tests {
let buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer("/a/main.rs", cx)
+ project.open_local_buffer(path!("/a/main.rs"), cx)
})
.await
.unwrap();
@@ -2471,7 +2474,7 @@ pub mod tests {
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/a",
+ path!("/a"),
json!({
"main.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|i| format!("let i = {i};\n")).collect::<Vec<_>>().join("")),
"other.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|j| format!("let j = {j};\n")).collect::<Vec<_>>().join("")),
@@ -2479,7 +2482,7 @@ pub mod tests {
)
.await;
- let project = Project::test(fs, ["/a".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
let language = rust_lang();
@@ -2497,13 +2500,13 @@ pub mod tests {
let (buffer_1, _handle1) = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/a/main.rs", cx)
+ project.open_local_buffer_with_lsp(path!("/a/main.rs"), cx)
})
.await
.unwrap();
let (buffer_2, _handle2) = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/a/other.rs", cx)
+ project.open_local_buffer_with_lsp(path!("/a/other.rs"), cx)
})
.await
.unwrap();
@@ -2585,11 +2588,11 @@ pub mod tests {
let task_editor_edited = Arc::clone(&closure_editor_edited);
async move {
let hint_text = if params.text_document.uri
- == lsp::Url::from_file_path("/a/main.rs").unwrap()
+ == lsp::Url::from_file_path(path!("/a/main.rs")).unwrap()
{
"main hint"
} else if params.text_document.uri
- == lsp::Url::from_file_path("/a/other.rs").unwrap()
+ == lsp::Url::from_file_path(path!("/a/other.rs")).unwrap()
{
"other hint"
} else {
@@ -2815,7 +2818,7 @@ pub mod tests {
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/a",
+ path!("/a"),
json!({
"main.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|i| format!("let i = {i};\n")).collect::<Vec<_>>().join("")),
"other.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|j| format!("let j = {j};\n")).collect::<Vec<_>>().join("")),
@@ -2823,7 +2826,7 @@ pub mod tests {
)
.await;
- let project = Project::test(fs, ["/a".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
@@ -2840,13 +2843,13 @@ pub mod tests {
let (buffer_1, _handle) = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/a/main.rs", cx)
+ project.open_local_buffer_with_lsp(path!("/a/main.rs"), cx)
})
.await
.unwrap();
let (buffer_2, _handle2) = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/a/other.rs", cx)
+ project.open_local_buffer_with_lsp(path!("/a/other.rs"), cx)
})
.await
.unwrap();
@@ -2886,11 +2889,11 @@ pub mod tests {
let task_editor_edited = Arc::clone(&closure_editor_edited);
async move {
let hint_text = if params.text_document.uri
- == lsp::Url::from_file_path("/a/main.rs").unwrap()
+ == lsp::Url::from_file_path(path!("/a/main.rs")).unwrap()
{
"main hint"
} else if params.text_document.uri
- == lsp::Url::from_file_path("/a/other.rs").unwrap()
+ == lsp::Url::from_file_path(path!("/a/other.rs")).unwrap()
{
"other hint"
} else {
@@ -3027,7 +3030,7 @@ pub mod tests {
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/a",
+ path!("/a"),
json!({
"main.rs": format!(r#"fn main() {{\n{}\n}}"#, format!("let i = {};\n", "√".repeat(10)).repeat(500)),
"other.rs": "// Test file",
@@ -3035,7 +3038,7 @@ pub mod tests {
)
.await;
- let project = Project::test(fs, ["/a".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
@@ -3054,7 +3057,7 @@ pub mod tests {
async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path("/a/main.rs").unwrap(),
+ lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
);
let query_start = params.range.start;
Ok(Some(vec![lsp::InlayHint {
@@ -3077,7 +3080,7 @@ pub mod tests {
let buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer("/a/main.rs", cx)
+ project.open_local_buffer(path!("/a/main.rs"), cx)
})
.await
.unwrap();
@@ -3250,7 +3253,7 @@ pub mod tests {
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/a",
+ path!("/a"),
json!({
"main.rs": "fn main() {
let x = 42;
@@ -3265,7 +3268,7 @@ pub mod tests {
)
.await;
- let project = Project::test(fs, ["/a".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
@@ -3281,7 +3284,7 @@ pub mod tests {
move |params, _| async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path("/a/main.rs").unwrap(),
+ lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
);
Ok(Some(
serde_json::from_value(json!([
@@ -3351,7 +3354,7 @@ pub mod tests {
let buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer("/a/main.rs", cx)
+ project.open_local_buffer(path!("/a/main.rs"), cx)
})
.await
.unwrap();
@@ -3408,7 +3411,7 @@ pub mod tests {
) -> (&'static str, WindowHandle<Editor>, FakeLanguageServer) {
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/a",
+ path!("/a"),
json!({
"main.rs": "fn main() { a } // and some long comment to ensure inlays are not trimmed out",
"other.rs": "// Test file",
@@ -3416,8 +3419,8 @@ pub mod tests {
)
.await;
- let project = Project::test(fs, ["/a".as_ref()], cx).await;
- let file_path = "/a/main.rs";
+ let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
+ let file_path = path!("/a/main.rs");
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
@@ -3435,7 +3438,7 @@ pub mod tests {
let buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer("/a/main.rs", cx)
+ project.open_local_buffer(path!("/a/main.rs"), cx)
})
.await
.unwrap();
@@ -1,8 +1,9 @@
use gpui::{prelude::*, Entity};
use indoc::indoc;
-use inline_completion::InlineCompletionProvider;
+use inline_completion::EditPredictionProvider;
use language::{Language, LanguageConfig};
use multi_buffer::{Anchor, MultiBufferSnapshot, ToPoint};
+use project::Project;
use std::{num::NonZeroU32, ops::Range, sync::Arc};
use text::{Point, ToOffset};
@@ -314,7 +315,7 @@ fn assert_editor_active_move_completion(
fn accept_completion(cx: &mut EditorTestContext) {
cx.update_editor(|editor, window, cx| {
- editor.accept_inline_completion(&crate::AcceptInlineCompletion, window, cx)
+ editor.accept_edit_prediction(&crate::AcceptEditPrediction, window, cx)
})
}
@@ -344,7 +345,7 @@ fn assign_editor_completion_provider(
cx: &mut EditorTestContext,
) {
cx.update_editor(|editor, window, cx| {
- editor.set_inline_completion_provider(Some(provider), window, cx);
+ editor.set_edit_prediction_provider(Some(provider), window, cx);
})
}
@@ -362,7 +363,7 @@ impl FakeInlineCompletionProvider {
}
}
-impl InlineCompletionProvider for FakeInlineCompletionProvider {
+impl EditPredictionProvider for FakeInlineCompletionProvider {
fn name() -> &'static str {
"fake-completion-provider"
}
@@ -375,10 +376,6 @@ impl InlineCompletionProvider for FakeInlineCompletionProvider {
false
}
- fn show_completions_in_normal_mode() -> bool {
- false
- }
-
fn is_enabled(
&self,
_buffer: &gpui::Entity<language::Buffer>,
@@ -394,6 +391,7 @@ impl InlineCompletionProvider for FakeInlineCompletionProvider {
fn refresh(
&mut self,
+ _project: Option<Entity<Project>>,
_buffer: gpui::Entity<language::Buffer>,
_cursor_position: language::Anchor,
_debounce: bool,
@@ -1717,6 +1717,7 @@ mod tests {
use language::{LanguageMatcher, TestFile};
use project::FakeFs;
use std::path::{Path, PathBuf};
+ use util::path;
#[gpui::test]
fn test_path_for_file(cx: &mut App) {
@@ -1771,24 +1772,24 @@ mod tests {
init_test(cx, |_| {});
let fs = FakeFs::new(cx.executor());
- fs.insert_file("/file.rs", Default::default()).await;
+ fs.insert_file(path!("/file.rs"), Default::default()).await;
// Test case 1: Deserialize with path and contents
{
- let project = Project::test(fs.clone(), ["/file.rs".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/file.rs").as_ref()], cx).await;
let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap();
let item_id = 1234 as ItemId;
let mtime = fs
- .metadata(Path::new("/file.rs"))
+ .metadata(Path::new(path!("/file.rs")))
.await
.unwrap()
.unwrap()
.mtime;
let serialized_editor = SerializedEditor {
- abs_path: Some(PathBuf::from("/file.rs")),
+ abs_path: Some(PathBuf::from(path!("/file.rs"))),
contents: Some("fn main() {}".to_string()),
language: Some("Rust".to_string()),
mtime: Some(mtime),
@@ -1812,7 +1813,7 @@ mod tests {
// Test case 2: Deserialize with only path
{
- let project = Project::test(fs.clone(), ["/file.rs".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/file.rs").as_ref()], cx).await;
let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
@@ -1820,7 +1821,7 @@ mod tests {
let item_id = 5678 as ItemId;
let serialized_editor = SerializedEditor {
- abs_path: Some(PathBuf::from("/file.rs")),
+ abs_path: Some(PathBuf::from(path!("/file.rs"))),
contents: None,
language: None,
mtime: None,
@@ -1845,7 +1846,7 @@ mod tests {
// Test case 3: Deserialize with no path (untitled buffer, with content and language)
{
- let project = Project::test(fs.clone(), ["/file.rs".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/file.rs").as_ref()], cx).await;
// Add Rust to the language, so that we can restore the language of the buffer
project.update(cx, |project, _| project.languages().add(rust_language()));
@@ -1884,7 +1885,7 @@ mod tests {
// Test case 4: Deserialize with path, content, and old mtime
{
- let project = Project::test(fs.clone(), ["/file.rs".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/file.rs").as_ref()], cx).await;
let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
@@ -1893,7 +1894,7 @@ mod tests {
let item_id = 9345 as ItemId;
let old_mtime = MTime::from_seconds_and_nanos(0, 50);
let serialized_editor = SerializedEditor {
- abs_path: Some(PathBuf::from("/file.rs")),
+ abs_path: Some(PathBuf::from(path!("/file.rs"))),
contents: Some("fn main() {}".to_string()),
language: Some("Rust".to_string()),
mtime: Some(old_mtime),
@@ -229,9 +229,10 @@ pub fn deploy_context_menu(
cx,
),
None => {
+ let character_size = editor.character_size(window);
let menu_position = MenuPosition::PinnedToEditor {
source: source_anchor,
- offset: editor.character_size(window),
+ offset: gpui::point(character_size.width, character_size.height),
};
Some(MouseContextMenu::new(
menu_position,
@@ -1,10 +1,11 @@
use crate::{ApplyAllDiffHunks, Editor, EditorEvent, SemanticsProvider};
use collections::HashSet;
+use diff::BufferDiff;
use futures::{channel::mpsc, future::join_all};
use gpui::{App, Entity, EventEmitter, Focusable, Render, Subscription, Task};
use language::{Buffer, BufferEvent, Capability};
use multi_buffer::{ExcerptRange, MultiBuffer};
-use project::{buffer_store::BufferChangeSet, Project};
+use project::Project;
use smol::stream::StreamExt;
use std::{any::TypeId, ops::Range, rc::Rc, time::Duration};
use text::ToOffset;
@@ -106,16 +107,10 @@ impl ProposedChangesEditor {
let buffer = buffer.read(cx);
let base_buffer = buffer.base_buffer()?;
let buffer = buffer.text_snapshot();
- let change_set = this
- .multibuffer
- .read(cx)
- .change_set_for(buffer.remote_id())?;
- Some(change_set.update(cx, |change_set, cx| {
- change_set.set_base_text(
- base_buffer.read(cx).text(),
- buffer,
- cx,
- )
+ let diff =
+ this.multibuffer.read(cx).diff_for(buffer.remote_id())?;
+ Some(diff.update(cx, |diff, cx| {
+ diff.set_base_text(base_buffer.clone(), buffer, cx)
}))
})
.collect::<Vec<_>>()
@@ -176,7 +171,7 @@ impl ProposedChangesEditor {
});
let mut buffer_entries = Vec::new();
- let mut new_change_sets = Vec::new();
+ let mut new_diffs = Vec::new();
for location in locations {
let branch_buffer;
if let Some(ix) = self
@@ -189,14 +184,14 @@ impl ProposedChangesEditor {
buffer_entries.push(entry);
} else {
branch_buffer = location.buffer.update(cx, |buffer, cx| buffer.branch(cx));
- new_change_sets.push(cx.new(|cx| {
- let mut change_set = BufferChangeSet::new(&branch_buffer, cx);
- let _ = change_set.set_base_text(
- location.buffer.read(cx).text(),
+ new_diffs.push(cx.new(|cx| {
+ let mut diff = BufferDiff::new(&branch_buffer, cx);
+ let _ = diff.set_base_text(
+ location.buffer.clone(),
branch_buffer.read(cx).text_snapshot(),
cx,
);
- change_set
+ diff
}));
buffer_entries.push(BufferEntry {
branch: branch_buffer.clone(),
@@ -221,8 +216,8 @@ impl ProposedChangesEditor {
self.editor.update(cx, |editor, cx| {
editor.change_selections(None, window, cx, |selections| selections.refresh());
editor.buffer.update(cx, |buffer, cx| {
- for change_set in new_change_sets {
- buffer.add_change_set(change_set, cx)
+ for diff in new_diffs {
+ buffer.add_diff(diff, cx)
}
})
});
@@ -158,7 +158,7 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
- if self.pending_rename.is_some() || self.has_active_completions_menu() {
+ if self.pending_rename.is_some() || self.has_visible_completions_menu() {
return;
}
@@ -1,12 +1,15 @@
pub mod editor_lsp_test_context;
pub mod editor_test_context;
+use std::sync::LazyLock;
+
use crate::{
display_map::{DisplayMap, DisplaySnapshot, ToDisplayPoint},
DisplayPoint, Editor, EditorMode, FoldPlaceholder, MultiBuffer,
};
use gpui::{
- AppContext as _, Context, Entity, Font, FontFeatures, FontStyle, FontWeight, Pixels, Window,
+ font, AppContext as _, Context, Entity, Font, FontFeatures, FontStyle, FontWeight, Pixels,
+ Window,
};
use project::Project;
use util::test::{marked_text_offsets, marked_text_ranges};
@@ -19,6 +22,22 @@ fn init_logger() {
}
}
+pub fn test_font() -> Font {
+ static TEST_FONT: LazyLock<Font> = LazyLock::new(|| {
+ #[cfg(not(target_os = "windows"))]
+ {
+ font("Helvetica")
+ }
+
+ #[cfg(target_os = "windows")]
+ {
+ font("Courier New")
+ }
+ });
+
+ TEST_FONT.clone()
+}
+
// Returns a snapshot from text containing '|' character markers with the markers removed, and DisplayPoints for each one.
pub fn marked_display_snapshot(
text: &str,
@@ -3,8 +3,9 @@ use crate::{
RowExt,
};
use collections::BTreeMap;
+use diff::DiffHunkStatus;
use futures::Future;
-use git::diff::DiffHunkStatus;
+
use gpui::{
prelude::*, AnyWindowHandle, App, Context, Entity, Focusable as _, Keystroke, Pixels, Point,
VisualTestContext, Window, WindowHandle,
@@ -290,9 +291,9 @@ impl EditorTestContext {
editor.project.as_ref().unwrap().read(cx).fs().as_fake()
});
let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone());
- fs.set_index_for_repo(
+ fs.set_head_for_repo(
&Self::root_path().join(".git"),
- &[(path.as_ref(), diff_base.to_string())],
+ &[(path.into(), diff_base.to_string())],
);
self.cx.run_until_parked();
}
@@ -118,6 +118,8 @@ pub trait ExtensionThemeProxy: Send + Sync + 'static {
icons_root_dir: PathBuf,
fs: Arc<dyn Fs>,
) -> Task<Result<()>>;
+
+ fn reload_current_icon_theme(&self, cx: &mut App);
}
impl ExtensionThemeProxy for ExtensionHostProxy {
@@ -185,6 +187,14 @@ impl ExtensionThemeProxy for ExtensionHostProxy {
proxy.load_icon_theme(icon_theme_path, icons_root_dir, fs)
}
+
+ fn reload_current_icon_theme(&self, cx: &mut App) {
+ let Some(proxy) = self.theme_proxy.read().clone() else {
+ return;
+ };
+
+ proxy.reload_current_icon_theme(cx)
+ }
}
pub trait ExtensionGrammarProxy: Send + Sync + 'static {
@@ -1,20 +1,18 @@
-use std::{
- collections::HashMap,
- env, fs,
- path::{Path, PathBuf},
- process::Command,
- sync::Arc,
-};
+use std::collections::{BTreeSet, HashMap};
+use std::env;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+use std::sync::Arc;
use ::fs::{copy_recursive, CopyOptions, Fs, RealFs};
use anyhow::{anyhow, bail, Context, Result};
use clap::Parser;
-use extension::{
- extension_builder::{CompileExtensionOptions, ExtensionBuilder},
- ExtensionManifest,
-};
+use extension::extension_builder::{CompileExtensionOptions, ExtensionBuilder};
+use extension::ExtensionManifest;
use language::LanguageConfig;
use reqwest_client::ReqwestClient;
+use rpc::ExtensionProvides;
use tree_sitter::{Language, Query, WasmStore};
#[derive(Parser, Debug)]
@@ -99,6 +97,8 @@ async fn main() -> Result<()> {
);
}
+ let extension_provides = extension_provides(&manifest);
+
let manifest_json = serde_json::to_string(&rpc::ExtensionApiManifest {
name: manifest.name,
version: manifest.version,
@@ -109,6 +109,7 @@ async fn main() -> Result<()> {
.repository
.ok_or_else(|| anyhow!("missing repository in extension manifest"))?,
wasm_api_version: manifest.lib.version.map(|version| version.to_string()),
+ provides: extension_provides,
})?;
fs::remove_dir_all(&archive_dir)?;
fs::write(output_dir.join("manifest.json"), manifest_json.as_bytes())?;
@@ -116,6 +117,44 @@ async fn main() -> Result<()> {
Ok(())
}
+/// Returns the set of features provided by the extension.
+fn extension_provides(manifest: &ExtensionManifest) -> BTreeSet<ExtensionProvides> {
+ let mut provides = BTreeSet::default();
+ if !manifest.themes.is_empty() {
+ provides.insert(ExtensionProvides::Themes);
+ }
+
+ if !manifest.icon_themes.is_empty() {
+ provides.insert(ExtensionProvides::IconThemes);
+ }
+
+ if !manifest.languages.is_empty() {
+ provides.insert(ExtensionProvides::Languages);
+ }
+
+ if !manifest.grammars.is_empty() {
+ provides.insert(ExtensionProvides::Grammars);
+ }
+
+ if !manifest.language_servers.is_empty() {
+ provides.insert(ExtensionProvides::LanguageServers);
+ }
+
+ if !manifest.context_servers.is_empty() {
+ provides.insert(ExtensionProvides::ContextServers);
+ }
+
+ if !manifest.indexed_docs_providers.is_empty() {
+ provides.insert(ExtensionProvides::IndexedDocsProviders);
+ }
+
+ if manifest.snippets.is_some() {
+ provides.insert(ExtensionProvides::Snippets);
+ }
+
+ provides
+}
+
async fn copy_extension_resources(
manifest: &ExtensionManifest,
extension_path: &Path,
@@ -8,8 +8,9 @@ mod extension_store_test;
use anyhow::{anyhow, bail, Context as _, Result};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
+use client::ExtensionProvides;
use client::{proto, telemetry::Telemetry, Client, ExtensionMetadata, GetExtensionsResponse};
-use collections::{btree_map, BTreeMap, HashMap, HashSet};
+use collections::{btree_map, BTreeMap, BTreeSet, HashMap, HashSet};
use extension::extension_builder::{CompileExtensionOptions, ExtensionBuilder};
pub use extension::ExtensionManifest;
use extension::{
@@ -464,6 +465,7 @@ impl ExtensionStore {
pub fn fetch_extensions(
&self,
search: Option<&str>,
+ provides_filter: Option<&BTreeSet<ExtensionProvides>>,
cx: &mut Context<Self>,
) -> Task<Result<Vec<ExtensionMetadata>>> {
let version = CURRENT_SCHEMA_VERSION.to_string();
@@ -472,6 +474,17 @@ impl ExtensionStore {
query.push(("filter", search));
}
+ let provides_filter = provides_filter.map(|provides_filter| {
+ provides_filter
+ .iter()
+ .map(|provides| provides.to_string())
+ .collect::<Vec<_>>()
+ .join(",")
+ });
+ if let Some(provides_filter) = provides_filter.as_deref() {
+ query.push(("provides", provides_filter));
+ }
+
self.fetch_extensions_from_api("/extensions", &query, cx)
}
@@ -1279,6 +1292,7 @@ impl ExtensionStore {
this.wasm_extensions.extend(wasm_extensions);
this.proxy.reload_current_theme(cx);
+ this.proxy.reload_current_icon_theme(cx);
})
.ok();
})
@@ -455,7 +455,12 @@ async fn test_extension_store(cx: &mut TestAppContext) {
});
}
+// todo(windows)
+// Disable this test on Windows for now. Because this test hangs at
+// `let fake_server = fake_servers.next().await.unwrap();`.
+// Reenable this test when we figure out why.
#[gpui::test]
+#[cfg_attr(target_os = "windows", ignore)]
async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
init_test(cx);
cx.executor().allow_parking();
@@ -634,6 +639,8 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
.await
.unwrap();
+ // todo(windows)
+ // This test hangs here on Windows.
let fake_server = fake_servers.next().await.unwrap();
let expected_server_path =
extensions_dir.join(format!("work/{test_extension_id}/gleam-v1.2.3/gleam"));
@@ -18,6 +18,7 @@ collections.workspace = true
db.workspace = true
editor.workspace = true
extension_host.workspace = true
+feature_flags.workspace = true
fs.workspace = true
fuzzy.workspace = true
gpui.workspace = true
@@ -7,6 +7,7 @@ use editor::Editor;
use extension_host::ExtensionStore;
use gpui::{AppContext as _, Context, Entity, SharedString, Window};
use language::Buffer;
+use ui::prelude::*;
use workspace::notifications::simple_message_notification::MessageNotification;
use workspace::{notifications::NotificationId, Workspace};
@@ -172,8 +173,10 @@ pub(crate) fn suggest(buffer: Entity<Buffer>, window: &mut Window, cx: &mut Cont
"Do you want to install the recommended '{}' extension for '{}' files?",
extension_id, file_name_or_extension
))
- .with_click_message("Yes, install extension")
- .on_click({
+ .primary_message("Yes, install extension")
+ .primary_icon(IconName::Check)
+ .primary_icon_color(Color::Success)
+ .primary_on_click({
let extension_id = extension_id.clone();
move |_window, cx| {
let extension_id = extension_id.clone();
@@ -183,8 +186,10 @@ pub(crate) fn suggest(buffer: Entity<Buffer>, window: &mut Window, cx: &mut Cont
});
}
})
- .with_secondary_click_message("No, don't install it")
- .on_secondary_click(move |_window, cx| {
+ .secondary_message("No, don't install it")
+ .secondary_icon(IconName::Close)
+ .secondary_icon_color(Color::Error)
+ .secondary_on_click(move |_window, cx| {
let key = language_extension_key(&extension_id);
db::write_and_log(cx, move || {
KEY_VALUE_STORE.write_kvp(key, "dismissed".to_string())
@@ -6,10 +6,11 @@ use std::sync::OnceLock;
use std::time::Duration;
use std::{ops::Range, sync::Arc};
-use client::ExtensionMetadata;
+use client::{ExtensionMetadata, ExtensionProvides};
use collections::{BTreeMap, BTreeSet};
use editor::{Editor, EditorElement, EditorStyle};
use extension_host::{ExtensionManifest, ExtensionOperation, ExtensionStore};
+use feature_flags::FeatureFlagAppExt as _;
use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{
actions, uniform_list, Action, App, ClipboardItem, Context, Entity, EventEmitter, Flatten,
@@ -210,6 +211,7 @@ pub struct ExtensionsPage {
filtered_remote_extension_indices: Vec<usize>,
query_editor: Entity<Editor>,
query_contains_error: bool,
+ provides_filter: Option<ExtensionProvides>,
_subscriptions: [gpui::Subscription; 2],
extension_fetch_task: Option<Task<()>>,
upsells: BTreeSet<Feature>,
@@ -261,12 +263,13 @@ impl ExtensionsPage {
filtered_remote_extension_indices: Vec::new(),
remote_extension_entries: Vec::new(),
query_contains_error: false,
+ provides_filter: None,
extension_fetch_task: None,
_subscriptions: subscriptions,
query_editor,
upsells: BTreeSet::default(),
};
- this.fetch_extensions(None, cx);
+ this.fetch_extensions(None, None, cx);
this
})
}
@@ -363,7 +366,12 @@ impl ExtensionsPage {
cx.notify();
}
- fn fetch_extensions(&mut self, search: Option<String>, cx: &mut Context<Self>) {
+ fn fetch_extensions(
+ &mut self,
+ search: Option<String>,
+ provides_filter: Option<BTreeSet<ExtensionProvides>>,
+ cx: &mut Context<Self>,
+ ) {
self.is_fetching_extensions = true;
cx.notify();
@@ -374,7 +382,7 @@ impl ExtensionsPage {
});
let remote_extensions = extension_store.update(cx, |store, cx| {
- store.fetch_extensions(search.as_deref(), cx)
+ store.fetch_extensions(search.as_deref(), provides_filter.as_ref(), cx)
});
cx.spawn(move |this, mut cx| async move {
@@ -575,7 +583,6 @@ impl ExtensionsPage {
.child(
h_flex()
.gap_2()
- .items_end()
.child(
Headline::new(extension.manifest.name.clone())
.size(HeadlineSize::Medium),
@@ -588,7 +595,52 @@ impl ExtensionsPage {
Headline::new(format!("(v{installed_version} installed)",))
.size(HeadlineSize::XSmall)
}),
- ),
+ )
+ .map(|parent| {
+ if extension.manifest.provides.is_empty() {
+ return parent;
+ }
+
+ parent.child(
+ h_flex().gap_2().children(
+ extension
+ .manifest
+ .provides
+ .iter()
+ .map(|provides| {
+ let label = match provides {
+ ExtensionProvides::Themes => "Themes",
+ ExtensionProvides::IconThemes => "Icon Themes",
+ ExtensionProvides::Languages => "Languages",
+ ExtensionProvides::Grammars => "Grammars",
+ ExtensionProvides::LanguageServers => {
+ "Language Servers"
+ }
+ ExtensionProvides::ContextServers => {
+ "Context Servers"
+ }
+ ExtensionProvides::SlashCommands => {
+ "Slash Commands"
+ }
+ ExtensionProvides::IndexedDocsProviders => {
+ "Indexed Docs Providers"
+ }
+ ExtensionProvides::Snippets => "Snippets",
+ };
+ div()
+ .bg(cx.theme().colors().element_background)
+ .px_0p5()
+ .border_1()
+ .border_color(cx.theme().colors().border)
+ .rounded_md()
+ .child(
+ Label::new(label).size(LabelSize::XSmall),
+ )
+ })
+ .collect::<Vec<_>>(),
+ ),
+ )
+ }),
)
.child(
h_flex()
@@ -909,11 +961,15 @@ impl ExtensionsPage {
) {
if let editor::EditorEvent::Edited { .. } = event {
self.query_contains_error = false;
- self.fetch_extensions_debounced(cx);
- self.refresh_feature_upsells(cx);
+ self.refresh_search(cx);
}
}
+ fn refresh_search(&mut self, cx: &mut Context<Self>) {
+ self.fetch_extensions_debounced(cx);
+ self.refresh_feature_upsells(cx);
+ }
+
fn fetch_extensions_debounced(&mut self, cx: &mut Context<ExtensionsPage>) {
self.extension_fetch_task = Some(cx.spawn(|this, mut cx| async move {
let search = this
@@ -934,7 +990,7 @@ impl ExtensionsPage {
};
this.update(&mut cx, |this, cx| {
- this.fetch_extensions(search, cx);
+ this.fetch_extensions(search, Some(BTreeSet::from_iter(this.provides_filter)), cx);
})
.ok();
}));
@@ -1118,7 +1174,41 @@ impl Render for ExtensionsPage {
.w_full()
.gap_2()
.justify_between()
- .child(h_flex().child(self.render_search(cx)))
+ .child(
+ h_flex()
+ .gap_2()
+ .child(self.render_search(cx))
+ .map(|parent| {
+ // Note: Staff-only until this gets design input.
+ if !cx.is_staff() {
+ return parent;
+ }
+
+ parent.child(CheckboxWithLabel::new(
+ "icon-themes-filter",
+ Label::new("Icon themes"),
+ match self.provides_filter {
+ Some(ExtensionProvides::IconThemes) => {
+ ToggleState::Selected
+ }
+ _ => ToggleState::Unselected,
+ },
+ cx.listener(|this, checked, _window, cx| {
+ match checked {
+ ToggleState::Unselected
+ | ToggleState::Indeterminate => {
+ this.provides_filter = None
+ }
+ ToggleState::Selected => {
+ this.provides_filter =
+ Some(ExtensionProvides::IconThemes)
+ }
+ };
+ this.refresh_search(cx);
+ }),
+ ))
+ }),
+ )
.child(
h_flex()
.child(
@@ -64,12 +64,17 @@ impl Display for SystemSpecs {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let os_information = format!("OS: {} {}", self.os_name, self.os_version);
let app_version_information = format!(
- "Zed: v{} ({})",
+ "Zed: v{} ({}) {}",
self.app_version,
match &self.commit_sha {
Some(commit_sha) => format!("{} {}", self.release_channel, commit_sha),
None => self.release_channel.to_string(),
- }
+ },
+ if cfg!(debug_assertions) {
+ "(Taylor's Version)"
+ } else {
+ ""
+ },
);
let system_specs = [
app_version_information,
@@ -6,6 +6,7 @@ use gpui::{Entity, TestAppContext, VisualTestContext};
use menu::{Confirm, SelectNext, SelectPrev};
use project::{RemoveOptions, FS_WATCH_LATENCY};
use serde_json::json;
+use util::path;
use workspace::{AppState, ToggleFileFinder, Workspace};
#[ctor::ctor]
@@ -90,7 +91,7 @@ async fn test_absolute_paths(cx: &mut TestAppContext) {
.fs
.as_fake()
.insert_tree(
- "/root",
+ path!("/root"),
json!({
"a": {
"file1.txt": "",
@@ -102,16 +103,16 @@ async fn test_absolute_paths(cx: &mut TestAppContext) {
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
let (picker, workspace, cx) = build_find_picker(project, cx);
- let matching_abs_path = "/root/a/b/file2.txt";
+ let matching_abs_path = path!("/root/a/b/file2.txt").to_string();
picker
.update_in(cx, |picker, window, cx| {
picker
.delegate
- .update_matches(matching_abs_path.to_string(), window, cx)
+ .update_matches(matching_abs_path, window, cx)
})
.await;
picker.update(cx, |picker, _| {
@@ -128,12 +129,12 @@ async fn test_absolute_paths(cx: &mut TestAppContext) {
assert_eq!(active_editor.read(cx).title(cx), "file2.txt");
});
- let mismatching_abs_path = "/root/a/b/file1.txt";
+ let mismatching_abs_path = path!("/root/a/b/file1.txt").to_string();
picker
.update_in(cx, |picker, window, cx| {
picker
.delegate
- .update_matches(mismatching_abs_path.to_string(), window, cx)
+ .update_matches(mismatching_abs_path, window, cx)
})
.await;
picker.update(cx, |picker, _| {
@@ -518,7 +519,7 @@ async fn test_path_distance_ordering(cx: &mut TestAppContext) {
.fs
.as_fake()
.insert_tree(
- "/root",
+ path!("/root"),
json!({
"dir1": { "a.txt": "" },
"dir2": {
@@ -529,7 +530,7 @@ async fn test_path_distance_ordering(cx: &mut TestAppContext) {
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
let worktree_id = cx.read(|cx| {
@@ -606,7 +607,7 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) {
.fs
.as_fake()
.insert_tree(
- "/src",
+ path!("/src"),
json!({
"test": {
"first.rs": "// First Rust file",
@@ -617,7 +618,7 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) {
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
let worktree_id = cx.read(|cx| {
let worktrees = workspace.read(cx).worktrees(cx).collect::<Vec<_>>();
@@ -648,7 +649,7 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) {
worktree_id,
path: Arc::from(Path::new("test/first.rs")),
},
- Some(PathBuf::from("/src/test/first.rs"))
+ Some(PathBuf::from(path!("/src/test/first.rs")))
)],
"Should show 1st opened item in the history when opening the 2nd item"
);
@@ -663,14 +664,14 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) {
worktree_id,
path: Arc::from(Path::new("test/second.rs")),
},
- Some(PathBuf::from("/src/test/second.rs"))
+ Some(PathBuf::from(path!("/src/test/second.rs")))
),
FoundPath::new(
ProjectPath {
worktree_id,
path: Arc::from(Path::new("test/first.rs")),
},
- Some(PathBuf::from("/src/test/first.rs"))
+ Some(PathBuf::from(path!("/src/test/first.rs")))
),
],
"Should show 1st and 2nd opened items in the history when opening the 3rd item. \
@@ -687,21 +688,21 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) {
worktree_id,
path: Arc::from(Path::new("test/third.rs")),
},
- Some(PathBuf::from("/src/test/third.rs"))
+ Some(PathBuf::from(path!("/src/test/third.rs")))
),
FoundPath::new(
ProjectPath {
worktree_id,
path: Arc::from(Path::new("test/second.rs")),
},
- Some(PathBuf::from("/src/test/second.rs"))
+ Some(PathBuf::from(path!("/src/test/second.rs")))
),
FoundPath::new(
ProjectPath {
worktree_id,
path: Arc::from(Path::new("test/first.rs")),
},
- Some(PathBuf::from("/src/test/first.rs"))
+ Some(PathBuf::from(path!("/src/test/first.rs")))
),
],
"Should show 1st, 2nd and 3rd opened items in the history when opening the 2nd item again. \
@@ -718,21 +719,21 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) {
worktree_id,
path: Arc::from(Path::new("test/second.rs")),
},
- Some(PathBuf::from("/src/test/second.rs"))
+ Some(PathBuf::from(path!("/src/test/second.rs")))
),
FoundPath::new(
ProjectPath {
worktree_id,
path: Arc::from(Path::new("test/third.rs")),
},
- Some(PathBuf::from("/src/test/third.rs"))
+ Some(PathBuf::from(path!("/src/test/third.rs")))
),
FoundPath::new(
ProjectPath {
worktree_id,
path: Arc::from(Path::new("test/first.rs")),
},
- Some(PathBuf::from("/src/test/first.rs"))
+ Some(PathBuf::from(path!("/src/test/first.rs")))
),
],
"Should show 1st, 2nd and 3rd opened items in the history when opening the 3rd item again. \
@@ -748,7 +749,7 @@ async fn test_external_files_history(cx: &mut gpui::TestAppContext) {
.fs
.as_fake()
.insert_tree(
- "/src",
+ path!("/src"),
json!({
"test": {
"first.rs": "// First Rust file",
@@ -762,7 +763,7 @@ async fn test_external_files_history(cx: &mut gpui::TestAppContext) {
.fs
.as_fake()
.insert_tree(
- "/external-src",
+ path!("/external-src"),
json!({
"test": {
"third.rs": "// Third Rust file",
@@ -772,10 +773,10 @@ async fn test_external_files_history(cx: &mut gpui::TestAppContext) {
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await;
cx.update(|cx| {
project.update(cx, |project, cx| {
- project.find_or_create_worktree("/external-src", false, cx)
+ project.find_or_create_worktree(path!("/external-src"), false, cx)
})
})
.detach();
@@ -791,7 +792,7 @@ async fn test_external_files_history(cx: &mut gpui::TestAppContext) {
workspace
.update_in(cx, |workspace, window, cx| {
workspace.open_abs_path(
- PathBuf::from("/external-src/test/third.rs"),
+ PathBuf::from(path!("/external-src/test/third.rs")),
false,
window,
cx,
@@ -816,7 +817,10 @@ async fn test_external_files_history(cx: &mut gpui::TestAppContext) {
.as_u64() as usize,
)
});
- cx.dispatch_action(workspace::CloseActiveItem { save_intent: None });
+ cx.dispatch_action(workspace::CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ });
let initial_history_items =
open_close_queried_buffer("sec", 1, "second.rs", &workspace, cx).await;
@@ -827,7 +831,7 @@ async fn test_external_files_history(cx: &mut gpui::TestAppContext) {
worktree_id: external_worktree_id,
path: Arc::from(Path::new("")),
},
- Some(PathBuf::from("/external-src/test/third.rs"))
+ Some(PathBuf::from(path!("/external-src/test/third.rs")))
)],
"Should show external file with its full path in the history after it was open"
);
@@ -842,14 +846,14 @@ async fn test_external_files_history(cx: &mut gpui::TestAppContext) {
worktree_id,
path: Arc::from(Path::new("test/second.rs")),
},
- Some(PathBuf::from("/src/test/second.rs"))
+ Some(PathBuf::from(path!("/src/test/second.rs")))
),
FoundPath::new(
ProjectPath {
worktree_id: external_worktree_id,
path: Arc::from(Path::new("")),
},
- Some(PathBuf::from("/external-src/test/third.rs"))
+ Some(PathBuf::from(path!("/external-src/test/third.rs")))
),
],
"Should keep external file with history updates",
@@ -864,7 +868,7 @@ async fn test_toggle_panel_new_selections(cx: &mut gpui::TestAppContext) {
.fs
.as_fake()
.insert_tree(
- "/src",
+ path!("/src"),
json!({
"test": {
"first.rs": "// First Rust file",
@@ -875,7 +879,7 @@ async fn test_toggle_panel_new_selections(cx: &mut gpui::TestAppContext) {
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
// generate some history to select from
@@ -919,7 +923,7 @@ async fn test_search_preserves_history_items(cx: &mut gpui::TestAppContext) {
.fs
.as_fake()
.insert_tree(
- "/src",
+ path!("/src"),
json!({
"test": {
"first.rs": "// First Rust file",
@@ -931,7 +935,7 @@ async fn test_search_preserves_history_items(cx: &mut gpui::TestAppContext) {
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
let worktree_id = cx.read(|cx| {
let worktrees = workspace.read(cx).worktrees(cx).collect::<Vec<_>>();
@@ -964,7 +968,7 @@ async fn test_search_preserves_history_items(cx: &mut gpui::TestAppContext) {
worktree_id,
path: Arc::from(Path::new("test/first.rs")),
},
- Some(PathBuf::from("/src/test/first.rs"))
+ Some(PathBuf::from(path!("/src/test/first.rs")))
));
assert_eq!(matches.search.len(), 1, "Only one non-history item contains {first_query}, it should be present");
assert_eq!(matches.search.first().unwrap(), Path::new("test/fourth.rs"));
@@ -1007,7 +1011,7 @@ async fn test_search_preserves_history_items(cx: &mut gpui::TestAppContext) {
worktree_id,
path: Arc::from(Path::new("test/first.rs")),
},
- Some(PathBuf::from("/src/test/first.rs"))
+ Some(PathBuf::from(path!("/src/test/first.rs")))
));
assert_eq!(matches.search.len(), 1, "Only one non-history item contains {first_query_again}, it should be present, even after non-matching query");
assert_eq!(matches.search.first().unwrap(), Path::new("test/fourth.rs"));
@@ -1022,7 +1026,7 @@ async fn test_search_sorts_history_items(cx: &mut gpui::TestAppContext) {
.fs
.as_fake()
.insert_tree(
- "/root",
+ path!("/root"),
json!({
"test": {
"1_qw": "// First file that matches the query",
@@ -1037,7 +1041,7 @@ async fn test_search_sorts_history_items(cx: &mut gpui::TestAppContext) {
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
// generate some history to select from
open_close_queried_buffer("1", 1, "1_qw", &workspace, cx).await;
@@ -1079,7 +1083,7 @@ async fn test_select_current_open_file_when_no_history(cx: &mut gpui::TestAppCon
.fs
.as_fake()
.insert_tree(
- "/root",
+ path!("/root"),
json!({
"test": {
"1_qw": "",
@@ -1088,7 +1092,7 @@ async fn test_select_current_open_file_when_no_history(cx: &mut gpui::TestAppCon
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
// Open new buffer
open_queried_buffer("1", 1, "1_qw", &workspace, cx).await;
@@ -1109,7 +1113,7 @@ async fn test_keep_opened_file_on_top_of_search_results_and_select_next_one(
.fs
.as_fake()
.insert_tree(
- "/src",
+ path!("/src"),
json!({
"test": {
"bar.rs": "// Bar file",
@@ -1122,7 +1126,7 @@ async fn test_keep_opened_file_on_top_of_search_results_and_select_next_one(
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
open_close_queried_buffer("bar", 1, "bar.rs", &workspace, cx).await;
@@ -1202,7 +1206,7 @@ async fn test_non_separate_history_items(cx: &mut TestAppContext) {
.fs
.as_fake()
.insert_tree(
- "/src",
+ path!("/src"),
json!({
"test": {
"bar.rs": "// Bar file",
@@ -1215,7 +1219,7 @@ async fn test_non_separate_history_items(cx: &mut TestAppContext) {
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
open_close_queried_buffer("bar", 1, "bar.rs", &workspace, cx).await;
@@ -1296,7 +1300,7 @@ async fn test_history_items_shown_in_order_of_open(cx: &mut TestAppContext) {
.fs
.as_fake()
.insert_tree(
- "/test",
+ path!("/test"),
json!({
"test": {
"1.txt": "// One",
@@ -1307,7 +1311,7 @@ async fn test_history_items_shown_in_order_of_open(cx: &mut TestAppContext) {
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
open_queried_buffer("1", 1, "1.txt", &workspace, cx).await;
@@ -1354,7 +1358,7 @@ async fn test_selected_history_item_stays_selected_on_worktree_updated(cx: &mut
.fs
.as_fake()
.insert_tree(
- "/test",
+ path!("/test"),
json!({
"test": {
"1.txt": "// One",
@@ -1365,7 +1369,7 @@ async fn test_selected_history_item_stays_selected_on_worktree_updated(cx: &mut
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
open_close_queried_buffer("1", 1, "1.txt", &workspace, cx).await;
@@ -1384,7 +1388,11 @@ async fn test_selected_history_item_stays_selected_on_worktree_updated(cx: &mut
// Add more files to the worktree to trigger update matches
for i in 0..5 {
- let filename = format!("/test/{}.txt", 4 + i);
+ let filename = if cfg!(windows) {
+ format!("C:/test/{}.txt", 4 + i)
+ } else {
+ format!("/test/{}.txt", 4 + i)
+ };
app_state
.fs
.create_file(Path::new(&filename), Default::default())
@@ -1410,7 +1418,7 @@ async fn test_history_items_vs_very_good_external_match(cx: &mut gpui::TestAppCo
.fs
.as_fake()
.insert_tree(
- "/src",
+ path!("/src"),
json!({
"collab_ui": {
"first.rs": "// First Rust file",
@@ -1422,7 +1430,7 @@ async fn test_history_items_vs_very_good_external_match(cx: &mut gpui::TestAppCo
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
// generate some history to select from
open_close_queried_buffer("fir", 1, "first.rs", &workspace, cx).await;
@@ -1456,7 +1464,7 @@ async fn test_nonexistent_history_items_not_shown(cx: &mut gpui::TestAppContext)
.fs
.as_fake()
.insert_tree(
- "/src",
+ path!("/src"),
json!({
"test": {
"first.rs": "// First Rust file",
@@ -1467,7 +1475,7 @@ async fn test_nonexistent_history_items_not_shown(cx: &mut gpui::TestAppContext)
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); // generate some history to select from
open_close_queried_buffer("fir", 1, "first.rs", &workspace, cx).await;
open_close_queried_buffer("non", 1, "nonexistent.rs", &workspace, cx).await;
@@ -1476,7 +1484,7 @@ async fn test_nonexistent_history_items_not_shown(cx: &mut gpui::TestAppContext)
app_state
.fs
.remove_file(
- Path::new("/src/test/nonexistent.rs"),
+ Path::new(path!("/src/test/nonexistent.rs")),
RemoveOptions::default(),
)
.await
@@ -1742,14 +1750,14 @@ async fn test_keeps_file_finder_open_after_modifier_keys_release(cx: &mut gpui::
.fs
.as_fake()
.insert_tree(
- "/test",
+ path!("/test"),
json!({
"1.txt": "// One",
}),
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
open_queried_buffer("1", 1, "1.txt", &workspace, cx).await;
@@ -1809,7 +1817,7 @@ async fn test_switches_between_release_norelease_modes_on_forward_nav(
.fs
.as_fake()
.insert_tree(
- "/test",
+ path!("/test"),
json!({
"1.txt": "// One",
"2.txt": "// Two",
@@ -1817,7 +1825,7 @@ async fn test_switches_between_release_norelease_modes_on_forward_nav(
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
open_queried_buffer("1", 1, "1.txt", &workspace, cx).await;
@@ -1864,7 +1872,7 @@ async fn test_switches_between_release_norelease_modes_on_backward_nav(
.fs
.as_fake()
.insert_tree(
- "/test",
+ path!("/test"),
json!({
"1.txt": "// One",
"2.txt": "// Two",
@@ -1873,7 +1881,7 @@ async fn test_switches_between_release_norelease_modes_on_backward_nav(
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
open_queried_buffer("1", 1, "1.txt", &workspace, cx).await;
@@ -1921,14 +1929,14 @@ async fn test_extending_modifiers_does_not_confirm_selection(cx: &mut gpui::Test
.fs
.as_fake()
.insert_tree(
- "/test",
+ path!("/test"),
json!({
"1.txt": "// One",
}),
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
open_queried_buffer("1", 1, "1.txt", &workspace, cx).await;
@@ -1995,7 +2003,10 @@ async fn open_close_queried_buffer(
)
.await;
- cx.dispatch_action(workspace::CloseActiveItem { save_intent: None });
+ cx.dispatch_action(workspace::CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ });
history_items
}
@@ -7,7 +7,7 @@ use gpui::{App, AssetSource, Global, SharedString};
use serde_derive::Deserialize;
use settings::Settings;
use theme::{IconTheme, ThemeRegistry, ThemeSettings};
-use util::{maybe, paths::PathExt};
+use util::paths::PathExt;
#[derive(Deserialize, Debug)]
pub struct FileIcons {
@@ -43,20 +43,45 @@ impl FileIcons {
pub fn get_icon(path: &Path, cx: &App) -> Option<SharedString> {
let this = cx.try_global::<Self>()?;
+ let get_icon_from_suffix = |suffix: &str| -> Option<SharedString> {
+ this.stems
+ .get(suffix)
+ .or_else(|| this.suffixes.get(suffix))
+ .and_then(|typ| this.get_icon_for_type(typ, cx))
+ };
// TODO: Associate a type with the languages and have the file's language
// override these associations
- maybe!({
- let suffix = path.icon_stem_or_suffix()?;
- if let Some(type_str) = this.stems.get(suffix) {
- return this.get_icon_for_type(type_str, cx);
+ // check if file name is in suffixes
+ // e.g. catch file named `eslint.config.js` instead of `.eslint.config.js`
+ if let Some(typ) = path.file_name().and_then(|typ| typ.to_str()) {
+ let maybe_path = get_icon_from_suffix(typ);
+ if maybe_path.is_some() {
+ return maybe_path;
}
+ }
- this.suffixes
- .get(suffix)
- .and_then(|type_str| this.get_icon_for_type(type_str, cx))
- })
- .or_else(|| this.get_icon_for_type("default", cx))
+ // primary case: check if the files extension or the hidden file name
+ // matches some icon path
+ if let Some(suffix) = path.extension_or_hidden_file_name() {
+ let maybe_path = get_icon_from_suffix(suffix);
+ if maybe_path.is_some() {
+ return maybe_path;
+ }
+ }
+
+ // this _should_ only happen when the file is hidden (has leading '.')
+ // and is not a "special" file we have an icon (e.g. not `.eslint.config.js`)
+ // that should be caught above. In the remaining cases, we want to check
+ // for a normal supported extension e.g. `.data.json` -> `json`
+ let extension = path.extension().and_then(|ext| ext.to_str());
+ if let Some(extension) = extension {
+ let maybe_path = get_icon_from_suffix(extension);
+ if maybe_path.is_some() {
+ return maybe_path;
+ }
+ }
+ return this.get_icon_for_type("default", cx);
}
fn default_icon_theme(cx: &App) -> Option<Arc<IconTheme>> {
@@ -5,9 +5,9 @@ mod mac_watcher;
pub mod fs_watcher;
use anyhow::{anyhow, Context as _, Result};
-#[cfg(any(test, feature = "test-support"))]
-use git::status::FileStatus;
use git::GitHostingProviderRegistry;
+#[cfg(any(test, feature = "test-support"))]
+use git::{repository::RepoPath, status::FileStatus};
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
use ashpd::desktop::trash;
@@ -1270,25 +1270,32 @@ impl FakeFs {
})
}
- pub fn set_index_for_repo(&self, dot_git: &Path, head_state: &[(&Path, String)]) {
+ pub fn set_index_for_repo(&self, dot_git: &Path, index_state: &[(RepoPath, String)]) {
self.with_git_state(dot_git, true, |state| {
state.index_contents.clear();
state.index_contents.extend(
+ index_state
+ .iter()
+ .map(|(path, content)| (path.clone(), content.clone())),
+ );
+ });
+ }
+
+ pub fn set_head_for_repo(&self, dot_git: &Path, head_state: &[(RepoPath, String)]) {
+ self.with_git_state(dot_git, true, |state| {
+ state.head_contents.clear();
+ state.head_contents.extend(
head_state
.iter()
- .map(|(path, content)| (path.to_path_buf(), content.clone())),
+ .map(|(path, content)| (path.clone(), content.clone())),
);
});
}
- pub fn set_blame_for_repo(&self, dot_git: &Path, blames: Vec<(&Path, git::blame::Blame)>) {
+ pub fn set_blame_for_repo(&self, dot_git: &Path, blames: Vec<(RepoPath, git::blame::Blame)>) {
self.with_git_state(dot_git, true, |state| {
state.blames.clear();
- state.blames.extend(
- blames
- .into_iter()
- .map(|(path, blame)| (path.to_path_buf(), blame)),
- );
+ state.blames.extend(blames);
});
}
@@ -9,6 +9,8 @@ const BASE_DISTANCE_PENALTY: f64 = 0.6;
const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05;
const MIN_DISTANCE_PENALTY: f64 = 0.2;
+// TODO:
+// Use `Path` instead of `&str` for paths.
pub struct Matcher<'a> {
query: &'a [char],
lowercase_query: &'a [char],
@@ -173,6 +175,8 @@ impl<'a> Matcher<'a> {
path_idx: usize,
cur_score: f64,
) -> f64 {
+ use std::path::MAIN_SEPARATOR;
+
if query_idx == self.query.len() {
return 1.0;
}
@@ -196,13 +200,19 @@ impl<'a> Matcher<'a> {
} else {
path_cased[j - prefix.len()]
};
- let is_path_sep = path_char == '/' || path_char == '\\';
+ let is_path_sep = path_char == MAIN_SEPARATOR;
if query_idx == 0 && is_path_sep {
last_slash = j;
}
- if query_char == path_char || (is_path_sep && query_char == '_' || query_char == '\\') {
+ #[cfg(not(target_os = "windows"))]
+ let need_to_score =
+ query_char == path_char || (is_path_sep && query_char == '_' || query_char == '\\');
+ // `query_char == '\\'` breaks `test_match_path_entries` on Windows, `\` is only used as a path separator on Windows.
+ #[cfg(target_os = "windows")]
+ let need_to_score = query_char == path_char || (is_path_sep && query_char == '_');
+ if need_to_score {
let curr = if j < prefix.len() {
prefix[j]
} else {
@@ -217,7 +227,7 @@ impl<'a> Matcher<'a> {
path[j - 1 - prefix.len()]
};
- if last == '/' {
+ if last == MAIN_SEPARATOR {
char_score = 0.9;
} else if (last == '-' || last == '_' || last == ' ' || last.is_numeric())
|| (last.is_lowercase() && curr.is_uppercase())
@@ -238,7 +248,7 @@ impl<'a> Matcher<'a> {
// Apply a severe penalty if the case doesn't match.
// This will make the exact matches have higher score than the case-insensitive and the
// path insensitive matches.
- if (self.smart_case || curr == '/') && self.query[query_idx] != curr {
+ if (self.smart_case || curr == MAIN_SEPARATOR) && self.query[query_idx] != curr {
char_score *= 0.001;
}
@@ -322,6 +332,7 @@ mod tests {
assert_eq!(matcher.last_positions, vec![0, 3, 4, 8]);
}
+ #[cfg(not(target_os = "windows"))]
#[test]
fn test_match_path_entries() {
let paths = vec![
@@ -363,6 +374,54 @@ mod tests {
);
}
+ /// todo(windows)
+ /// Now, on Windows, users can only use the backslash as a path separator.
+ /// I do want to support both the backslash and the forward slash as path separators on Windows.
+ #[cfg(target_os = "windows")]
+ #[test]
+ fn test_match_path_entries() {
+ let paths = vec![
+ "",
+ "a",
+ "ab",
+ "abC",
+ "abcd",
+ "alphabravocharlie",
+ "AlphaBravoCharlie",
+ "thisisatestdir",
+ "\\\\\\\\\\ThisIsATestDir",
+ "\\this\\is\\a\\test\\dir",
+ "\\test\\tiatd",
+ ];
+
+ assert_eq!(
+ match_single_path_query("abc", false, &paths),
+ vec![
+ ("abC", vec![0, 1, 2]),
+ ("abcd", vec![0, 1, 2]),
+ ("AlphaBravoCharlie", vec![0, 5, 10]),
+ ("alphabravocharlie", vec![4, 5, 10]),
+ ]
+ );
+ assert_eq!(
+ match_single_path_query("t\\i\\a\\t\\d", false, &paths),
+ vec![(
+ "\\this\\is\\a\\test\\dir",
+ vec![1, 5, 6, 8, 9, 10, 11, 15, 16]
+ ),]
+ );
+
+ assert_eq!(
+ match_single_path_query("tiatd", false, &paths),
+ vec![
+ ("\\test\\tiatd", vec![6, 7, 8, 9, 10]),
+ ("\\this\\is\\a\\test\\dir", vec![1, 6, 9, 11, 16]),
+ ("\\\\\\\\\\ThisIsATestDir", vec![5, 9, 11, 12, 16]),
+ ("thisisatestdir", vec![0, 2, 6, 7, 11]),
+ ]
+ );
+ }
+
#[test]
fn test_lowercase_longer_than_uppercase() {
// This character has more chars in lower-case than in upper-case.
@@ -11,6 +11,9 @@ workspace = true
[lib]
path = "src/git.rs"
+[features]
+test-support = []
+
[dependencies]
anyhow.workspace = true
async-trait.workspace = true
@@ -32,10 +35,7 @@ url.workspace = true
util.workspace = true
[dev-dependencies]
-unindent.workspace = true
-serde_json.workspace = true
pretty_assertions.workspace = true
-text = {workspace = true, features = ["test-support"]}
-
-[features]
-test-support = []
+serde_json.workspace = true
+text = { workspace = true, features = ["test-support"] }
+unindent.workspace = true
@@ -353,7 +353,7 @@ mod tests {
let want_json =
std::fs::read_to_string(&path).unwrap_or_else(|_| {
panic!("could not read golden test data file at {:?}. Did you run the test with UPDATE_GOLDEN=true before?", path);
- });
+ }).replace("\r\n", "\n");
pretty_assertions::assert_eq!(have_json, want_json, "wrong blame entries");
}
@@ -1,6 +1,5 @@
pub mod blame;
pub mod commit;
-pub mod diff;
mod hosting_provider;
mod remote;
pub mod repository;
@@ -39,8 +38,7 @@ actions!(
StageAll,
UnstageAll,
RevertAll,
- CommitChanges,
- CommitAllChanges,
+ Commit,
ClearCommitMessage
]
);
@@ -1,6 +1,6 @@
use crate::status::FileStatus;
+use crate::GitHostingProviderRegistry;
use crate::{blame::Blame, status::GitStatus};
-use crate::{GitHostingProviderRegistry, COMMIT_MESSAGE};
use anyhow::{anyhow, Context as _, Result};
use collections::{HashMap, HashSet};
use git2::BranchType;
@@ -29,9 +29,15 @@ pub struct Branch {
pub trait GitRepository: Send + Sync {
fn reload_index(&self);
- /// Loads a git repository entry's contents.
+ /// Returns the contents of an entry in the repository's index, or None if there is no entry for the given path.
+ ///
+ /// Note that for symlink entries, this will return the contents of the symlink, not the target.
+ fn load_index_text(&self, path: &RepoPath) -> Option<String>;
+
+ /// Returns the contents of an entry in the repository's HEAD, or None if HEAD does not exist or has no entry for the given path.
+ ///
/// Note that for symlink entries, this will return the contents of the symlink, not the target.
- fn load_index_text(&self, relative_file_path: &Path) -> Option<String>;
+ fn load_committed_text(&self, path: &RepoPath) -> Option<String>;
/// Returns the URL of the remote with the given name.
fn remote_url(&self, name: &str) -> Option<String>;
@@ -40,6 +46,8 @@ pub trait GitRepository: Send + Sync {
/// Returns the SHA of the current HEAD.
fn head_sha(&self) -> Option<String>;
+ fn merge_head_shas(&self) -> Vec<String>;
+
/// Returns the list of git statuses, sorted by path
fn status(&self, path_prefixes: &[RepoPath]) -> Result<GitStatus>;
@@ -50,8 +58,17 @@ pub trait GitRepository: Send + Sync {
fn blame(&self, path: &Path, content: Rope) -> Result<crate::blame::Blame>;
- /// Returns the path to the repository, typically the `.git` folder.
- fn dot_git_dir(&self) -> PathBuf;
+ /// Returns the absolute path to the repository. For worktrees, this will be the path to the
+ /// worktree's gitdir within the main repository (typically `.git/worktrees/<name>`).
+ fn path(&self) -> PathBuf;
+
+ /// Returns the absolute path to the ".git" dir for the main repository, typically a `.git`
+ /// folder. For worktrees, this will be the path to the repository the worktree was created
+ /// from. Otherwise, this is the same value as `path()`.
+ ///
+ /// Git documentation calls this the "commondir", and for git CLI is overridden by
+ /// `GIT_COMMON_DIR`.
+ fn main_repository_path(&self) -> PathBuf;
/// Updates the index to match the worktree at the given paths.
///
@@ -62,7 +79,7 @@ pub trait GitRepository: Send + Sync {
/// If any of the paths were previously staged but do not exist in HEAD, they will be removed from the index.
fn unstage_paths(&self, paths: &[RepoPath]) -> Result<()>;
- fn commit(&self, name_and_email: Option<(&str, &str)>) -> Result<()>;
+ fn commit(&self, message: &str, name_and_email: Option<(&str, &str)>) -> Result<()>;
}
impl std::fmt::Debug for dyn GitRepository {
@@ -101,20 +118,25 @@ impl GitRepository for RealGitRepository {
}
}
- fn dot_git_dir(&self) -> PathBuf {
+ fn path(&self) -> PathBuf {
let repo = self.repository.lock();
repo.path().into()
}
- fn load_index_text(&self, relative_file_path: &Path) -> Option<String> {
- fn logic(repo: &git2::Repository, relative_file_path: &Path) -> Result<Option<String>> {
+ fn main_repository_path(&self) -> PathBuf {
+ let repo = self.repository.lock();
+ repo.commondir().into()
+ }
+
+ fn load_index_text(&self, path: &RepoPath) -> Option<String> {
+ fn logic(repo: &git2::Repository, path: &RepoPath) -> Result<Option<String>> {
const STAGE_NORMAL: i32 = 0;
let index = repo.index()?;
// This check is required because index.get_path() unwraps internally :(
- check_path_to_repo_path_errors(relative_file_path)?;
+ check_path_to_repo_path_errors(path)?;
- let oid = match index.get_path(relative_file_path, STAGE_NORMAL) {
+ let oid = match index.get_path(path, STAGE_NORMAL) {
Some(entry) if entry.mode != GIT_MODE_SYMLINK => entry.id,
_ => return Ok(None),
};
@@ -123,13 +145,22 @@ impl GitRepository for RealGitRepository {
Ok(Some(String::from_utf8(content)?))
}
- match logic(&self.repository.lock(), relative_file_path) {
+ match logic(&self.repository.lock(), path) {
Ok(value) => return value,
- Err(err) => log::error!("Error loading head text: {:?}", err),
+ Err(err) => log::error!("Error loading index text: {:?}", err),
}
None
}
+ fn load_committed_text(&self, path: &RepoPath) -> Option<String> {
+ let repo = self.repository.lock();
+ let head = repo.head().ok()?.peel_to_tree().log_err()?;
+ let oid = head.get_path(path).ok()?.id();
+ let content = repo.find_blob(oid).log_err()?.content().to_owned();
+ let content = String::from_utf8(content).log_err()?;
+ Some(content)
+ }
+
fn remote_url(&self, name: &str) -> Option<String> {
let repo = self.repository.lock();
let remote = repo.find_remote(name).ok()?;
@@ -147,6 +178,18 @@ impl GitRepository for RealGitRepository {
Some(self.repository.lock().head().ok()?.target()?.to_string())
}
+ fn merge_head_shas(&self) -> Vec<String> {
+ let mut shas = Vec::default();
+ self.repository
+ .lock()
+ .mergehead_foreach(|oid| {
+ shas.push(oid.to_string());
+ true
+ })
+ .ok();
+ shas
+ }
+
fn status(&self, path_prefixes: &[RepoPath]) -> Result<GitStatus> {
let working_directory = self
.repository
@@ -250,13 +293,16 @@ impl GitRepository for RealGitRepository {
.to_path_buf();
if !paths.is_empty() {
- let cmd = new_std_command(&self.git_binary_path)
+ let output = new_std_command(&self.git_binary_path)
.current_dir(&working_directory)
.args(["update-index", "--add", "--remove", "--"])
.args(paths.iter().map(|p| p.as_ref()))
- .status()?;
- if !cmd.success() {
- return Err(anyhow!("Failed to stage paths: {cmd}"));
+ .output()?;
+ if !output.status.success() {
+ return Err(anyhow!(
+ "Failed to stage paths:\n{}",
+ String::from_utf8_lossy(&output.stderr)
+ ));
}
}
Ok(())
@@ -271,46 +317,45 @@ impl GitRepository for RealGitRepository {
.to_path_buf();
if !paths.is_empty() {
- let cmd = new_std_command(&self.git_binary_path)
+ let output = new_std_command(&self.git_binary_path)
.current_dir(&working_directory)
.args(["reset", "--quiet", "--"])
.args(paths.iter().map(|p| p.as_ref()))
- .status()?;
- if !cmd.success() {
- return Err(anyhow!("Failed to unstage paths: {cmd}"));
+ .output()?;
+ if !output.status.success() {
+ return Err(anyhow!(
+ "Failed to unstage:\n{}",
+ String::from_utf8_lossy(&output.stderr)
+ ));
}
}
Ok(())
}
- fn commit(&self, name_and_email: Option<(&str, &str)>) -> Result<()> {
+ fn commit(&self, message: &str, name_and_email: Option<(&str, &str)>) -> Result<()> {
let working_directory = self
.repository
.lock()
.workdir()
.context("failed to read git work directory")?
.to_path_buf();
- let commit_file = self.dot_git_dir().join(*COMMIT_MESSAGE);
- let commit_file_path = commit_file.to_string_lossy();
- let mut args = vec![
- "commit",
- "--quiet",
- "-F",
- commit_file_path.as_ref(),
- "--cleanup=strip",
- ];
+ let mut args = vec!["commit", "--quiet", "-m", message, "--cleanup=strip"];
let author = name_and_email.map(|(name, email)| format!("{name} <{email}>"));
if let Some(author) = author.as_deref() {
args.push("--author");
args.push(author);
}
- let cmd = new_std_command(&self.git_binary_path)
+ let output = new_std_command(&self.git_binary_path)
.current_dir(&working_directory)
.args(args)
- .status()?;
- if !cmd.success() {
- return Err(anyhow!("Failed to commit: {cmd}"));
+ .output()?;
+
+ if !output.status.success() {
+ return Err(anyhow!(
+ "Failed to commit:\n{}",
+ String::from_utf8_lossy(&output.stderr)
+ ));
}
Ok(())
}
@@ -323,10 +368,11 @@ pub struct FakeGitRepository {
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
- pub dot_git_dir: PathBuf,
+ pub path: PathBuf,
pub event_emitter: smol::channel::Sender<PathBuf>,
- pub index_contents: HashMap<PathBuf, String>,
- pub blames: HashMap<PathBuf, Blame>,
+ pub head_contents: HashMap<RepoPath, String>,
+ pub index_contents: HashMap<RepoPath, String>,
+ pub blames: HashMap<RepoPath, Blame>,
pub statuses: HashMap<RepoPath, FileStatus>,
pub current_branch_name: Option<String>,
pub branches: HashSet<String>,
@@ -339,10 +385,11 @@ impl FakeGitRepository {
}
impl FakeGitRepositoryState {
- pub fn new(dot_git_dir: PathBuf, event_emitter: smol::channel::Sender<PathBuf>) -> Self {
+ pub fn new(path: PathBuf, event_emitter: smol::channel::Sender<PathBuf>) -> Self {
FakeGitRepositoryState {
- dot_git_dir,
+ path,
event_emitter,
+ head_contents: Default::default(),
index_contents: Default::default(),
blames: Default::default(),
statuses: Default::default(),
@@ -355,9 +402,14 @@ impl FakeGitRepositoryState {
impl GitRepository for FakeGitRepository {
fn reload_index(&self) {}
- fn load_index_text(&self, path: &Path) -> Option<String> {
+ fn load_index_text(&self, path: &RepoPath) -> Option<String> {
+ let state = self.state.lock();
+ state.index_contents.get(path.as_ref()).cloned()
+ }
+
+ fn load_committed_text(&self, path: &RepoPath) -> Option<String> {
let state = self.state.lock();
- state.index_contents.get(path).cloned()
+ state.head_contents.get(path.as_ref()).cloned()
}
fn remote_url(&self, _name: &str) -> Option<String> {
@@ -373,9 +425,17 @@ impl GitRepository for FakeGitRepository {
None
}
- fn dot_git_dir(&self) -> PathBuf {
+ fn merge_head_shas(&self) -> Vec<String> {
+ vec![]
+ }
+
+ fn path(&self) -> PathBuf {
let state = self.state.lock();
- state.dot_git_dir.clone()
+ state.path.clone()
+ }
+
+ fn main_repository_path(&self) -> PathBuf {
+ self.path()
}
fn status(&self, path_prefixes: &[RepoPath]) -> Result<GitStatus> {
@@ -426,7 +486,7 @@ impl GitRepository for FakeGitRepository {
state.current_branch_name = Some(name.to_owned());
state
.event_emitter
- .try_send(state.dot_git_dir.clone())
+ .try_send(state.path.clone())
.expect("Dropped repo change event");
Ok(())
}
@@ -436,7 +496,7 @@ impl GitRepository for FakeGitRepository {
state.branches.insert(name.to_owned());
state
.event_emitter
- .try_send(state.dot_git_dir.clone())
+ .try_send(state.path.clone())
.expect("Dropped repo change event");
Ok(())
}
@@ -458,7 +518,7 @@ impl GitRepository for FakeGitRepository {
unimplemented!()
}
- fn commit(&self, _name_and_email: Option<(&str, &str)>) -> Result<()> {
+ fn commit(&self, _message: &str, _name_and_email: Option<(&str, &str)>) -> Result<()> {
unimplemented!()
}
}
@@ -529,6 +589,12 @@ impl From<&Path> for RepoPath {
}
}
+impl From<Arc<Path>> for RepoPath {
+ fn from(value: Arc<Path>) -> Self {
+ RepoPath(value)
+ }
+}
+
impl From<PathBuf> for RepoPath {
fn from(value: PathBuf) -> Self {
RepoPath::new(value)
@@ -134,7 +134,11 @@ impl FileStatus {
}
pub fn has_changes(&self) -> bool {
- self.is_modified() || self.is_created() || self.is_deleted() || self.is_untracked()
+ self.is_modified()
+ || self.is_created()
+ || self.is_deleted()
+ || self.is_untracked()
+ || self.is_conflicted()
}
pub fn is_modified(self) -> bool {
@@ -16,17 +16,19 @@ path = "src/git_ui.rs"
anyhow.workspace = true
collections.workspace = true
db.workspace = true
+diff.workspace = true
editor.workspace = true
feature_flags.workspace = true
futures.workspace = true
git.workspace = true
gpui.workspace = true
language.workspace = true
-multi_buffer.workspace = true
menu.workspace = true
+multi_buffer.workspace = true
+panel.workspace = true
+picker.workspace = true
postage.workspace = true
project.workspace = true
-rpc.workspace = true
schemars.workspace = true
serde.workspace = true
serde_derive.workspace = true
@@ -36,7 +38,6 @@ theme.workspace = true
ui.workspace = true
util.workspace = true
workspace.workspace = true
-picker.workspace = true
[target.'cfg(windows)'.dependencies]
windows.workspace = true
@@ -4,7 +4,6 @@ use crate::ProjectDiff;
use crate::{
git_panel_settings::GitPanelSettings, git_status_icon, repository_selector::RepositorySelector,
};
-use anyhow::{Context as _, Result};
use collections::HashMap;
use db::kvp::KEY_VALUE_STORE;
use editor::actions::MoveToEnd;
@@ -12,29 +11,28 @@ use editor::scroll::ScrollbarAutoHide;
use editor::{Editor, EditorMode, EditorSettings, MultiBuffer, ShowScrollbar};
use git::repository::RepoPath;
use git::status::FileStatus;
-use git::{
- CommitAllChanges, CommitChanges, RevertAll, StageAll, ToggleStaged, UnstageAll, COMMIT_MESSAGE,
-};
+use git::{Commit, ToggleStaged};
use gpui::*;
-use language::{Buffer, BufferId};
+use language::{Buffer, File};
use menu::{SelectFirst, SelectLast, SelectNext, SelectPrev};
-use project::git::{GitRepo, RepositoryHandle};
-use project::{CreateOptions, Fs, Project, ProjectPath};
-use rpc::proto;
+use multi_buffer::ExcerptInfo;
+use panel::PanelHeader;
+use project::git::{GitEvent, Repository};
+use project::{Fs, Project, ProjectPath};
use serde::{Deserialize, Serialize};
use settings::Settings as _;
use std::{collections::HashSet, path::PathBuf, sync::Arc, time::Duration, usize};
use theme::ThemeSettings;
use ui::{
prelude::*, ButtonLike, Checkbox, Divider, DividerColor, ElevationIndex, IndentGuideColors,
- ListHeader, ListItem, ListItemSpacing, Scrollbar, ScrollbarState, Tooltip,
+ ListItem, ListItemSpacing, Scrollbar, ScrollbarState, Tooltip,
};
use util::{maybe, ResultExt, TryFutureExt};
use workspace::notifications::{DetachAndPromptErr, NotificationId};
use workspace::Toast;
use workspace::{
dock::{DockPosition, Panel, PanelEvent},
- Item, Workspace,
+ Workspace,
};
actions!(
@@ -43,7 +41,6 @@ actions!(
Close,
ToggleFocus,
OpenMenu,
- OpenSelected,
FocusEditor,
FocusChanges,
FillCoAuthors,
@@ -60,6 +57,17 @@ pub fn init(cx: &mut App) {
workspace.register_action(|workspace, _: &ToggleFocus, window, cx| {
workspace.toggle_panel_focus::<GitPanel>(window, cx);
});
+
+ workspace.register_action(|workspace, _: &Commit, window, cx| {
+ workspace.open_panel::<GitPanel>(window, cx);
+ if let Some(git_panel) = workspace.panel::<GitPanel>(cx) {
+ git_panel
+ .read(cx)
+ .commit_editor
+ .focus_handle(cx)
+ .focus(window);
+ }
+ });
},
)
.detach();
@@ -76,34 +84,32 @@ struct SerializedGitPanel {
width: Option<Pixels>,
}
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum Section {
- Changed,
+ Conflict,
+ Tracked,
New,
}
-impl Section {
- pub fn contains(&self, status: FileStatus) -> bool {
- match self {
- Section::Changed => !status.is_created(),
- Section::New => status.is_created(),
- }
- }
-}
-
#[derive(Debug, PartialEq, Eq, Clone)]
struct GitHeaderEntry {
header: Section,
- all_staged: ToggleState,
}
impl GitHeaderEntry {
- pub fn contains(&self, status_entry: &GitStatusEntry) -> bool {
- self.header.contains(status_entry.status)
+ pub fn contains(&self, status_entry: &GitStatusEntry, repo: &Repository) -> bool {
+ let this = &self.header;
+ let status = status_entry.status;
+ match this {
+ Section::Conflict => repo.has_conflict(&status_entry.repo_path),
+ Section::Tracked => !status.is_created(),
+ Section::New => status.is_created(),
+ }
}
pub fn title(&self) -> &'static str {
match self.header {
- Section::Changed => "Changed",
+ Section::Conflict => "Conflicts",
+ Section::Tracked => "Changed",
Section::New => "New",
}
}
@@ -116,9 +122,9 @@ enum GitListEntry {
}
impl GitListEntry {
- fn status_entry(&self) -> Option<GitStatusEntry> {
+ fn status_entry(&self) -> Option<&GitStatusEntry> {
match self {
- GitListEntry::GitStatusEntry(entry) => Some(entry.clone()),
+ GitListEntry::GitStatusEntry(entry) => Some(entry),
_ => None,
}
}
@@ -126,11 +132,18 @@ impl GitListEntry {
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct GitStatusEntry {
- depth: usize,
- display_name: String,
- repo_path: RepoPath,
- status: FileStatus,
- is_staged: Option<bool>,
+ pub(crate) depth: usize,
+ pub(crate) display_name: String,
+ pub(crate) repo_path: RepoPath,
+ pub(crate) status: FileStatus,
+ pub(crate) is_staged: Option<bool>,
+}
+
+struct PendingOperation {
+ finished: bool,
+ will_become_staged: bool,
+ repo_paths: HashSet<RepoPath>,
+ op_id: usize,
}
pub struct GitPanel {
@@ -141,7 +154,7 @@ pub struct GitPanel {
pending_serialization: Task<Option<()>>,
workspace: WeakEntity<Workspace>,
project: Entity<Project>,
- active_repository: Option<RepositoryHandle>,
+ active_repository: Option<Entity<Repository>>,
scroll_handle: UniformListScrollHandle,
scrollbar_state: ScrollbarState,
selected_entry: Option<usize>,
@@ -152,66 +165,15 @@ pub struct GitPanel {
entries: Vec<GitListEntry>,
entries_by_path: collections::HashMap<RepoPath, usize>,
width: Option<Pixels>,
- pending: HashMap<RepoPath, bool>,
- commit_task: Task<Result<()>>,
- commit_pending: bool,
-}
-
-fn commit_message_buffer(
- project: &Entity<Project>,
- active_repository: &RepositoryHandle,
- cx: &mut App,
-) -> Task<Result<Entity<Buffer>>> {
- match &active_repository.git_repo {
- GitRepo::Local(repo) => {
- let commit_message_file = repo.dot_git_dir().join(*COMMIT_MESSAGE);
- let fs = project.read(cx).fs().clone();
- let project = project.downgrade();
- cx.spawn(|mut cx| async move {
- fs.create_file(
- &commit_message_file,
- CreateOptions {
- overwrite: false,
- ignore_if_exists: true,
- },
- )
- .await
- .with_context(|| format!("creating commit message file {commit_message_file:?}"))?;
- let buffer = project
- .update(&mut cx, |project, cx| {
- project.open_local_buffer(&commit_message_file, cx)
- })?
- .await
- .with_context(|| {
- format!("opening commit message buffer at {commit_message_file:?}",)
- })?;
- Ok(buffer)
- })
- }
- GitRepo::Remote {
- project_id,
- client,
- worktree_id,
- work_directory_id,
- } => {
- let request = client.request(proto::OpenCommitMessageBuffer {
- project_id: project_id.0,
- worktree_id: worktree_id.to_proto(),
- work_directory_id: work_directory_id.to_proto(),
- });
- let project = project.downgrade();
- cx.spawn(|mut cx| async move {
- let response = request.await.context("requesting to open commit buffer")?;
- let buffer_id = BufferId::new(response.buffer_id)?;
- let buffer = project
- .update(&mut cx, {
- |project, cx| project.wait_for_remote_buffer(buffer_id, cx)
- })?
- .await?;
- Ok(buffer)
- })
- }
- }
+ pending: Vec<PendingOperation>,
+ pending_commit: Option<Task<()>>,
+
+ conflicted_staged_count: usize,
+ conflicted_count: usize,
+ tracked_staged_count: usize,
+ tracked_count: usize,
+ new_staged_count: usize,
+ new_count: usize,
}
fn commit_message_editor(
@@ -287,9 +249,12 @@ impl GitPanel {
&git_state,
window,
move |this, git_state, event, window, cx| match event {
- project::git::Event::RepositoriesUpdated => {
+ GitEvent::FileSystemUpdated => {
+ this.schedule_update(false, window, cx);
+ }
+ GitEvent::ActiveRepositoryChanged | GitEvent::GitStateUpdated => {
this.active_repository = git_state.read(cx).active_repository();
- this.schedule_update(window, cx);
+ this.schedule_update(true, window, cx);
}
},
)
@@ -303,7 +268,7 @@ impl GitPanel {
pending_serialization: Task::ready(None),
entries: Vec::new(),
entries_by_path: HashMap::default(),
- pending: HashMap::default(),
+ pending: Vec::new(),
current_modifiers: window.modifiers(),
width: Some(px(360.)),
scrollbar_state: ScrollbarState::new(scroll_handle.clone())
@@ -313,16 +278,21 @@ impl GitPanel {
show_scrollbar: false,
hide_scrollbar_task: None,
update_visible_entries_task: Task::ready(()),
- commit_task: Task::ready(Ok(())),
- commit_pending: false,
+ pending_commit: None,
active_repository,
scroll_handle,
fs,
commit_editor,
project,
workspace,
+ conflicted_count: 0,
+ conflicted_staged_count: 0,
+ tracked_staged_count: 0,
+ tracked_count: 0,
+ new_staged_count: 0,
+ new_count: 0,
};
- git_panel.schedule_update(window, cx);
+ git_panel.schedule_update(false, window, cx);
git_panel.show_scrollbar = git_panel.should_show_scrollbar(cx);
git_panel
});
@@ -346,17 +316,21 @@ impl GitPanel {
git_panel
}
- pub fn set_focused_path(&mut self, path: ProjectPath, _: &mut Window, cx: &mut Context<Self>) {
+ pub fn select_entry_by_path(
+ &mut self,
+ path: ProjectPath,
+ _: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
let Some(git_repo) = self.active_repository.as_ref() else {
return;
};
- let Some(repo_path) = git_repo.project_path_to_repo_path(&path) else {
+ let Some(repo_path) = git_repo.read(cx).project_path_to_repo_path(&path) else {
return;
};
let Some(ix) = self.entries_by_path.get(&repo_path) else {
return;
};
-
self.selected_entry = Some(*ix);
cx.notify();
}
@@ -568,7 +542,7 @@ impl GitPanel {
.active_repository
.as_ref()
.map_or(false, |active_repository| {
- active_repository.entry_count() > 0
+ active_repository.read(cx).entry_count() > 0
});
if have_entries && self.selected_entry.is_none() {
self.selected_entry = Some(0);
@@ -593,10 +567,17 @@ impl GitPanel {
self.selected_entry.and_then(|i| self.entries.get(i))
}
- fn open_selected(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context<Self>) {
- if let Some(entry) = self.selected_entry.and_then(|i| self.entries.get(i)) {
- self.open_entry(entry, cx);
- }
+ fn open_selected(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
+ maybe!({
+ let entry = self.entries.get(self.selected_entry?)?.status_entry()?;
+
+ self.workspace
+ .update(cx, |workspace, cx| {
+ ProjectDiff::deploy_at(workspace, Some(entry.clone()), window, cx);
+ })
+ .ok()
+ });
+ self.focus_handle.focus(window);
}
fn toggle_staged_for_entry(
@@ -617,39 +598,54 @@ impl GitPanel {
}
}
GitListEntry::Header(section) => {
- let goal_staged_state = !section.all_staged.selected();
+ let goal_staged_state = !self.header_state(section.header).selected();
+ let repository = active_repository.read(cx);
let entries = self
.entries
.iter()
.filter_map(|entry| entry.status_entry())
.filter(|status_entry| {
- section.contains(&status_entry)
+ section.contains(&status_entry, repository)
&& status_entry.is_staged != Some(goal_staged_state)
})
- .map(|status_entry| status_entry.repo_path)
+ .map(|status_entry| status_entry.repo_path.clone())
.collect::<Vec<_>>();
- (!section.all_staged.selected(), entries)
+ (goal_staged_state, entries)
}
};
- for repo_path in repo_paths.iter() {
- self.pending.insert(repo_path.clone(), stage);
- }
+
+ let op_id = self.pending.iter().map(|p| p.op_id).max().unwrap_or(0) + 1;
+ self.pending.push(PendingOperation {
+ op_id,
+ will_become_staged: stage,
+ repo_paths: repo_paths.iter().cloned().collect(),
+ finished: false,
+ });
+ let repo_paths = repo_paths.clone();
+ let active_repository = active_repository.clone();
+ let repository = active_repository.read(cx);
+ self.update_counts(repository);
+ cx.notify();
cx.spawn({
- let repo_paths = repo_paths.clone();
- let active_repository = active_repository.clone();
|this, mut cx| async move {
- let result = if stage {
- active_repository.stage_entries(repo_paths.clone()).await
- } else {
- active_repository.unstage_entries(repo_paths.clone()).await
- };
+ let result = cx
+ .update(|cx| {
+ if stage {
+ active_repository.read(cx).stage_entries(repo_paths.clone())
+ } else {
+ active_repository
+ .read(cx)
+ .unstage_entries(repo_paths.clone())
+ }
+ })?
+ .await?;
this.update(&mut cx, |this, cx| {
- for repo_path in repo_paths {
- if this.pending.get(&repo_path) == Some(&stage) {
- this.pending.remove(&repo_path);
+ for pending in this.pending.iter_mut() {
+ if pending.op_id == op_id {
+ pending.finished = true
}
}
result
@@ -675,190 +671,89 @@ impl GitPanel {
}
}
- fn open_entry(&self, entry: &GitListEntry, cx: &mut Context<Self>) {
- let Some(status_entry) = entry.status_entry() else {
- return;
- };
- let Some(active_repository) = self.active_repository.as_ref() else {
- return;
- };
- let Some(path) = active_repository.repo_path_to_project_path(&status_entry.repo_path)
- else {
- return;
- };
- let path_exists = self.project.update(cx, |project, cx| {
- project.entry_for_path(&path, cx).is_some()
- });
- if !path_exists {
- return;
- }
- // TODO maybe move all of this into project?
- cx.emit(Event::OpenedEntry { path });
- }
-
- fn stage_all(&mut self, _: &git::StageAll, _window: &mut Window, cx: &mut Context<Self>) {
- let Some(active_repository) = self.active_repository.as_ref().cloned() else {
- return;
- };
- let mut pending_paths = Vec::new();
- for entry in self.entries.iter() {
- if let Some(status_entry) = entry.status_entry() {
- self.pending.insert(status_entry.repo_path.clone(), true);
- pending_paths.push(status_entry.repo_path.clone());
- }
- }
-
- cx.spawn(|this, mut cx| async move {
- if let Err(e) = active_repository.stage_all().await {
- this.update(&mut cx, |this, cx| {
- this.show_err_toast(e, cx);
- })
- .ok();
- };
- this.update(&mut cx, |this, _cx| {
- for repo_path in pending_paths {
- this.pending.remove(&repo_path);
- }
- })
- })
- .detach();
- }
-
- fn unstage_all(&mut self, _: &git::UnstageAll, _window: &mut Window, cx: &mut Context<Self>) {
- let Some(active_repository) = self.active_repository.as_ref().cloned() else {
- return;
- };
- let mut pending_paths = Vec::new();
- for entry in self.entries.iter() {
- if let Some(status_entry) = entry.status_entry() {
- self.pending.insert(status_entry.repo_path.clone(), false);
- pending_paths.push(status_entry.repo_path.clone());
+ /// Commit all staged changes
+ fn commit(&mut self, _: &git::Commit, window: &mut Window, cx: &mut Context<Self>) {
+ let editor = self.commit_editor.read(cx);
+ if editor.is_empty(cx) {
+ if !editor.focus_handle(cx).contains_focused(window, cx) {
+ editor.focus_handle(cx).focus(window);
+ return;
}
}
- cx.spawn(|this, mut cx| async move {
- if let Err(e) = active_repository.unstage_all().await {
- this.update(&mut cx, |this, cx| {
- this.show_err_toast(e, cx);
- })
- .ok();
- };
- this.update(&mut cx, |this, _cx| {
- for repo_path in pending_paths {
- this.pending.remove(&repo_path);
- }
- })
- })
- .detach();
+ self.commit_changes(window, cx)
}
- fn discard_all(&mut self, _: &git::RevertAll, _window: &mut Window, _cx: &mut Context<Self>) {
- // TODO: Implement discard all
- println!("Discard all triggered");
- }
-
- /// Commit all staged changes
- fn commit_changes(
- &mut self,
- _: &git::CommitChanges,
- name_and_email: Option<(SharedString, SharedString)>,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
+ fn commit_changes(&mut self, window: &mut Window, cx: &mut Context<Self>) {
let Some(active_repository) = self.active_repository.clone() else {
return;
};
- if !active_repository.can_commit(false) {
- return;
- }
- if self.commit_editor.read(cx).is_empty(cx) {
- return;
- }
- self.commit_pending = true;
- let save_task = self.commit_editor.update(cx, |editor, cx| {
- editor.save(false, self.project.clone(), window, cx)
- });
- let commit_editor = self.commit_editor.clone();
- self.commit_task = cx.spawn_in(window, |git_panel, mut cx| async move {
- let result = maybe!(async {
- save_task.await?;
- active_repository.commit(name_and_email).await?;
- cx.update(|window, cx| {
- commit_editor.update(cx, |editor, cx| editor.clear(window, cx));
- })
- })
- .await;
-
- git_panel.update(&mut cx, |git_panel, cx| {
- git_panel.commit_pending = false;
- result
- .map_err(|e| {
- git_panel.show_err_toast(e, cx);
- })
- .ok();
+ let error_spawn = |message, window: &mut Window, cx: &mut App| {
+ let prompt = window.prompt(PromptLevel::Warning, message, None, &["Ok"], cx);
+ cx.spawn(|_| async move {
+ prompt.await.ok();
})
- });
- }
-
- /// Commit all changes, regardless of whether they are staged or not
- fn commit_tracked_changes(
- &mut self,
- _: &git::CommitAllChanges,
- name_and_email: Option<(SharedString, SharedString)>,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- let Some(active_repository) = self.active_repository.clone() else {
- return;
+ .detach();
};
- if !active_repository.can_commit(true) {
+
+ if self.has_unstaged_conflicts() {
+ error_spawn(
+ "There are still conflicts. You must stage these before committing",
+ window,
+ cx,
+ );
return;
}
- if self.commit_editor.read(cx).is_empty(cx) {
+
+ let message = self.commit_editor.read(cx).text(cx);
+ if message.trim().is_empty() {
+ self.commit_editor.read(cx).focus_handle(cx).focus(window);
return;
}
- self.commit_pending = true;
- let save_task = self.commit_editor.update(cx, |editor, cx| {
- editor.save(false, self.project.clone(), window, cx)
- });
- let commit_editor = self.commit_editor.clone();
- let tracked_files = self
- .entries
- .iter()
- .filter_map(|entry| entry.status_entry())
- .filter(|status_entry| {
- Section::Changed.contains(status_entry.status)
- && !status_entry.is_staged.unwrap_or(false)
- })
- .map(|status_entry| status_entry.repo_path)
- .collect::<Vec<_>>();
+ let task = if self.has_staged_changes() {
+ // Repository serializes all git operations, so we can just send a commit immediately
+ let commit_task = active_repository.read(cx).commit(message.into(), None);
+ cx.background_executor()
+ .spawn(async move { commit_task.await? })
+ } else {
+ let changed_files = self
+ .entries
+ .iter()
+ .filter_map(|entry| entry.status_entry())
+ .filter(|status_entry| !status_entry.status.is_created())
+ .map(|status_entry| status_entry.repo_path.clone())
+ .collect::<Vec<_>>();
+
+ if changed_files.is_empty() {
+ error_spawn("No changes to commit", window, cx);
+ return;
+ }
- self.commit_task = cx.spawn_in(window, |git_panel, mut cx| async move {
- let result = maybe!(async {
- save_task.await?;
- active_repository.stage_entries(tracked_files).await?;
- active_repository.commit(name_and_email).await
+ let stage_task = active_repository.read(cx).stage_entries(changed_files);
+ cx.spawn(|_, mut cx| async move {
+ stage_task.await??;
+ let commit_task = active_repository
+ .update(&mut cx, |repo, _| repo.commit(message.into(), None))?;
+ commit_task.await?
})
- .await;
- cx.update(|window, cx| match result {
- Ok(_) => commit_editor.update(cx, |editor, cx| {
- editor.clear(window, cx);
- }),
-
- Err(e) => {
- git_panel
- .update(cx, |git_panel, cx| {
- git_panel.show_err_toast(e, cx);
- })
- .ok();
+ };
+ let task = cx.spawn_in(window, |this, mut cx| async move {
+ let result = task.await;
+ this.update_in(&mut cx, |this, window, cx| {
+ this.pending_commit.take();
+ match result {
+ Ok(()) => {
+ this.commit_editor
+ .update(cx, |editor, cx| editor.clear(window, cx));
+ }
+ Err(e) => this.show_err_toast(e, cx),
}
- })?;
-
- git_panel.update(&mut cx, |git_panel, _| {
- git_panel.commit_pending = false;
})
+ .ok();
});
+
+ self.pending_commit = Some(task);
}
fn fill_co_authors(&mut self, _: &FillCoAuthors, window: &mut Window, cx: &mut Context<Self>) {
@@ -926,50 +821,72 @@ impl GitPanel {
});
}
- fn schedule_update(&mut self, window: &mut Window, cx: &mut Context<Self>) {
- let project = self.project.clone();
+ fn schedule_update(
+ &mut self,
+ clear_pending: bool,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
let handle = cx.entity().downgrade();
+ self.reopen_commit_buffer(window, cx);
self.update_visible_entries_task = cx.spawn_in(window, |_, mut cx| async move {
cx.background_executor().timer(UPDATE_DEBOUNCE).await;
if let Some(git_panel) = handle.upgrade() {
- let Ok(commit_message_buffer) = git_panel.update_in(&mut cx, |git_panel, _, cx| {
- git_panel
- .active_repository
- .as_ref()
- .map(|active_repository| {
- commit_message_buffer(&project, active_repository, cx)
- })
- }) else {
- return;
- };
- let commit_message_buffer = match commit_message_buffer {
- Some(commit_message_buffer) => match commit_message_buffer
- .await
- .context("opening commit buffer on repo update")
- .log_err()
- {
- Some(buffer) => Some(buffer),
- None => return,
- },
- None => None,
- };
-
git_panel
- .update_in(&mut cx, |git_panel, window, cx| {
+ .update_in(&mut cx, |git_panel, _, cx| {
+ if clear_pending {
+ git_panel.clear_pending();
+ }
git_panel.update_visible_entries(cx);
- git_panel.commit_editor =
- cx.new(|cx| commit_message_editor(commit_message_buffer, window, cx));
})
.ok();
}
});
}
+ fn reopen_commit_buffer(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ let Some(active_repo) = self.active_repository.as_ref() else {
+ return;
+ };
+ let load_buffer = active_repo.update(cx, |active_repo, cx| {
+ let project = self.project.read(cx);
+ active_repo.open_commit_buffer(
+ Some(project.languages().clone()),
+ project.buffer_store().clone(),
+ cx,
+ )
+ });
+
+ cx.spawn_in(window, |git_panel, mut cx| async move {
+ let buffer = load_buffer.await?;
+ git_panel.update_in(&mut cx, |git_panel, window, cx| {
+ if git_panel
+ .commit_editor
+ .read(cx)
+ .buffer()
+ .read(cx)
+ .as_singleton()
+ .as_ref()
+ != Some(&buffer)
+ {
+ git_panel.commit_editor =
+ cx.new(|cx| commit_message_editor(Some(buffer), window, cx));
+ }
+ })
+ })
+ .detach_and_log_err(cx);
+ }
+
+ fn clear_pending(&mut self) {
+ self.pending.retain(|v| !v.finished)
+ }
+
fn update_visible_entries(&mut self, cx: &mut Context<Self>) {
self.entries.clear();
self.entries_by_path.clear();
let mut changed_entries = Vec::new();
let mut new_entries = Vec::new();
+ let mut conflict_entries = Vec::new();
let Some(repo) = self.active_repository.as_ref() else {
// Just clear entries if no repository is active.
@@ -978,30 +895,18 @@ impl GitPanel {
};
// First pass - collect all paths
+ let repo = repo.read(cx);
let path_set = HashSet::from_iter(repo.status().map(|entry| entry.repo_path));
// Second pass - create entries with proper depth calculation
- let mut new_any_staged = false;
- let mut new_all_staged = true;
- let mut changed_any_staged = false;
- let mut changed_all_staged = true;
-
for entry in repo.status() {
let (depth, difference) =
Self::calculate_depth_and_difference(&entry.repo_path, &path_set);
+ let is_conflict = repo.has_conflict(&entry.repo_path);
let is_new = entry.status.is_created();
let is_staged = entry.status.is_staged();
- let new_is_staged = is_staged.unwrap_or(false);
- if is_new {
- new_any_staged |= new_is_staged;
- new_all_staged &= new_is_staged;
- } else {
- changed_any_staged |= new_is_staged;
- changed_all_staged &= new_is_staged;
- }
-
let display_name = if difference > 1 {
// Show partial path for deeply nested files
entry
@@ -1029,7 +934,9 @@ impl GitPanel {
is_staged,
};
- if is_new {
+ if is_conflict {
+ conflict_entries.push(entry);
+ } else if is_new {
new_entries.push(entry);
} else {
changed_entries.push(entry);
@@ -1037,15 +944,24 @@ impl GitPanel {
}
// Sort entries by path to maintain consistent order
+ conflict_entries.sort_by(|a, b| a.repo_path.cmp(&b.repo_path));
changed_entries.sort_by(|a, b| a.repo_path.cmp(&b.repo_path));
new_entries.sort_by(|a, b| a.repo_path.cmp(&b.repo_path));
+ if conflict_entries.len() > 0 {
+ self.entries.push(GitListEntry::Header(GitHeaderEntry {
+ header: Section::Conflict,
+ }));
+ self.entries.extend(
+ conflict_entries
+ .into_iter()
+ .map(GitListEntry::GitStatusEntry),
+ );
+ }
+
if changed_entries.len() > 0 {
- let toggle_state =
- ToggleState::from_any_and_all(changed_any_staged, changed_all_staged);
self.entries.push(GitListEntry::Header(GitHeaderEntry {
- header: Section::Changed,
- all_staged: toggle_state,
+ header: Section::Tracked,
}));
self.entries.extend(
changed_entries
@@ -1054,10 +970,8 @@ impl GitPanel {
);
}
if new_entries.len() > 0 {
- let toggle_state = ToggleState::from_any_and_all(new_any_staged, new_all_staged);
self.entries.push(GitListEntry::Header(GitHeaderEntry {
header: Section::New,
- all_staged: toggle_state,
}));
self.entries
.extend(new_entries.into_iter().map(GitListEntry::GitStatusEntry));
@@ -1065,20 +979,91 @@ impl GitPanel {
for (ix, entry) in self.entries.iter().enumerate() {
if let Some(status_entry) = entry.status_entry() {
- self.entries_by_path.insert(status_entry.repo_path, ix);
+ self.entries_by_path
+ .insert(status_entry.repo_path.clone(), ix);
}
}
+ self.update_counts(repo);
self.select_first_entry_if_none(cx);
cx.notify();
}
+ fn update_counts(&mut self, repo: &Repository) {
+ self.conflicted_count = 0;
+ self.conflicted_staged_count = 0;
+ self.new_count = 0;
+ self.tracked_count = 0;
+ self.new_staged_count = 0;
+ self.tracked_staged_count = 0;
+ for entry in &self.entries {
+ let Some(status_entry) = entry.status_entry() else {
+ continue;
+ };
+ if repo.has_conflict(&status_entry.repo_path) {
+ self.conflicted_count += 1;
+ if self.entry_is_staged(status_entry) != Some(false) {
+ self.conflicted_staged_count += 1;
+ }
+ } else if status_entry.status.is_created() {
+ self.new_count += 1;
+ if self.entry_is_staged(status_entry) != Some(false) {
+ self.new_staged_count += 1;
+ }
+ } else {
+ self.tracked_count += 1;
+ if self.entry_is_staged(status_entry) != Some(false) {
+ self.tracked_staged_count += 1;
+ }
+ }
+ }
+ }
+
+ fn entry_is_staged(&self, entry: &GitStatusEntry) -> Option<bool> {
+ for pending in self.pending.iter().rev() {
+ if pending.repo_paths.contains(&entry.repo_path) {
+ return Some(pending.will_become_staged);
+ }
+ }
+ entry.is_staged
+ }
+
+ fn has_staged_changes(&self) -> bool {
+ self.tracked_staged_count > 0
+ || self.new_staged_count > 0
+ || self.conflicted_staged_count > 0
+ }
+
+ fn has_tracked_changes(&self) -> bool {
+ self.tracked_count > 0
+ }
+
+ fn has_unstaged_conflicts(&self) -> bool {
+ self.conflicted_count > 0 && self.conflicted_count != self.conflicted_staged_count
+ }
+
+ fn header_state(&self, header_type: Section) -> ToggleState {
+ let (staged_count, count) = match header_type {
+ Section::New => (self.new_staged_count, self.new_count),
+ Section::Tracked => (self.tracked_staged_count, self.tracked_count),
+ Section::Conflict => (self.conflicted_staged_count, self.conflicted_count),
+ };
+ if staged_count == 0 {
+ ToggleState::Unselected
+ } else if count == staged_count {
+ ToggleState::Selected
+ } else {
+ ToggleState::Indeterminate
+ }
+ }
+
fn show_err_toast(&self, e: anyhow::Error, cx: &mut App) {
let Some(workspace) = self.workspace.upgrade() else {
return;
};
let notif_id = NotificationId::Named("git-operation-error".into());
+
let message = e.to_string();
workspace.update(cx, |workspace, cx| {
let toast = Toast::new(notif_id, message).on_click("Open Zed Log", |window, cx| {
@@ -1087,10 +1072,7 @@ impl GitPanel {
workspace.show_toast(toast, cx);
});
}
-}
-// GitPanel –– Render
-impl GitPanel {
pub fn panel_button(
&self,
id: impl Into<SharedString>,
@@ -1106,6 +1088,10 @@ impl GitPanel {
.style(ButtonStyle::Filled)
}
+ pub fn indent_size(&self, window: &Window, cx: &mut Context<Self>) -> Pixels {
+ Checkbox::container_size(cx).to_pixels(window.rem_size())
+ }
+
pub fn render_divider(&self, _cx: &mut Context<Self>) -> impl IntoElement {
h_flex()
.items_center()
@@ -1115,7 +1101,7 @@ impl GitPanel {
pub fn render_panel_header(
&self,
- _window: &mut Window,
+ window: &mut Window,
cx: &mut Context<Self>,
) -> impl IntoElement {
let all_repositories = self
@@ -1127,7 +1113,7 @@ impl GitPanel {
let entry_count = self
.active_repository
.as_ref()
- .map_or(0, RepositoryHandle::entry_count);
+ .map_or(0, |repo| repo.read(cx).entry_count());
let changes_string = match entry_count {
0 => "No changes".to_string(),
@@ -1135,11 +1121,7 @@ impl GitPanel {
n => format!("{} changes", n),
};
- h_flex()
- .h(px(32.))
- .items_center()
- .px_2()
- .bg(ElevationIndex::Surface.bg(cx))
+ self.panel_header_container(window, cx)
.child(h_flex().gap_2().child(if all_repositories.len() <= 1 {
div()
.id("changes-label")
@@ -1,11 +1,8 @@
-use std::{
- any::{Any, TypeId},
- path::Path,
- sync::Arc,
-};
+use std::any::{Any, TypeId};
use anyhow::Result;
use collections::HashSet;
+use diff::BufferDiff;
use editor::{scroll::Autoscroll, Editor, EditorEvent};
use feature_flags::FeatureFlagViewExt;
use futures::StreamExt;
@@ -13,9 +10,9 @@ use gpui::{
actions, AnyElement, AnyView, App, AppContext, AsyncWindowContext, Entity, EventEmitter,
FocusHandle, Focusable, Render, Subscription, Task, WeakEntity,
};
-use language::{Anchor, Buffer, Capability, OffsetRangeExt};
-use multi_buffer::MultiBuffer;
-use project::{buffer_store::BufferChangeSet, git::GitState, Project, ProjectPath};
+use language::{Anchor, Buffer, Capability, OffsetRangeExt, Point};
+use multi_buffer::{MultiBuffer, PathKey};
+use project::{git::GitState, Project, ProjectPath};
use theme::ActiveTheme;
use ui::prelude::*;
use util::ResultExt as _;
@@ -25,7 +22,7 @@ use workspace::{
ItemNavHistory, ToolbarItemLocation, Workspace,
};
-use crate::git_panel::GitPanel;
+use crate::git_panel::{GitPanel, GitPanelAddon, GitStatusEntry};
actions!(git, [Diff]);
@@ -33,22 +30,27 @@ pub(crate) struct ProjectDiff {
multibuffer: Entity<MultiBuffer>,
editor: Entity<Editor>,
project: Entity<Project>,
+ git_panel: Entity<GitPanel>,
git_state: Entity<GitState>,
workspace: WeakEntity<Workspace>,
focus_handle: FocusHandle,
update_needed: postage::watch::Sender<()>,
- pending_scroll: Option<Arc<Path>>,
+ pending_scroll: Option<PathKey>,
_task: Task<Result<()>>,
_subscription: Subscription,
}
struct DiffBuffer {
- abs_path: Arc<Path>,
+ path_key: PathKey,
buffer: Entity<Buffer>,
- change_set: Entity<BufferChangeSet>,
+ diff: Entity<BufferDiff>,
}
+const CONFLICT_NAMESPACE: &'static str = "0";
+const TRACKED_NAMESPACE: &'static str = "1";
+const NEW_NAMESPACE: &'static str = "2";
+
impl ProjectDiff {
pub(crate) fn register(
_: &mut Workspace,
@@ -72,7 +74,7 @@ impl ProjectDiff {
pub fn deploy_at(
workspace: &mut Workspace,
- path: Option<Arc<Path>>,
+ entry: Option<GitStatusEntry>,
window: &mut Window,
cx: &mut Context<Workspace>,
) {
@@ -80,9 +82,16 @@ impl ProjectDiff {
workspace.activate_item(&existing, true, true, window, cx);
existing
} else {
- let workspace_handle = cx.entity().downgrade();
- let project_diff =
- cx.new(|cx| Self::new(workspace.project().clone(), workspace_handle, window, cx));
+ let workspace_handle = cx.entity();
+ let project_diff = cx.new(|cx| {
+ Self::new(
+ workspace.project().clone(),
+ workspace_handle,
+ workspace.panel::<GitPanel>(cx).unwrap(),
+ window,
+ cx,
+ )
+ });
workspace.add_item_to_active_pane(
Box::new(project_diff.clone()),
None,
@@ -92,16 +101,17 @@ impl ProjectDiff {
);
project_diff
};
- if let Some(path) = path {
+ if let Some(entry) = entry {
project_diff.update(cx, |project_diff, cx| {
- project_diff.scroll_to(path, window, cx);
+ project_diff.scroll_to(entry, window, cx);
})
}
}
fn new(
project: Entity<Project>,
- workspace: WeakEntity<Workspace>,
+ workspace: Entity<Workspace>,
+ git_panel: Entity<GitPanel>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
@@ -117,6 +127,9 @@ impl ProjectDiff {
cx,
);
diff_display_editor.set_expand_all_diff_hunks(cx);
+ diff_display_editor.register_addon(GitPanelAddon {
+ git_panel: git_panel.clone(),
+ });
diff_display_editor
});
cx.subscribe_in(&editor, window, Self::handle_editor_event)
@@ -126,10 +139,8 @@ impl ProjectDiff {
let git_state_subscription = cx.subscribe_in(
&git_state,
window,
- move |this, _git_state, event, _window, _cx| match event {
- project::git::Event::RepositoriesUpdated => {
- *this.update_needed.borrow_mut() = ();
- }
+ move |this, _git_state, _event, _window, _cx| {
+ *this.update_needed.borrow_mut() = ();
},
);
@@ -144,7 +155,8 @@ impl ProjectDiff {
Self {
project,
git_state: git_state.clone(),
- workspace,
+ git_panel: git_panel.clone(),
+ workspace: workspace.downgrade(),
focus_handle,
editor,
multibuffer,
@@ -155,15 +167,46 @@ impl ProjectDiff {
}
}
- pub fn scroll_to(&mut self, path: Arc<Path>, window: &mut Window, cx: &mut Context<Self>) {
- if let Some(position) = self.multibuffer.read(cx).location_for_path(&path, cx) {
+ pub fn scroll_to(
+ &mut self,
+ entry: GitStatusEntry,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let Some(git_repo) = self.git_state.read(cx).active_repository() else {
+ return;
+ };
+ let repo = git_repo.read(cx);
+
+ let Some(abs_path) = repo
+ .repo_path_to_project_path(&entry.repo_path)
+ .and_then(|project_path| self.project.read(cx).absolute_path(&project_path, cx))
+ else {
+ return;
+ };
+
+ let namespace = if repo.has_conflict(&entry.repo_path) {
+ CONFLICT_NAMESPACE
+ } else if entry.status.is_created() {
+ NEW_NAMESPACE
+ } else {
+ TRACKED_NAMESPACE
+ };
+
+ let path_key = PathKey::namespaced(namespace, &abs_path);
+
+ self.scroll_to_path(path_key, window, cx)
+ }
+
+ fn scroll_to_path(&mut self, path_key: PathKey, window: &mut Window, cx: &mut Context<Self>) {
+ if let Some(position) = self.multibuffer.read(cx).location_for_path(&path_key, cx) {
self.editor.update(cx, |editor, cx| {
editor.change_selections(Some(Autoscroll::focused()), window, cx, |s| {
s.select_ranges([position..position]);
})
})
} else {
- self.pending_scroll = Some(path);
+ self.pending_scroll = Some(path_key);
}
}
@@ -192,7 +235,7 @@ impl ProjectDiff {
.update(cx, |workspace, cx| {
if let Some(git_panel) = workspace.panel::<GitPanel>(cx) {
git_panel.update(cx, |git_panel, cx| {
- git_panel.set_focused_path(project_path.into(), window, cx)
+ git_panel.select_entry_by_path(project_path.into(), window, cx)
})
}
})
@@ -213,38 +256,47 @@ impl ProjectDiff {
let mut previous_paths = self.multibuffer.read(cx).paths().collect::<HashSet<_>>();
let mut result = vec![];
- for entry in repo.status() {
- if !entry.status.has_changes() {
- continue;
+ repo.update(cx, |repo, cx| {
+ for entry in repo.status() {
+ if !entry.status.has_changes() {
+ continue;
+ }
+ let Some(project_path) = repo.repo_path_to_project_path(&entry.repo_path) else {
+ continue;
+ };
+ let Some(abs_path) = self.project.read(cx).absolute_path(&project_path, cx) else {
+ continue;
+ };
+ let namespace = if repo.has_conflict(&entry.repo_path) {
+ CONFLICT_NAMESPACE
+ } else if entry.status.is_created() {
+ NEW_NAMESPACE
+ } else {
+ TRACKED_NAMESPACE
+ };
+ let path_key = PathKey::namespaced(namespace, &abs_path);
+
+ previous_paths.remove(&path_key);
+ let load_buffer = self
+ .project
+ .update(cx, |project, cx| project.open_buffer(project_path, cx));
+
+ let project = self.project.clone();
+ result.push(cx.spawn(|_, mut cx| async move {
+ let buffer = load_buffer.await?;
+ let changes = project
+ .update(&mut cx, |project, cx| {
+ project.open_uncommitted_diff(buffer.clone(), cx)
+ })?
+ .await?;
+ Ok(DiffBuffer {
+ path_key,
+ buffer,
+ diff: changes,
+ })
+ }));
}
- let Some(project_path) = repo.repo_path_to_project_path(&entry.repo_path) else {
- continue;
- };
- let Some(abs_path) = self.project.read(cx).absolute_path(&project_path, cx) else {
- continue;
- };
- let abs_path = Arc::from(abs_path);
-
- previous_paths.remove(&abs_path);
- let load_buffer = self
- .project
- .update(cx, |project, cx| project.open_buffer(project_path, cx));
-
- let project = self.project.clone();
- result.push(cx.spawn(|_, mut cx| async move {
- let buffer = load_buffer.await?;
- let changes = project
- .update(&mut cx, |project, cx| {
- project.open_unstaged_changes(buffer.clone(), cx)
- })?
- .await?;
- Ok(DiffBuffer {
- abs_path,
- buffer,
- change_set: changes,
- })
- }));
- }
+ });
self.multibuffer.update(cx, |multibuffer, cx| {
for path in previous_paths {
multibuffer.remove_excerpts_for_path(path, cx);
@@ -259,28 +311,31 @@ impl ProjectDiff {
window: &mut Window,
cx: &mut Context<Self>,
) {
- let abs_path = diff_buffer.abs_path;
+ let path_key = diff_buffer.path_key;
let buffer = diff_buffer.buffer;
- let change_set = diff_buffer.change_set;
+ let diff = diff_buffer.diff;
let snapshot = buffer.read(cx).snapshot();
- let diff_hunk_ranges = change_set
- .read(cx)
- .diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot)
- .map(|diff_hunk| diff_hunk.buffer_range.to_point(&snapshot))
- .collect::<Vec<_>>();
+ let diff = diff.read(cx);
+ let diff_hunk_ranges = if diff.snapshot.base_text.is_none() {
+ vec![Point::zero()..snapshot.max_point()]
+ } else {
+ diff.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot)
+ .map(|diff_hunk| diff_hunk.buffer_range.to_point(&snapshot))
+ .collect::<Vec<_>>()
+ };
self.multibuffer.update(cx, |multibuffer, cx| {
multibuffer.set_excerpts_for_path(
- abs_path.clone(),
+ path_key.clone(),
buffer,
diff_hunk_ranges,
editor::DEFAULT_MULTIBUFFER_CONTEXT,
cx,
);
});
- if self.pending_scroll.as_ref() == Some(&abs_path) {
- self.scroll_to(abs_path, window, cx);
+ if self.pending_scroll.as_ref() == Some(&path_key) {
+ self.scroll_to_path(path_key, window, cx);
}
}
@@ -390,9 +445,16 @@ impl Item for ProjectDiff {
where
Self: Sized,
{
- Some(
- cx.new(|cx| ProjectDiff::new(self.project.clone(), self.workspace.clone(), window, cx)),
- )
+ let workspace = self.workspace.upgrade()?;
+ Some(cx.new(|cx| {
+ ProjectDiff::new(
+ self.project.clone(),
+ workspace,
+ self.git_panel.clone(),
+ window,
+ cx,
+ )
+ }))
}
fn is_dirty(&self, cx: &App) -> bool {
@@ -4,7 +4,7 @@ use gpui::{
};
use picker::{Picker, PickerDelegate};
use project::{
- git::{GitState, RepositoryHandle},
+ git::{GitState, Repository},
Project,
};
use std::sync::Arc;
@@ -49,7 +49,7 @@ impl RepositorySelector {
fn handle_project_git_event(
&mut self,
git_state: &Entity<GitState>,
- _event: &project::git::Event,
+ _event: &project::git::GitEvent,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -117,13 +117,13 @@ impl<T: PopoverTrigger> RenderOnce for RepositorySelectorPopoverMenu<T> {
pub struct RepositorySelectorDelegate {
project: WeakEntity<Project>,
repository_selector: WeakEntity<RepositorySelector>,
- repository_entries: Vec<RepositoryHandle>,
- filtered_repositories: Vec<RepositoryHandle>,
+ repository_entries: Vec<Entity<Repository>>,
+ filtered_repositories: Vec<Entity<Repository>>,
selected_index: usize,
}
impl RepositorySelectorDelegate {
- pub fn update_repository_entries(&mut self, all_repositories: Vec<RepositoryHandle>) {
+ pub fn update_repository_entries(&mut self, all_repositories: Vec<Entity<Repository>>) {
self.repository_entries = all_repositories.clone();
self.filtered_repositories = all_repositories;
self.selected_index = 0;
@@ -194,7 +194,7 @@ impl PickerDelegate for RepositorySelectorDelegate {
let Some(selected_repo) = self.filtered_repositories.get(self.selected_index) else {
return;
};
- selected_repo.activate(cx);
+ selected_repo.update(cx, |selected_repo, cx| selected_repo.activate(cx));
self.dismissed(window, cx);
}
@@ -222,7 +222,7 @@ impl PickerDelegate for RepositorySelectorDelegate {
) -> Option<Self::ListItem> {
let project = self.project.upgrade()?;
let repo_info = self.filtered_repositories.get(ix)?;
- let display_name = repo_info.display_name(project.read(cx), cx);
+ let display_name = repo_info.read(cx).display_name(project.read(cx), cx);
// TODO: Implement repository item rendering
Some(
ListItem::new(ix)
@@ -305,8 +305,14 @@ pub enum Model {
Gemini15Pro,
#[serde(rename = "gemini-1.5-flash")]
Gemini15Flash,
- #[serde(rename = "gemini-2.0-flash-exp")]
+ #[serde(rename = "gemini-2.0-pro-exp")]
+ Gemini20Pro,
+ #[serde(rename = "gemini-2.0-flash")]
Gemini20Flash,
+ #[serde(rename = "gemini-2.0-flash-thinking-exp")]
+ Gemini20FlashThinking,
+ #[serde(rename = "gemini-2.0-flash-lite-preview")]
+ Gemini20FlashLite,
#[serde(rename = "custom")]
Custom {
name: String,
@@ -321,7 +327,10 @@ impl Model {
match self {
Model::Gemini15Pro => "gemini-1.5-pro",
Model::Gemini15Flash => "gemini-1.5-flash",
- Model::Gemini20Flash => "gemini-2.0-flash-exp",
+ Model::Gemini20Pro => "gemini-2.0-pro-exp",
+ Model::Gemini20Flash => "gemini-2.0-flash",
+ Model::Gemini20FlashThinking => "gemini-2.0-flash-thinking-exp",
+ Model::Gemini20FlashLite => "gemini-2.0-flash-lite-preview",
Model::Custom { name, .. } => name,
}
}
@@ -330,7 +339,10 @@ impl Model {
match self {
Model::Gemini15Pro => "Gemini 1.5 Pro",
Model::Gemini15Flash => "Gemini 1.5 Flash",
+ Model::Gemini20Pro => "Gemini 2.0 Pro",
Model::Gemini20Flash => "Gemini 2.0 Flash",
+ Model::Gemini20FlashThinking => "Gemini 2.0 Flash Thinking",
+ Model::Gemini20FlashLite => "Gemini 2.0 Flash Lite",
Self::Custom {
name, display_name, ..
} => display_name.as_ref().unwrap_or(name),
@@ -341,7 +353,10 @@ impl Model {
match self {
Model::Gemini15Pro => 2_000_000,
Model::Gemini15Flash => 1_000_000,
+ Model::Gemini20Pro => 2_000_000,
Model::Gemini20Flash => 1_000_000,
+ Model::Gemini20FlashThinking => 1_000_000,
+ Model::Gemini20FlashLite => 1_000_000,
Model::Custom { max_tokens, .. } => *max_tokens,
}
}
@@ -364,6 +364,20 @@ impl EntityInputHandler for TextInput {
),
))
}
+
+ fn character_index_for_point(
+ &mut self,
+ point: gpui::Point<Pixels>,
+ _window: &mut Window,
+ _cx: &mut Context<Self>,
+ ) -> Option<usize> {
+ let line_point = self.last_bounds?.localize(&point)?;
+ let last_layout = self.last_layout.as_ref()?;
+
+ assert_eq!(last_layout.text, self.content);
+ let utf8_index = last_layout.index_for_x(point.x - line_point.x)?;
+ Some(self.offset_to_utf16(utf8_index))
+ }
}
struct TextElement {
@@ -0,0 +1,64 @@
+use gpui::{
+ div, prelude::*, px, size, App, Application, Bounds, Context, Window, WindowBounds,
+ WindowOptions,
+};
+
+struct HelloWorld {}
+
+impl Render for HelloWorld {
+ fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
+ div()
+ .bg(gpui::white())
+ .flex()
+ .flex_col()
+ .gap_3()
+ .p_4()
+ .size_full()
+ .child(div().child("Text left"))
+ .child(div().text_center().child("Text center"))
+ .child(div().text_right().child("Text right"))
+ .child(
+ div()
+ .flex()
+ .gap_2()
+ .justify_between()
+ .child(
+ div()
+ .w(px(400.))
+ .border_1()
+ .border_color(gpui::blue())
+ .p_1()
+ .whitespace_nowrap()
+ .overflow_hidden()
+ .text_center()
+ .child("A long non-wrapping text align center"),
+ )
+ .child(
+ div()
+ .w_32()
+ .border_1()
+ .border_color(gpui::blue())
+ .p_1()
+ .whitespace_nowrap()
+ .overflow_hidden()
+ .text_right()
+ .child("100%"),
+ ),
+ )
+ }
+}
+
+fn main() {
+ Application::new().run(|cx: &mut App| {
+ let bounds = Bounds::centered(None, size(px(800.0), px(600.0)), cx);
+ cx.open_window(
+ WindowOptions {
+ window_bounds: Some(WindowBounds::Windowed(bounds)),
+ ..Default::default()
+ },
+ |_, cx| cx.new(|_| HelloWorld {}),
+ )
+ .unwrap();
+ cx.activate(true);
+ });
+}
@@ -1684,7 +1684,7 @@ impl Interactivity {
.ok()
.and_then(|mut text| text.pop())
{
- text.paint(hitbox.origin, FONT_SIZE, TextAlign::Left, window, cx)
+ text.paint(hitbox.origin, FONT_SIZE, TextAlign::Left, None, window, cx)
.ok();
let text_bounds = crate::Bounds {
@@ -2008,18 +2008,27 @@ impl Interactivity {
let build_tooltip = Rc::new(move |window: &mut Window, cx: &mut App| {
Some(((tooltip_builder.build)(window, cx), tooltip_is_hoverable))
});
- // Use bounds instead of testing hitbox since check_is_hovered is also called
- // during prepaint.
- let source_bounds = hitbox.bounds;
- let check_is_hovered = Rc::new(move |window: &Window| {
- pending_mouse_down.borrow().is_none()
- && source_bounds.contains(&window.mouse_position())
+ // Use bounds instead of testing hitbox since this is called during prepaint.
+ let check_is_hovered_during_prepaint = Rc::new({
+ let pending_mouse_down = pending_mouse_down.clone();
+ let source_bounds = hitbox.bounds;
+ move |window: &Window| {
+ pending_mouse_down.borrow().is_none()
+ && source_bounds.contains(&window.mouse_position())
+ }
+ });
+ let check_is_hovered = Rc::new({
+ let hitbox = hitbox.clone();
+ move |window: &Window| {
+ pending_mouse_down.borrow().is_none() && hitbox.is_hovered(window)
+ }
});
register_tooltip_mouse_handlers(
&active_tooltip,
self.tooltip_id,
build_tooltip,
check_is_hovered,
+ check_is_hovered_during_prepaint,
window,
);
}
@@ -2361,6 +2370,7 @@ pub(crate) fn register_tooltip_mouse_handlers(
tooltip_id: Option<TooltipId>,
build_tooltip: Rc<dyn Fn(&mut Window, &mut App) -> Option<(AnyView, bool)>>,
check_is_hovered: Rc<dyn Fn(&Window) -> bool>,
+ check_is_hovered_during_prepaint: Rc<dyn Fn(&Window) -> bool>,
window: &mut Window,
) {
window.on_mouse_event({
@@ -2372,6 +2382,7 @@ pub(crate) fn register_tooltip_mouse_handlers(
&active_tooltip,
&build_tooltip,
&check_is_hovered,
+ &check_is_hovered_during_prepaint,
phase,
window,
cx,
@@ -2398,10 +2409,22 @@ pub(crate) fn register_tooltip_mouse_handlers(
});
}
+/// Handles displaying tooltips when an element is hovered.
+///
+/// The mouse hovering logic also relies on being called from window prepaint in order to handle the
+/// case where the element the tooltip is on is not rendered - in that case its mouse listeners are
+/// also not registered. During window prepaint, the hitbox information is not available, so
+/// `check_is_hovered_during_prepaint` is used which bases the check off of the absolute bounds of
+/// the element.
+///
+/// TODO: There's a minor bug due to the use of absolute bounds while checking during prepaint - it
+/// does not know if the hitbox is occluded. In the case where a tooltip gets displayed and then
+/// gets occluded after display, it will stick around until the mouse exits the hover bounds.
fn handle_tooltip_mouse_move(
active_tooltip: &Rc<RefCell<Option<ActiveTooltip>>>,
build_tooltip: &Rc<dyn Fn(&mut Window, &mut App) -> Option<(AnyView, bool)>>,
check_is_hovered: &Rc<dyn Fn(&Window) -> bool>,
+ check_is_hovered_during_prepaint: &Rc<dyn Fn(&Window) -> bool>,
phase: DispatchPhase,
window: &mut Window,
cx: &mut App,
@@ -2447,7 +2470,7 @@ fn handle_tooltip_mouse_move(
let delayed_show_task = window.spawn(cx, {
let active_tooltip = active_tooltip.clone();
let build_tooltip = build_tooltip.clone();
- let check_is_hovered = check_is_hovered.clone();
+ let check_is_hovered_during_prepaint = check_is_hovered_during_prepaint.clone();
move |mut cx| async move {
cx.background_executor().timer(TOOLTIP_SHOW_DELAY).await;
cx.update(|window, cx| {
@@ -2463,7 +2486,7 @@ fn handle_tooltip_mouse_move(
handle_tooltip_check_visible_and_update(
&active_tooltip,
tooltip_is_hoverable,
- &check_is_hovered,
+ &check_is_hovered_during_prepaint,
tooltip_bounds,
window,
cx,
@@ -3,6 +3,7 @@ use crate::{
DefiniteLength, Element, ElementId, GlobalElementId, Hitbox, Image, InteractiveElement,
Interactivity, IntoElement, LayoutId, Length, ObjectFit, Pixels, RenderImage, Resource,
SharedString, SharedUri, StyleRefinement, Styled, SvgSize, Task, Window,
+ SMOOTH_SVG_SCALE_FACTOR,
};
use anyhow::{anyhow, Result};
@@ -610,7 +611,7 @@ impl Asset for ImageAssetLoader {
} else {
let pixmap =
// TODO: Can we make svgs always rescale?
- svg_renderer.render_pixmap(&bytes, SvgSize::ScaleFactor(1.0))?;
+ svg_renderer.render_pixmap(&bytes, SvgSize::ScaleFactor(SMOOTH_SVG_SCALE_FACTOR))?;
let mut buffer =
ImageBuffer::from_raw(pixmap.width(), pixmap.height(), pixmap.take()).unwrap();
@@ -392,8 +392,15 @@ impl TextLayout {
let mut line_origin = bounds.origin;
let text_style = window.text_style();
for line in &element_state.lines {
- line.paint(line_origin, line_height, text_style.text_align, window, cx)
- .log_err();
+ line.paint(
+ line_origin,
+ line_height,
+ text_style.text_align,
+ Some(bounds),
+ window,
+ cx,
+ )
+ .log_err();
line_origin.y += line.size(line_height).height;
}
}
@@ -734,7 +741,6 @@ impl Element for InteractiveText {
if let Some(tooltip_builder) = self.tooltip_builder.clone() {
let active_tooltip = interactive_state.active_tooltip.clone();
- let pending_mouse_down = interactive_state.mouse_down_index.clone();
let build_tooltip = Rc::new({
let tooltip_is_hoverable = false;
let text_layout = text_layout.clone();
@@ -746,11 +752,12 @@ impl Element for InteractiveText {
.map(|view| (view, tooltip_is_hoverable))
}
});
- // Use bounds instead of testing hitbox since check_is_hovered is also
- // called during prepaint.
- let source_bounds = hitbox.bounds;
- let check_is_hovered = Rc::new({
+
+ // Use bounds instead of testing hitbox since this is called during prepaint.
+ let check_is_hovered_during_prepaint = Rc::new({
+ let source_bounds = hitbox.bounds;
let text_layout = text_layout.clone();
+ let pending_mouse_down = interactive_state.mouse_down_index.clone();
move |window: &Window| {
text_layout
.index_for_position(window.mouse_position())
@@ -759,11 +766,26 @@ impl Element for InteractiveText {
&& pending_mouse_down.get().is_none()
}
});
+
+ let check_is_hovered = Rc::new({
+ let hitbox = hitbox.clone();
+ let text_layout = text_layout.clone();
+ let pending_mouse_down = interactive_state.mouse_down_index.clone();
+ move |window: &Window| {
+ text_layout
+ .index_for_position(window.mouse_position())
+ .is_ok()
+ && hitbox.is_hovered(window)
+ && pending_mouse_down.get().is_none()
+ }
+ });
+
register_tooltip_mouse_handlers(
&active_tooltip,
self.tooltip_id,
build_tooltip,
check_is_hovered,
+ check_is_hovered_during_prepaint,
window,
);
}
@@ -217,6 +217,19 @@ impl Point<Pixels> {
}
}
+impl<T> Point<T>
+where
+ T: Sub<T, Output = T> + Debug + Clone + Default,
+{
+    /// Get the position of this point relative to the given origin.
+ pub fn relative_to(&self, origin: &Point<T>) -> Point<T> {
+ point(
+ self.x.clone() - origin.x.clone(),
+ self.y.clone() - origin.y.clone(),
+ )
+ }
+}
+
impl<T, Rhs> Mul<Rhs> for Point<T>
where
T: Mul<Rhs, Output = T> + Clone + Default + Debug,
@@ -376,6 +389,13 @@ pub struct Size<T: Clone + Default + Debug> {
pub height: T,
}
+impl<T: Clone + Default + Debug> Size<T> {
+    /// Create a new `Size`, a synonym for [`size`].
+ pub fn new(width: T, height: T) -> Self {
+ size(width, height)
+ }
+}
+
/// Constructs a new `Size<T>` with the provided width and height.
///
/// # Arguments
@@ -1456,6 +1476,17 @@ where
}
}
+impl<T> Bounds<T>
+where
+ T: Add<T, Output = T> + PartialOrd + Clone + Default + Debug + Sub<T, Output = T>,
+{
+    /// Convert a point to the coordinate space defined by this `Bounds`, returning `None` if the point lies outside it.
+ pub fn localize(&self, point: &Point<T>) -> Option<Point<T>> {
+ self.contains(point)
+ .then(|| point.relative_to(&self.origin))
+ }
+}
+
/// Checks if the bounds represent an empty area.
///
/// # Returns
@@ -62,6 +62,14 @@ pub trait EntityInputHandler: 'static + Sized {
window: &mut Window,
cx: &mut Context<Self>,
) -> Option<Bounds<Pixels>>;
+
+    /// See [`InputHandler::character_index_for_point`] for details.
+ fn character_index_for_point(
+ &mut self,
+ point: crate::Point<Pixels>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Option<usize>;
}
/// The canonical implementation of [`PlatformInputHandler`]. Call [`Window::handle_input`]
@@ -158,4 +166,15 @@ impl<V: EntityInputHandler> InputHandler for ElementInputHandler<V> {
view.bounds_for_range(range_utf16, self.element_bounds, window, cx)
})
}
+
+ fn character_index_for_point(
+ &mut self,
+ point: crate::Point<Pixels>,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Option<usize> {
+ self.view.update(cx, |view, cx| {
+ view.character_index_for_point(point, window, cx)
+ })
+ }
}
@@ -792,6 +792,14 @@ impl PlatformInputHandler {
cx,
)
}
+
+ #[allow(unused)]
+ pub fn character_index_for_point(&mut self, point: Point<Pixels>) -> Option<usize> {
+ self.cx
+ .update(|window, cx| self.handler.character_index_for_point(point, window, cx))
+ .ok()
+ .flatten()
+ }
}
/// A struct representing a selection in a text buffer, in UTF16 characters.
@@ -882,6 +890,16 @@ pub trait InputHandler: 'static {
cx: &mut App,
) -> Option<Bounds<Pixels>>;
+    /// Get the character offset for the given point in terms of UTF-16 characters.
+ ///
+ /// Corresponds to [characterIndexForPoint:](https://developer.apple.com/documentation/appkit/nstextinputclient/characterindex(for:))
+ fn character_index_for_point(
+ &mut self,
+ point: Point<Pixels>,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Option<usize>;
+
/// Allows a given input context to opt into getting raw key repeats instead of
/// sending these to the platform.
/// TODO: Ideally we should be able to set ApplePressAndHoldEnabled in NSUserDefaults
@@ -1132,11 +1132,10 @@ impl Dispatch<wl_keyboard::WlKeyboard, ()> for WaylandClientStatePtr {
size,
..
} => {
- assert_eq!(
- format,
- wl_keyboard::KeymapFormat::XkbV1,
- "Unsupported keymap format"
- );
+ if format != wl_keyboard::KeymapFormat::XkbV1 {
+ log::error!("Received keymap format {:?}, expected XkbV1", format);
+ return;
+ }
let xkb_context = xkb::Context::new(xkb::CONTEXT_NO_FLAGS);
let keymap = unsafe {
xkb::Keymap::new_from_fd(
@@ -17,8 +17,8 @@ use cocoa::{
},
base::{id, nil},
foundation::{
- NSArray, NSAutoreleasePool, NSDictionary, NSFastEnumeration, NSInteger, NSPoint, NSRect,
- NSSize, NSString, NSUInteger,
+ NSArray, NSAutoreleasePool, NSDictionary, NSFastEnumeration, NSInteger, NSNotFound,
+ NSPoint, NSRect, NSSize, NSString, NSUInteger,
},
};
use core_graphics::display::{CGDirectDisplayID, CGPoint, CGRect};
@@ -227,6 +227,11 @@ unsafe fn build_classes() {
accepts_first_mouse as extern "C" fn(&Object, Sel, id) -> BOOL,
);
+ decl.add_method(
+ sel!(characterIndexForPoint:),
+ character_index_for_point as extern "C" fn(&Object, Sel, NSPoint) -> u64,
+ );
+
decl.register()
};
}
@@ -1687,17 +1692,7 @@ extern "C" fn first_rect_for_character_range(
range: NSRange,
_: id,
) -> NSRect {
- let frame: NSRect = unsafe {
- let state = get_window_state(this);
- let lock = state.lock();
- let mut frame = NSWindow::frame(lock.native_window);
- let content_layout_rect: CGRect = msg_send![lock.native_window, contentLayoutRect];
- let style_mask: NSWindowStyleMask = msg_send![lock.native_window, styleMask];
- if !style_mask.contains(NSWindowStyleMask::NSFullSizeContentViewWindowMask) {
- frame.origin.y -= frame.size.height - content_layout_rect.size.height;
- }
- frame
- };
+ let frame = get_frame(this);
with_input_handler(this, |input_handler| {
input_handler.bounds_for_range(range.to_range()?)
})
@@ -1718,6 +1713,20 @@ extern "C" fn first_rect_for_character_range(
)
}
+fn get_frame(this: &Object) -> NSRect {
+ unsafe {
+ let state = get_window_state(this);
+ let lock = state.lock();
+ let mut frame = NSWindow::frame(lock.native_window);
+ let content_layout_rect: CGRect = msg_send![lock.native_window, contentLayoutRect];
+ let style_mask: NSWindowStyleMask = msg_send![lock.native_window, styleMask];
+ if !style_mask.contains(NSWindowStyleMask::NSFullSizeContentViewWindowMask) {
+ frame.origin.y -= frame.size.height - content_layout_rect.size.height;
+ }
+ frame
+ }
+}
+
extern "C" fn insert_text(this: &Object, _: Sel, text: id, replacement_range: NSRange) {
unsafe {
let is_attributed_string: BOOL =
@@ -1831,6 +1840,24 @@ extern "C" fn accepts_first_mouse(this: &Object, _: Sel, _: id) -> BOOL {
YES
}
+extern "C" fn character_index_for_point(this: &Object, _: Sel, position: NSPoint) -> u64 {
+ let position = screen_point_to_gpui_point(this, position);
+ with_input_handler(this, |input_handler| {
+ input_handler.character_index_for_point(position)
+ })
+ .flatten()
+ .map(|index| index as u64)
+ .unwrap_or(NSNotFound as u64)
+}
+
+fn screen_point_to_gpui_point(this: &Object, position: NSPoint) -> Point<Pixels> {
+ let frame = get_frame(this);
+ let window_x = position.x - frame.origin.x;
+ let window_y = frame.size.height - (position.y - frame.origin.y);
+ let position = point(px(window_x as f32), px(window_y as f32));
+ position
+}
+
extern "C" fn dragging_entered(this: &Object, _: Sel, dragging_info: id) -> NSDragOperation {
let window_state = unsafe { get_window_state(this) };
let position = drag_event_position(&window_state, dragging_info);
@@ -428,17 +428,24 @@ impl DirectWriteState {
target_font.fallbacks.as_ref(),
)
.unwrap_or_else(|| {
- let family = self.system_ui_font_name.clone();
- log::error!("{} not found, use {} instead.", target_font.family, family);
- self.get_font_id_from_font_collection(
- family.as_ref(),
- target_font.weight,
- target_font.style,
- &target_font.features,
- target_font.fallbacks.as_ref(),
- true,
- )
- .unwrap()
+ #[cfg(any(test, feature = "test-support"))]
+ {
+ panic!("ERROR: {} font not found!", target_font.family);
+ }
+ #[cfg(not(any(test, feature = "test-support")))]
+ {
+ let family = self.system_ui_font_name.clone();
+ log::error!("{} not found, use {} instead.", target_font.family, family);
+ self.get_font_id_from_font_collection(
+ family.as_ref(),
+ target_font.weight,
+ target_font.style,
+ &target_font.features,
+ target_font.fallbacks.as_ref(),
+ true,
+ )
+ .unwrap()
+ }
})
}
}
@@ -756,21 +756,20 @@ fn should_auto_hide_scrollbars() -> Result<bool> {
#[cfg(test)]
mod tests {
- use crate::{ClipboardItem, Platform, WindowsPlatform};
+ use crate::{read_from_clipboard, write_to_clipboard, ClipboardItem};
#[test]
fn test_clipboard() {
- let platform = WindowsPlatform::new();
- let item = ClipboardItem::new_string("你好".to_string());
- platform.write_to_clipboard(item.clone());
- assert_eq!(platform.read_from_clipboard(), Some(item));
+ let item = ClipboardItem::new_string("你好,我是张小白".to_string());
+ write_to_clipboard(item.clone());
+ assert_eq!(read_from_clipboard(), Some(item));
let item = ClipboardItem::new_string("12345".to_string());
- platform.write_to_clipboard(item.clone());
- assert_eq!(platform.read_from_clipboard(), Some(item));
+ write_to_clipboard(item.clone());
+ assert_eq!(read_from_clipboard(), Some(item));
let item = ClipboardItem::new_string_with_json_metadata("abcdef".to_string(), vec![3, 4]);
- platform.write_to_clipboard(item.clone());
- assert_eq!(platform.read_from_clipboard(), Some(item));
+ write_to_clipboard(item.clone());
+ assert_eq!(read_from_clipboard(), Some(item));
}
}
@@ -3,6 +3,9 @@ use anyhow::anyhow;
use resvg::tiny_skia::Pixmap;
use std::{hash::Hash, sync::Arc};
+/// When rendering SVGs, we render them at twice the size to get a higher-quality result.
+pub const SMOOTH_SVG_SCALE_FACTOR: f32 = 2.;
+
#[derive(Clone, PartialEq, Hash, Eq)]
pub(crate) struct RenderSvgParams {
pub(crate) path: SharedString,
@@ -107,15 +107,21 @@ impl WrappedLine {
origin: Point<Pixels>,
line_height: Pixels,
align: TextAlign,
+ bounds: Option<Bounds<Pixels>>,
window: &mut Window,
cx: &mut App,
) -> Result<()> {
+ let align_width = match bounds {
+ Some(bounds) => Some(bounds.size.width),
+ None => self.layout.wrap_width,
+ };
+
paint_line(
origin,
&self.layout.unwrapped_layout,
line_height,
align,
- self.layout.wrap_width,
+ align_width,
&self.decoration_runs,
&self.wrap_boundaries,
window,
@@ -222,7 +228,7 @@ fn paint_line(
glyph_origin.x = aligned_origin_x(
origin,
align_width.unwrap_or(layout.width),
- prev_glyph_position.x,
+ glyph.position.x,
&align,
layout,
wraps.peek(),
@@ -426,17 +432,7 @@ fn aligned_origin_x(
wrap_boundary: Option<&&WrapBoundary>,
) -> Pixels {
let end_of_line = if let Some(WrapBoundary { run_ix, glyph_ix }) = wrap_boundary {
- if layout.runs[*run_ix].glyphs.len() == glyph_ix + 1 {
- // Next glyph is in next run
- layout
- .runs
- .get(run_ix + 1)
- .and_then(|run| run.glyphs.first())
- .map_or(layout.width, |glyph| glyph.position.x)
- } else {
- // Get next glyph
- layout.runs[*run_ix].glyphs[*glyph_ix + 1].position.x
- }
+ layout.runs[*run_ix].glyphs[*glyph_ix].position.x
} else {
layout.width
};
@@ -8,6 +8,7 @@ use anyhow::Result;
use collections::FxHashSet;
use refineable::Refineable;
use std::mem;
+use std::rc::Rc;
use std::{any::TypeId, fmt, ops::Range};
struct AnyViewState {
@@ -73,7 +74,7 @@ impl<V: Render> Element for Entity<V> {
pub struct AnyView {
entity: AnyEntity,
render: fn(&AnyView, &mut Window, &mut App) -> AnyElement,
- cached_style: Option<StyleRefinement>,
+ cached_style: Option<Rc<StyleRefinement>>,
}
impl<V: Render> From<Entity<V>> for AnyView {
@@ -91,7 +92,7 @@ impl AnyView {
/// When using this method, the view's previous layout and paint will be recycled from the previous frame if [Context::notify] has not been called since it was rendered.
/// The one exception is when [Window::refresh] is called, in which case caching is ignored.
pub fn cached(mut self, style: StyleRefinement) -> Self {
- self.cached_style = Some(style);
+ self.cached_style = Some(style.into());
self
}
@@ -13,7 +13,7 @@ use crate::{
Subscription, TaffyLayoutEngine, Task, TextStyle, TextStyleRefinement, TransformationMatrix,
Underline, UnderlineStyle, WindowAppearance, WindowBackgroundAppearance, WindowBounds,
WindowControls, WindowDecorations, WindowOptions, WindowParams, WindowTextSystem,
- SUBPIXEL_VARIANTS,
+ SMOOTH_SVG_SCALE_FACTOR, SUBPIXEL_VARIANTS,
};
use anyhow::{anyhow, Context as _, Result};
use collections::{FxHashMap, FxHashSet};
@@ -23,6 +23,7 @@ use futures::FutureExt;
#[cfg(target_os = "macos")]
use media::core_video::CVImageBuffer;
use parking_lot::RwLock;
+use raw_window_handle::{HandleError, HasWindowHandle};
use refineable::Refineable;
use slotmap::SlotMap;
use smallvec::SmallVec;
@@ -137,7 +138,7 @@ impl WindowInvalidator {
self.inner.borrow_mut().dirty_views = views;
}
- pub fn not_painting(&self) -> bool {
+ pub fn not_drawing(&self) -> bool {
self.inner.borrow().draw_phase == DrawPhase::None
}
@@ -1035,7 +1036,7 @@ impl Window {
/// Mark the window as dirty, scheduling it to be redrawn on the next frame.
pub fn refresh(&mut self) {
- if self.invalidator.not_painting() {
+ if self.invalidator.not_drawing() {
self.refreshing = true;
self.invalidator.set_dirty(true);
}
@@ -2553,12 +2554,11 @@ impl Window {
let element_opacity = self.element_opacity();
let scale_factor = self.scale_factor();
let bounds = bounds.scale(scale_factor);
- // Render the SVG at twice the size to get a higher quality result.
let params = RenderSvgParams {
path,
- size: bounds
- .size
- .map(|pixels| DevicePixels::from((pixels.0 * 2.).ceil() as i32)),
+ size: bounds.size.map(|pixels| {
+ DevicePixels::from((pixels.0 * SMOOTH_SVG_SCALE_FACTOR).ceil() as i32)
+ }),
};
let Some(tile) =
@@ -3944,6 +3944,12 @@ impl AnyWindowHandle {
}
}
+impl HasWindowHandle for Window {
+ fn window_handle(&self) -> Result<raw_window_handle::WindowHandle<'_>, HandleError> {
+ self.platform_window.window_handle()
+ }
+}
+
/// An identifier for an [`Element`](crate::Element).
///
/// Can be constructed with a string, a number, or both, as well
@@ -14,9 +14,9 @@ proc-macro = true
doctest = true
[dependencies]
-proc-macro2 = "1.0.66"
-quote = "1.0.9"
-syn = { version = "1.0.72", features = ["full", "extra-traits"] }
+proc-macro2.workspace = true
+quote.workspace = true
+syn.workspace = true
[dev-dependencies]
gpui.workspace = true
@@ -12,6 +12,9 @@ workspace = true
path = "src/image_viewer.rs"
doctest = false
+[features]
+test-support = ["gpui/test-support", "editor/test-support"]
+
[dependencies]
anyhow.workspace = true
db.workspace = true
@@ -19,11 +22,13 @@ editor.workspace = true
file_icons.workspace = true
gpui.workspace = true
project.workspace = true
+schemars.workspace = true
+serde.workspace = true
settings.workspace = true
theme.workspace = true
ui.workspace = true
util.workspace = true
workspace.workspace = true
-[features]
-test-support = ["gpui/test-support"]
+[dev-dependencies]
+editor = { workspace = true, features = ["test-support"] }
@@ -0,0 +1,124 @@
+use gpui::{div, Context, Entity, IntoElement, ParentElement, Render, Subscription};
+use project::image_store::{ImageFormat, ImageMetadata};
+use settings::Settings;
+use ui::prelude::*;
+use workspace::{ItemHandle, StatusItemView, Workspace};
+
+use crate::{ImageFileSizeUnit, ImageView, ImageViewerSettings};
+
+pub struct ImageInfo {
+ metadata: Option<ImageMetadata>,
+ _observe_active_image: Option<Subscription>,
+ observe_image_item: Option<Subscription>,
+}
+
+impl ImageInfo {
+ pub fn new(_workspace: &Workspace) -> Self {
+ Self {
+ metadata: None,
+ _observe_active_image: None,
+ observe_image_item: None,
+ }
+ }
+
+ fn update_metadata(&mut self, image_view: &Entity<ImageView>, cx: &mut Context<Self>) {
+ let image_item = image_view.read(cx).image_item.clone();
+ let current_metadata = image_item.read(cx).image_metadata;
+ if current_metadata.is_some() {
+ self.metadata = current_metadata;
+ cx.notify();
+ } else {
+ self.observe_image_item = Some(cx.observe(&image_item, |this, item, cx| {
+ this.metadata = item.read(cx).image_metadata;
+ cx.notify();
+ }));
+ }
+ }
+}
+
+fn format_file_size(size: u64, image_unit_type: ImageFileSizeUnit) -> String {
+ match image_unit_type {
+ ImageFileSizeUnit::Binary => {
+ if size < 1024 {
+ format!("{size}B")
+ } else if size < 1024 * 1024 {
+ format!("{:.1}KiB", size as f64 / 1024.0)
+ } else {
+ format!("{:.1}MiB", size as f64 / (1024.0 * 1024.0))
+ }
+ }
+ ImageFileSizeUnit::Decimal => {
+ if size < 1000 {
+ format!("{size}B")
+ } else if size < 1000 * 1000 {
+ format!("{:.1}KB", size as f64 / 1000.0)
+ } else {
+ format!("{:.1}MB", size as f64 / (1000.0 * 1000.0))
+ }
+ }
+ }
+}
+
+impl Render for ImageInfo {
+ fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let settings = ImageViewerSettings::get_global(cx);
+
+ let Some(metadata) = self.metadata.as_ref() else {
+ return div();
+ };
+
+ let mut components = Vec::new();
+ components.push(format!("{}x{}", metadata.width, metadata.height));
+ components.push(format_file_size(metadata.file_size, settings.unit));
+
+ if let Some(colors) = metadata.colors {
+ components.push(format!(
+ "{} channels, {} bits per pixel",
+ colors.channels,
+ colors.bits_per_pixel()
+ ));
+ }
+
+ components.push(
+ match metadata.format {
+ ImageFormat::Png => "PNG",
+ ImageFormat::Jpeg => "JPEG",
+ ImageFormat::Gif => "GIF",
+ ImageFormat::WebP => "WebP",
+ ImageFormat::Tiff => "TIFF",
+ ImageFormat::Bmp => "BMP",
+ ImageFormat::Ico => "ICO",
+ ImageFormat::Avif => "Avif",
+ _ => "Unknown",
+ }
+ .to_string(),
+ );
+
+ div().child(
+ Button::new("image-metadata", components.join(" • ")).label_size(LabelSize::Small),
+ )
+ }
+}
+
+impl StatusItemView for ImageInfo {
+ fn set_active_pane_item(
+ &mut self,
+ active_pane_item: Option<&dyn ItemHandle>,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self._observe_active_image = None;
+ self.observe_image_item = None;
+
+ if let Some(image_view) = active_pane_item.and_then(|item| item.act_as::<ImageView>(cx)) {
+ self.update_metadata(&image_view, cx);
+
+ self._observe_active_image = Some(cx.observe(&image_view, |this, view, cx| {
+ this.update_metadata(&view, cx);
+ }));
+ } else {
+ self.metadata = None;
+ }
+ cx.notify();
+ }
+}
@@ -1,3 +1,6 @@
+mod image_info;
+mod image_viewer_settings;
+
use std::path::PathBuf;
use anyhow::Context as _;
@@ -19,7 +22,8 @@ use workspace::{
ItemId, ItemSettings, ToolbarItemLocation, Workspace, WorkspaceId,
};
-const IMAGE_VIEWER_KIND: &str = "ImageView";
+pub use crate::image_info::*;
+pub use crate::image_viewer_settings::*;
pub struct ImageView {
image_item: Entity<ImageItem>,
@@ -31,7 +35,6 @@ impl ImageView {
pub fn new(
image_item: Entity<ImageItem>,
project: Entity<Project>,
-
cx: &mut Context<Self>,
) -> Self {
cx.subscribe(&image_item, Self::on_image_event).detach();
@@ -49,7 +52,9 @@ impl ImageView {
cx: &mut Context<Self>,
) {
match event {
- ImageItemEvent::FileHandleChanged | ImageItemEvent::Reloaded => {
+ ImageItemEvent::MetadataUpdated
+ | ImageItemEvent::FileHandleChanged
+ | ImageItemEvent::Reloaded => {
cx.emit(ImageViewEvent::TitleChanged);
cx.notify();
}
@@ -188,7 +193,7 @@ fn breadcrumbs_text_for_image(project: &Project, image: &ImageItem, cx: &App) ->
impl SerializableItem for ImageView {
fn serialized_item_kind() -> &'static str {
- IMAGE_VIEWER_KIND
+ "ImageView"
}
fn deserialize(
@@ -357,8 +362,9 @@ impl ProjectItem for ImageView {
}
pub fn init(cx: &mut App) {
+ ImageViewerSettings::register(cx);
workspace::register_project_item::<ImageView>(cx);
- workspace::register_serializable_item::<ImageView>(cx)
+ workspace::register_serializable_item::<ImageView>(cx);
}
mod persistence {
@@ -0,0 +1,42 @@
+use gpui::App;
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+use settings::{Settings, SettingsSources};
+
+/// The settings for the image viewer.
+#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Default)]
+pub struct ImageViewerSettings {
+ /// The unit to use for displaying image file sizes.
+ ///
+ /// Default: "binary"
+ #[serde(default)]
+ pub unit: ImageFileSizeUnit,
+}
+
+#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, Default)]
+#[serde(rename_all = "snake_case")]
+pub enum ImageFileSizeUnit {
+ /// Displays file size in binary units (e.g., KiB, MiB).
+ #[default]
+ Binary,
+ /// Displays file size in decimal units (e.g., KB, MB).
+ Decimal,
+}
+
+impl Settings for ImageViewerSettings {
+ const KEY: Option<&'static str> = Some("image_viewer");
+
+ type FileContent = Self;
+
+ fn load(
+ sources: SettingsSources<Self::FileContent>,
+ _: &mut App,
+ ) -> Result<Self, anyhow::Error> {
+ SettingsSources::<Self::FileContent>::json_merge_with(
+ [sources.default]
+ .into_iter()
+ .chain(sources.user)
+ .chain(sources.server),
+ )
+ }
+}
@@ -14,3 +14,4 @@ path = "src/inline_completion.rs"
[dependencies]
gpui.workspace = true
language.workspace = true
+project.workspace = true
@@ -1,5 +1,6 @@
use gpui::{App, Context, Entity};
use language::Buffer;
+use project::Project;
use std::ops::Range;
// TODO: Find a better home for `Direction`.
@@ -37,11 +38,10 @@ impl DataCollectionState {
}
}
-pub trait InlineCompletionProvider: 'static + Sized {
+pub trait EditPredictionProvider: 'static + Sized {
fn name() -> &'static str;
fn display_name() -> &'static str;
fn show_completions_in_menu() -> bool;
- fn show_completions_in_normal_mode() -> bool;
fn show_tab_accept_marker() -> bool {
false
}
@@ -58,6 +58,7 @@ pub trait InlineCompletionProvider: 'static + Sized {
fn is_refreshing(&self) -> bool;
fn refresh(
&mut self,
+ project: Option<Entity<Project>>,
buffer: Entity<Buffer>,
cursor_position: language::Anchor,
debounce: bool,
@@ -93,7 +94,6 @@ pub trait InlineCompletionProviderHandle {
cx: &App,
) -> bool;
fn show_completions_in_menu(&self) -> bool;
- fn show_completions_in_normal_mode(&self) -> bool;
fn show_tab_accept_marker(&self) -> bool;
fn data_collection_state(&self, cx: &App) -> DataCollectionState;
fn toggle_data_collection(&self, cx: &mut App);
@@ -101,6 +101,7 @@ pub trait InlineCompletionProviderHandle {
fn is_refreshing(&self, cx: &App) -> bool;
fn refresh(
&self,
+ project: Option<Entity<Project>>,
buffer: Entity<Buffer>,
cursor_position: language::Anchor,
debounce: bool,
@@ -125,7 +126,7 @@ pub trait InlineCompletionProviderHandle {
impl<T> InlineCompletionProviderHandle for Entity<T>
where
- T: InlineCompletionProvider,
+ T: EditPredictionProvider,
{
fn name(&self) -> &'static str {
T::name()
@@ -139,10 +140,6 @@ where
T::show_completions_in_menu()
}
- fn show_completions_in_normal_mode(&self) -> bool {
- T::show_completions_in_normal_mode()
- }
-
fn show_tab_accept_marker(&self) -> bool {
T::show_tab_accept_marker()
}
@@ -174,13 +171,14 @@ where
fn refresh(
&self,
+ project: Option<Entity<Project>>,
buffer: Entity<Buffer>,
cursor_position: language::Anchor,
debounce: bool,
cx: &mut App,
) {
self.update(cx, |this, cx| {
- this.refresh(buffer, cursor_position, debounce, cx)
+ this.refresh(project, buffer, cursor_position, debounce, cx)
})
}
@@ -14,21 +14,24 @@ doctest = false
[dependencies]
anyhow.workspace = true
+client.workspace = true
copilot.workspace = true
editor.workspace = true
feature_flags.workspace = true
fs.workspace = true
gpui.workspace = true
+indoc.workspace = true
inline_completion.workspace = true
language.workspace = true
paths.workspace = true
+regex.workspace = true
settings.workspace = true
supermaven.workspace = true
+telemetry.workspace = true
ui.workspace = true
workspace.workspace = true
zed_actions.workspace = true
zeta.workspace = true
-client.workspace = true
[dev-dependencies]
copilot = { workspace = true, features = ["test-support"] }
@@ -1,7 +1,7 @@
use anyhow::Result;
use client::UserStore;
use copilot::{Copilot, Status};
-use editor::{actions::ShowInlineCompletion, scroll::Autoscroll, Editor};
+use editor::{actions::ShowEditPrediction, scroll::Autoscroll, Editor};
use feature_flags::{
FeatureFlagAppExt, PredictEditsFeatureFlag, PredictEditsRateCompletionsFeatureFlag,
};
@@ -11,18 +11,21 @@ use gpui::{
Corner, Entity, FocusHandle, Focusable, IntoElement, ParentElement, Render, Subscription,
WeakEntity,
};
+use indoc::indoc;
use language::{
- language_settings::{
- self, all_language_settings, AllLanguageSettings, InlineCompletionProvider,
- },
+ language_settings::{self, all_language_settings, AllLanguageSettings, EditPredictionProvider},
File, Language,
};
+use regex::Regex;
use settings::{update_settings_file, Settings, SettingsStore};
-use std::{path::Path, sync::Arc, time::Duration};
+use std::{
+ sync::{Arc, LazyLock},
+ time::Duration,
+};
use supermaven::{AccountStatus, Supermaven};
use ui::{
- prelude::*, Clickable, ContextMenu, ContextMenuEntry, IconButton, IconButtonShape, PopoverMenu,
- PopoverMenuHandle, Tooltip,
+ prelude::*, Clickable, ContextMenu, ContextMenuEntry, IconButton, IconButtonShape, Indicator,
+ PopoverMenu, PopoverMenuHandle, Tooltip,
};
use workspace::{
create_and_open_local_file, item::ItemHandle, notifications::NotificationId, StatusItemView,
@@ -32,7 +35,7 @@ use zed_actions::OpenBrowser;
use zeta::RateCompletionModal;
actions!(zeta, [RateCompletions]);
-actions!(inline_completion, [ToggleMenu]);
+actions!(edit_prediction, [ToggleMenu]);
const COPILOT_SETTINGS_URL: &str = "https://github.com/settings/copilot";
@@ -44,7 +47,7 @@ pub struct InlineCompletionButton {
editor_focus_handle: Option<FocusHandle>,
language: Option<Arc<Language>>,
file: Option<Arc<dyn File>>,
- inline_completion_provider: Option<Arc<dyn inline_completion::InlineCompletionProviderHandle>>,
+ edit_prediction_provider: Option<Arc<dyn inline_completion::InlineCompletionProviderHandle>>,
fs: Arc<dyn Fs>,
workspace: WeakEntity<Workspace>,
user_store: Entity<UserStore>,
@@ -62,18 +65,16 @@ impl Render for InlineCompletionButton {
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let all_language_settings = all_language_settings(None, cx);
- match all_language_settings.inline_completions.provider {
- InlineCompletionProvider::None => div(),
+ match all_language_settings.edit_predictions.provider {
+ EditPredictionProvider::None => div(),
- InlineCompletionProvider::Copilot => {
+ EditPredictionProvider::Copilot => {
let Some(copilot) = Copilot::global(cx) else {
return div();
};
let status = copilot.read(cx).status();
- let enabled = self.editor_enabled.unwrap_or_else(|| {
- all_language_settings.inline_completions_enabled(None, None, cx)
- });
+ let enabled = self.editor_enabled.unwrap_or(false);
let icon = match status {
Status::Error(_) => IconName::CopilotError,
@@ -143,7 +144,7 @@ impl Render for InlineCompletionButton {
)
}
- InlineCompletionProvider::Supermaven => {
+ EditPredictionProvider::Supermaven => {
let Some(supermaven) = Supermaven::global(cx) else {
return div();
};
@@ -193,7 +194,7 @@ impl Render for InlineCompletionButton {
set_completion_provider(
fs.clone(),
cx,
- InlineCompletionProvider::Copilot,
+ EditPredictionProvider::Copilot,
)
},
)
@@ -223,15 +224,18 @@ impl Render for InlineCompletionButton {
);
}
- InlineCompletionProvider::Zed => {
+ EditPredictionProvider::Zed => {
if !cx.has_flag::<PredictEditsFeatureFlag>() {
return div();
}
- fn icon_button() -> IconButton {
- IconButton::new("zed-predict-pending-button", IconName::ZedPredict)
- .shape(IconButtonShape::Square)
- }
+ let enabled = self.editor_enabled.unwrap_or(true);
+
+ let zeta_icon = if enabled {
+ IconName::ZedPredict
+ } else {
+ IconName::ZedPredictDisabled
+ };
let current_user_terms_accepted =
self.user_store.read(cx).current_user_has_accepted_terms();
@@ -245,7 +249,10 @@ impl Render for InlineCompletionButton {
};
return div().child(
- icon_button()
+ IconButton::new("zed-predict-pending-button", zeta_icon)
+ .shape(IconButtonShape::Square)
+ .indicator(Indicator::dot().color(Color::Error))
+ .indicator_border_color(Some(cx.theme().colors().status_bar_background))
.tooltip(move |window, cx| {
Tooltip::with_meta(
"Edit Predictions",
@@ -256,6 +263,10 @@ impl Render for InlineCompletionButton {
)
})
.on_click(cx.listener(move |_, _, window, cx| {
+ telemetry::event!(
+ "Pending ToS Clicked",
+ source = "Edit Prediction Status Button"
+ );
window.dispatch_action(
zed_actions::OpenZedPredictOnboarding.boxed_clone(),
cx,
@@ -264,13 +275,27 @@ impl Render for InlineCompletionButton {
);
}
- let this = cx.entity().clone();
-
- if !self.popover_menu_handle.is_deployed() {
- icon_button().tooltip(|window, cx| {
- Tooltip::for_action("Edit Prediction", &ToggleMenu, window, cx)
+ let icon_button = IconButton::new("zed-predict-pending-button", zeta_icon)
+ .shape(IconButtonShape::Square)
+ .when(!self.popover_menu_handle.is_deployed(), |element| {
+ if enabled {
+ element.tooltip(|window, cx| {
+ Tooltip::for_action("Edit Prediction", &ToggleMenu, window, cx)
+ })
+ } else {
+ element.tooltip(|window, cx| {
+ Tooltip::with_meta(
+ "Edit Prediction",
+ Some(&ToggleMenu),
+ "Disabled For This File",
+ window,
+ cx,
+ )
+ })
+ }
});
- }
+
+ let this = cx.entity().clone();
let mut popover_menu = PopoverMenu::new("zeta")
.menu(move |window, cx| {
@@ -280,13 +305,13 @@ impl Render for InlineCompletionButton {
.with_handle(self.popover_menu_handle.clone());
let is_refreshing = self
- .inline_completion_provider
+ .edit_prediction_provider
.as_ref()
.map_or(false, |provider| provider.is_refreshing(cx));
if is_refreshing {
popover_menu = popover_menu.trigger(
- icon_button().with_animation(
+ icon_button.with_animation(
"pulsating-label",
Animation::new(Duration::from_secs(2))
.repeat()
@@ -295,7 +320,7 @@ impl Render for InlineCompletionButton {
),
);
} else {
- popover_menu = popover_menu.trigger(icon_button());
+ popover_menu = popover_menu.trigger(icon_button);
}
div().child(popover_menu.into_any_element())
@@ -325,7 +350,7 @@ impl InlineCompletionButton {
editor_focus_handle: None,
language: None,
file: None,
- inline_completion_provider: None,
+ edit_prediction_provider: None,
popover_menu_handle,
workspace,
fs,
@@ -348,111 +373,122 @@ impl InlineCompletionButton {
.entry("Use Supermaven", None, {
let fs = fs.clone();
move |_window, cx| {
- set_completion_provider(
- fs.clone(),
- cx,
- InlineCompletionProvider::Supermaven,
- )
+ set_completion_provider(fs.clone(), cx, EditPredictionProvider::Supermaven)
}
})
})
}
- // Predict Edits at Cursor – alt-tab
- // Automatically Predict:
- // ✓ PATH
- // ✓ Rust
- // ✓ All Files
pub fn build_language_settings_menu(&self, mut menu: ContextMenu, cx: &mut App) -> ContextMenu {
let fs = self.fs.clone();
- menu = menu.header("Predict Edits For:");
+ menu = menu.header("Show Edit Predictions For");
if let Some(language) = self.language.clone() {
let fs = fs.clone();
let language_enabled =
language_settings::language_settings(Some(language.name()), None, cx)
- .show_inline_completions;
+ .show_edit_predictions;
menu = menu.toggleable_entry(
language.name(),
language_enabled,
- IconPosition::Start,
+ IconPosition::End,
None,
move |_, cx| {
- toggle_inline_completions_for_language(language.clone(), fs.clone(), cx)
+ toggle_show_inline_completions_for_language(language.clone(), fs.clone(), cx)
},
);
}
let settings = AllLanguageSettings::get_global(cx);
- if let Some(file) = &self.file {
- let path = file.path().clone();
- let path_enabled = settings.inline_completions_enabled_for_path(&path);
-
- menu = menu.toggleable_entry(
- "This File",
- path_enabled,
- IconPosition::Start,
- None,
- move |window, cx| {
- if let Some(workspace) = window.root().flatten() {
- let workspace = workspace.downgrade();
- window
- .spawn(cx, |cx| {
- configure_disabled_globs(
- workspace,
- path_enabled.then_some(path.clone()),
- cx,
- )
- })
- .detach_and_log_err(cx);
- }
- },
- );
- }
-
- let globally_enabled = settings.inline_completions_enabled(None, None, cx);
+ let globally_enabled = settings.show_inline_completions(None, cx);
menu = menu.toggleable_entry(
"All Files",
globally_enabled,
- IconPosition::Start,
+ IconPosition::End,
None,
move |_, cx| toggle_inline_completions_globally(fs.clone(), cx),
);
+ menu = menu.separator().header("Privacy Settings");
- if let Some(provider) = &self.inline_completion_provider {
+ if let Some(provider) = &self.edit_prediction_provider {
let data_collection = provider.data_collection_state(cx);
-
if data_collection.is_supported() {
let provider = provider.clone();
- menu = menu
- .separator()
- .header("Help Improve The Model")
- .header("Valid Only For OSS Projects");
+ let enabled = data_collection.is_enabled();
+
menu = menu.item(
// TODO: We want to add something later that communicates whether
// the current project is open-source.
ContextMenuEntry::new("Share Training Data")
- .toggleable(IconPosition::Start, data_collection.is_enabled())
+ .toggleable(IconPosition::End, data_collection.is_enabled())
+ .documentation_aside(|_| {
+ Label::new(indoc!{"
+ Help us improve our open model by sharing data from open source repositories. \
+ Zed must detect a license file in your repo for this setting to take effect.\
+ "}).into_any_element()
+ })
.handler(move |_, cx| {
provider.toggle_data_collection(cx);
- }),
- );
+
+ if !enabled {
+ telemetry::event!(
+ "Data Collection Enabled",
+ source = "Edit Prediction Status Menu"
+ );
+ } else {
+ telemetry::event!(
+ "Data Collection Disabled",
+ source = "Edit Prediction Status Menu"
+ );
+ }
+ })
+ )
}
}
+ menu = menu.item(
+ ContextMenuEntry::new("Configure Excluded Files")
+ .documentation_aside(|_| {
+ Label::new(indoc!{"
+ Open your settings to add sensitive paths for which Zed will never predict edits."}).into_any_element()
+ })
+ .handler(move |window, cx| {
+ if let Some(workspace) = window.root().flatten() {
+ let workspace = workspace.downgrade();
+ window
+ .spawn(cx, |cx| {
+ open_disabled_globs_setting_in_editor(
+ workspace,
+ cx,
+ )
+ })
+ .detach_and_log_err(cx);
+ }
+ }),
+ );
+
+ if !self.editor_enabled.unwrap_or(true) {
+ menu = menu.item(
+ ContextMenuEntry::new("This file is excluded.")
+ .disabled(true)
+ .icon(IconName::ZedPredictDisabled)
+ .icon_size(IconSize::Small),
+ );
+ }
+
if let Some(editor_focus_handle) = self.editor_focus_handle.clone() {
menu = menu
.separator()
.entry(
"Predict Edit at Cursor",
- Some(Box::new(ShowInlineCompletion)),
+ Some(Box::new(ShowEditPrediction)),
{
let editor_focus_handle = editor_focus_handle.clone();
move |window, cx| {
- editor_focus_handle.dispatch_action(&ShowInlineCompletion, window, cx);
+ editor_focus_handle.dispatch_action(&ShowEditPrediction, window, cx);
}
},
)
@@ -528,15 +564,14 @@ impl InlineCompletionButton {
self.editor_enabled = {
let file = file.as_ref();
Some(
- file.map(|file| !file.is_private()).unwrap_or(true)
- && all_language_settings(file, cx).inline_completions_enabled(
- language,
- file.map(|file| file.path().as_ref()),
- cx,
- ),
+ file.map(|file| {
+ all_language_settings(Some(file), cx)
+ .inline_completions_enabled_for_path(file.path())
+ })
+ .unwrap_or(true),
)
};
- self.inline_completion_provider = editor.inline_completion_provider();
+ self.edit_prediction_provider = editor.edit_prediction_provider();
self.language = language.cloned();
self.file = file;
self.editor_focus_handle = Some(editor.focus_handle(cx));
@@ -598,9 +633,8 @@ impl SupermavenButtonStatus {
}
}
-async fn configure_disabled_globs(
+async fn open_disabled_globs_setting_in_editor(
workspace: WeakEntity<Workspace>,
- path_to_disable: Option<Arc<Path>>,
mut cx: AsyncWindowContext,
) -> Result<()> {
let settings_editor = workspace
@@ -619,34 +653,34 @@ async fn configure_disabled_globs(
let text = item.buffer().read(cx).snapshot(cx).text();
let settings = cx.global::<SettingsStore>();
- let edits = settings.edits_for_update::<AllLanguageSettings>(&text, |file| {
- let copilot = file.inline_completions.get_or_insert_with(Default::default);
- let globs = copilot.disabled_globs.get_or_insert_with(|| {
- settings
- .get::<AllLanguageSettings>(None)
- .inline_completions
- .disabled_globs
- .iter()
- .map(|glob| glob.glob().to_string())
- .collect()
- });
- if let Some(path_to_disable) = &path_to_disable {
- globs.push(path_to_disable.to_string_lossy().into_owned());
- } else {
- globs.clear();
- }
+ // Ensure that we always have "inline_completions": { "disabled_globs": [] } present in the settings text.
+ let edits = settings.edits_for_update::<AllLanguageSettings>(&text, |file| {
+ file.edit_predictions
+ .get_or_insert_with(Default::default)
+ .disabled_globs
+ .get_or_insert_with(Vec::new);
});
if !edits.is_empty() {
+ item.edit(edits.iter().cloned(), cx);
+ }
+
+ let text = item.buffer().read(cx).snapshot(cx).text();
+
+ static DISABLED_GLOBS_REGEX: LazyLock<Regex> = LazyLock::new(|| {
+ Regex::new(r#""disabled_globs":\s*\[\s*(?P<content>(?:.|\n)*?)\s*\]"#).unwrap()
+ });
+ // Select only the text between the brackets of the "disabled_globs" array (the regex's "content" group).
+ let range = DISABLED_GLOBS_REGEX.captures(&text).and_then(|captures| {
+ captures
+ .name("content")
+ .map(|inner_match| inner_match.start()..inner_match.end())
+ });
+ if let Some(range) = range {
item.change_selections(Some(Autoscroll::newest()), window, cx, |selections| {
- selections.select_ranges(edits.iter().map(|e| e.0.clone()));
+ selections.select_ranges(vec![range]);
});
-
- // When *enabling* a path, don't actually perform an edit, just select the range.
- if path_to_disable.is_some() {
- item.edit(edits.iter().cloned(), cx);
- }
}
})?;
@@ -654,29 +688,32 @@ async fn configure_disabled_globs(
}
fn toggle_inline_completions_globally(fs: Arc<dyn Fs>, cx: &mut App) {
- let show_inline_completions =
- all_language_settings(None, cx).inline_completions_enabled(None, None, cx);
+ let show_edit_predictions = all_language_settings(None, cx).show_inline_completions(None, cx);
update_settings_file::<AllLanguageSettings>(fs, cx, move |file, _| {
- file.defaults.show_inline_completions = Some(!show_inline_completions)
+ file.defaults.show_edit_predictions = Some(!show_edit_predictions)
});
}
-fn set_completion_provider(fs: Arc<dyn Fs>, cx: &mut App, provider: InlineCompletionProvider) {
+fn set_completion_provider(fs: Arc<dyn Fs>, cx: &mut App, provider: EditPredictionProvider) {
update_settings_file::<AllLanguageSettings>(fs, cx, move |file, _| {
file.features
.get_or_insert(Default::default())
- .inline_completion_provider = Some(provider);
+ .edit_prediction_provider = Some(provider);
});
}
-fn toggle_inline_completions_for_language(language: Arc<Language>, fs: Arc<dyn Fs>, cx: &mut App) {
- let show_inline_completions =
- all_language_settings(None, cx).inline_completions_enabled(Some(&language), None, cx);
+fn toggle_show_inline_completions_for_language(
+ language: Arc<Language>,
+ fs: Arc<dyn Fs>,
+ cx: &mut App,
+) {
+ let show_edit_predictions =
+ all_language_settings(None, cx).show_inline_completions(Some(&language), cx);
update_settings_file::<AllLanguageSettings>(fs, cx, move |file, _| {
file.languages
.entry(language.name())
.or_default()
- .show_inline_completions = Some(!show_inline_completions);
+ .show_edit_predictions = Some(!show_edit_predictions);
});
}
@@ -684,6 +721,6 @@ fn hide_copilot(fs: Arc<dyn Fs>, cx: &mut App) {
update_settings_file::<AllLanguageSettings>(fs, cx, move |file, _| {
file.features
.get_or_insert(Default::default())
- .inline_completion_provider = Some(InlineCompletionProvider::None);
+ .edit_prediction_provider = Some(EditPredictionProvider::None);
});
}
@@ -28,7 +28,7 @@ use gpui::{
AnyElement, App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, Pixels,
SharedString, StyledText, Task, TaskLabel, TextStyle, Window,
};
-use lsp::LanguageServerId;
+use lsp::{LanguageServerId, NumberOrString};
use parking_lot::Mutex;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
@@ -197,12 +197,12 @@ struct SelectionSet {
}
/// A diagnostic associated with a certain range of a buffer.
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Diagnostic {
/// The name of the service that produced this diagnostic.
pub source: Option<String>,
/// A machine-readable code that identifies this diagnostic.
- pub code: Option<String>,
+ pub code: Option<NumberOrString>,
/// Whether this diagnostic is a hint, warning, or error.
pub severity: DiagnosticSeverity,
/// The human-readable message associated with this diagnostic.
@@ -1001,6 +1001,51 @@ impl Buffer {
}
}
+ pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
+ let entity_id = cx.reserve_entity::<Self>().entity_id();
+ let buffer_id = entity_id.as_non_zero_u64().into();
+ let text =
+ TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
+ let syntax = SyntaxMap::new(&text).snapshot();
+ BufferSnapshot {
+ text,
+ syntax,
+ file: None,
+ diagnostics: Default::default(),
+ remote_selections: Default::default(),
+ language: None,
+ non_text_state_update_count: 0,
+ }
+ }
+
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn build_snapshot_sync(
+ text: Rope,
+ language: Option<Arc<Language>>,
+ language_registry: Option<Arc<LanguageRegistry>>,
+ cx: &mut App,
+ ) -> BufferSnapshot {
+ let entity_id = cx.reserve_entity::<Self>().entity_id();
+ let buffer_id = entity_id.as_non_zero_u64().into();
+ let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
+ let mut syntax = SyntaxMap::new(&text).snapshot();
+ if let Some(language) = language.clone() {
+ let text = text.clone();
+ let language = language.clone();
+ let language_registry = language_registry.clone();
+ syntax.reparse(&text, language_registry, language);
+ }
+ BufferSnapshot {
+ text,
+ syntax,
+ file: None,
+ diagnostics: Default::default(),
+ remote_selections: Default::default(),
+ language,
+ non_text_state_update_count: 0,
+ }
+ }
+
/// Retrieve a snapshot of the buffer's current state. This is computationally
/// cheap, and allows reading from the buffer on a background thread.
pub fn snapshot(&self) -> BufferSnapshot {
@@ -2,6 +2,7 @@ use crate::{range_to_lsp, Diagnostic};
use anyhow::Result;
use collections::HashMap;
use lsp::LanguageServerId;
+use serde::Serialize;
use std::{
cmp::{Ordering, Reverse},
iter,
@@ -25,7 +26,7 @@ pub struct DiagnosticSet {
/// the diagnostics are stored internally as [`Anchor`]s, but can be
/// resolved to different coordinates types like [`usize`] byte offsets or
/// [`Point`](gpui::Point)s.
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq, Serialize)]
pub struct DiagnosticEntry<T> {
/// The range of the buffer where the diagnostic applies.
pub range: Range<T>,
@@ -35,7 +36,7 @@ pub struct DiagnosticEntry<T> {
/// A group of related diagnostics, ordered by their start position
/// in the buffer.
-#[derive(Debug)]
+#[derive(Debug, Serialize)]
pub struct DiagnosticGroup<T> {
/// The diagnostics.
pub entries: Vec<DiagnosticEntry<T>>,
@@ -43,6 +44,20 @@ pub struct DiagnosticGroup<T> {
pub primary_ix: usize,
}
+impl DiagnosticGroup<Anchor> {
+ /// Converts the entries in this [`DiagnosticGroup`] to a different buffer coordinate type.
+ pub fn resolve<O: FromAnchor>(&self, buffer: &text::BufferSnapshot) -> DiagnosticGroup<O> {
+ DiagnosticGroup {
+ entries: self
+ .entries
+ .iter()
+ .map(|entry| entry.resolve(buffer))
+ .collect(),
+ primary_ix: self.primary_ix,
+ }
+ }
+}
+
#[derive(Clone, Debug)]
pub struct Summary {
start: Anchor,
@@ -56,17 +71,11 @@ impl DiagnosticEntry<PointUtf16> {
/// Returns a raw LSP diagnostic used to provide diagnostic context to LSP
/// codeAction request
pub fn to_lsp_diagnostic_stub(&self) -> Result<lsp::Diagnostic> {
- let code = self
- .diagnostic
- .code
- .clone()
- .map(lsp::NumberOrString::String);
-
let range = range_to_lsp(self.range.clone())?;
Ok(lsp::Diagnostic {
- code,
range,
+ code: self.diagnostic.code.clone(),
severity: Some(self.diagnostic.severity),
source: self.diagnostic.source.clone(),
message: self.diagnostic.message.clone(),
@@ -21,6 +21,7 @@ mod toolchain;
pub mod buffer_tests;
pub mod markdown;
+pub use crate::language_settings::InlineCompletionPreviewMode;
use crate::language_settings::SoftWrap;
use anyhow::{anyhow, Context as _, Result};
use async_trait::async_trait;
@@ -31,10 +32,7 @@ use gpui::{App, AsyncApp, Entity, SharedString, Task};
pub use highlight_map::HighlightMap;
use http_client::HttpClient;
pub use language_registry::{LanguageName, LoadedLanguage};
-use lsp::{
- CodeActionKind, InitializeParams, LanguageServerBinary, LanguageServerBinaryOptions,
- LanguageServerName,
-};
+use lsp::{CodeActionKind, InitializeParams, LanguageServerBinary, LanguageServerBinaryOptions};
use parking_lot::Mutex;
use regex::Regex;
use schemars::{
@@ -72,12 +70,12 @@ use util::serde::default_true;
pub use buffer::Operation;
pub use buffer::*;
-pub use diagnostic_set::DiagnosticEntry;
+pub use diagnostic_set::{DiagnosticEntry, DiagnosticGroup};
pub use language_registry::{
AvailableLanguage, LanguageNotFound, LanguageQueries, LanguageRegistry,
LanguageServerBinaryStatus, QUERY_FILENAME_PREFIXES,
};
-pub use lsp::LanguageServerId;
+pub use lsp::{LanguageServerId, LanguageServerName};
pub use outline::*;
pub use syntax_map::{OwnedSyntaxLayer, SyntaxLayer, ToTreeSitterPoint, TreeSitterOptions};
pub use text::{AnchorRangeExt, LineEnding};
@@ -31,7 +31,7 @@ use sum_tree::Bias;
use text::{Point, Rope};
use theme::Theme;
use unicase::UniCase;
-use util::{maybe, paths::PathExt, post_inc, ResultExt};
+use util::{maybe, post_inc, ResultExt};
#[derive(
Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, JsonSchema,
@@ -659,7 +659,10 @@ impl LanguageRegistry {
user_file_types: Option<&HashMap<Arc<str>, GlobSet>>,
) -> Option<AvailableLanguage> {
let filename = path.file_name().and_then(|name| name.to_str());
- let extension = path.extension_or_hidden_file_name();
+ // `Path.extension()` returns None for files with a leading '.'
+ // and no other extension which is not the desired behavior here,
+ // as we want `.zshrc` to result in extension being `Some("zshrc")`
+ let extension = filename.and_then(|filename| filename.split('.').last());
let path_suffixes = [extension, filename, path.to_str()];
let empty = GlobSet::empty();
@@ -60,7 +60,7 @@ pub fn all_language_settings<'a>(
#[derive(Debug, Clone)]
pub struct AllLanguageSettings {
/// The edit prediction settings.
- pub inline_completions: InlineCompletionSettings,
+ pub edit_predictions: EditPredictionSettings,
defaults: LanguageSettings,
languages: HashMap<LanguageName, LanguageSettings>,
pub(crate) file_types: HashMap<Arc<str>, GlobSet>,
@@ -110,11 +110,11 @@ pub struct LanguageSettings {
/// - `"..."` - A placeholder to refer to the **rest** of the registered language servers for this language.
pub language_servers: Vec<String>,
/// Controls whether edit predictions are shown immediately (true)
- /// or manually by triggering `editor::ShowInlineCompletion` (false).
- pub show_inline_completions: bool,
+ /// or manually by triggering `editor::ShowEditPrediction` (false).
+ pub show_edit_predictions: bool,
/// Controls whether edit predictions are shown in the given language
/// scopes.
- pub inline_completions_disabled_in: Vec<String>,
+ pub edit_predictions_disabled_in: Vec<String>,
/// Whether to show tabs and spaces in the editor.
pub show_whitespaces: ShowWhitespaceSetting,
/// Whether to start a new line with a comment when a previous line is a comment as well.
@@ -198,7 +198,7 @@ impl LanguageSettings {
/// The provider that supplies edit predictions.
#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
-pub enum InlineCompletionProvider {
+pub enum EditPredictionProvider {
None,
#[default]
Copilot,
@@ -206,14 +206,40 @@ pub enum InlineCompletionProvider {
Zed,
}
+impl EditPredictionProvider {
+ pub fn is_zed(&self) -> bool {
+ match self {
+ EditPredictionProvider::Zed => true,
+ EditPredictionProvider::None
+ | EditPredictionProvider::Copilot
+ | EditPredictionProvider::Supermaven => false,
+ }
+ }
+}
+
/// The settings for edit predictions, such as [GitHub Copilot](https://github.com/features/copilot)
/// or [Supermaven](https://supermaven.com).
#[derive(Clone, Debug, Default)]
-pub struct InlineCompletionSettings {
+pub struct EditPredictionSettings {
/// The provider that supplies edit predictions.
- pub provider: InlineCompletionProvider,
+ pub provider: EditPredictionProvider,
/// A list of globs representing files that edit predictions should be disabled for.
+ /// This list adds to a pre-existing, sensible default set of globs.
+ /// Any additional ones you add are combined with them.
pub disabled_globs: Vec<GlobMatcher>,
+ /// When to show edit predictions previews in buffer.
+ pub inline_preview: InlineCompletionPreviewMode,
+}
+
+/// The mode in which edit predictions should be displayed.
+#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub enum InlineCompletionPreviewMode {
+ /// Display inline when there are no language server completions available.
+ #[default]
+ Auto,
+ /// Display inline when holding modifier key (alt by default).
+ WhenHoldingModifier,
}
/// The settings for all languages.
@@ -224,7 +250,7 @@ pub struct AllLanguageSettingsContent {
pub features: Option<FeaturesContent>,
/// The edit prediction settings.
#[serde(default)]
- pub inline_completions: Option<InlineCompletionSettingsContent>,
+ pub edit_predictions: Option<InlineCompletionSettingsContent>,
/// The default language settings.
#[serde(flatten)]
pub defaults: LanguageSettingsContent,
@@ -323,11 +349,11 @@ pub struct LanguageSettingsContent {
#[serde(default)]
pub language_servers: Option<Vec<String>>,
/// Controls whether edit predictions are shown immediately (true)
- /// or manually by triggering `editor::ShowInlineCompletion` (false).
+ /// or manually by triggering `editor::ShowEditPrediction` (false).
///
/// Default: true
#[serde(default)]
- pub show_inline_completions: Option<bool>,
+ pub show_edit_predictions: Option<bool>,
/// Controls whether edit predictions are shown in the given language
/// scopes.
///
@@ -335,7 +361,7 @@ pub struct LanguageSettingsContent {
///
/// Default: []
#[serde(default)]
- pub inline_completions_disabled_in: Option<Vec<String>>,
+ pub edit_predictions_disabled_in: Option<Vec<String>>,
/// Whether to show tabs and spaces in the editor.
#[serde(default)]
pub show_whitespaces: Option<ShowWhitespaceSetting>,
@@ -404,8 +430,13 @@ pub struct LanguageSettingsContent {
#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct InlineCompletionSettingsContent {
/// A list of globs representing files that edit predictions should be disabled for.
+ /// This list adds to a pre-existing, sensible default set of globs.
+ /// Any additional ones you add are combined with them.
#[serde(default)]
pub disabled_globs: Option<Vec<String>>,
+ /// When to show edit predictions previews in buffer.
+ #[serde(default)]
+ pub inline_preview: InlineCompletionPreviewMode,
}
/// The settings for enabling/disabling features.
@@ -415,7 +446,7 @@ pub struct FeaturesContent {
/// Whether the GitHub Copilot feature is enabled.
pub copilot: Option<bool>,
/// Determines which edit prediction provider to use.
- pub inline_completion_provider: Option<InlineCompletionProvider>,
+ pub edit_prediction_provider: Option<EditPredictionProvider>,
}
/// Controls the soft-wrapping behavior in the editor.
@@ -879,27 +910,21 @@ impl AllLanguageSettings {
/// Returns whether edit predictions are enabled for the given path.
pub fn inline_completions_enabled_for_path(&self, path: &Path) -> bool {
!self
- .inline_completions
+ .edit_predictions
.disabled_globs
.iter()
.any(|glob| glob.is_match(path))
}
/// Returns whether edit predictions are enabled for the given language and path.
- pub fn inline_completions_enabled(
- &self,
- language: Option<&Arc<Language>>,
- path: Option<&Path>,
- cx: &App,
- ) -> bool {
- if let Some(path) = path {
- if !self.inline_completions_enabled_for_path(path) {
- return false;
- }
- }
-
+ pub fn show_inline_completions(&self, language: Option<&Arc<Language>>, cx: &App) -> bool {
self.language(None, language.map(|l| l.name()).as_ref(), cx)
- .show_inline_completions
+ .show_edit_predictions
+ }
+
+ /// Returns the edit predictions preview mode for the given language and path.
+ pub fn inline_completions_preview_mode(&self) -> InlineCompletionPreviewMode {
+ self.edit_predictions.inline_preview
}
}
@@ -994,12 +1019,18 @@ impl settings::Settings for AllLanguageSettings {
}
let mut copilot_enabled = default_value.features.as_ref().and_then(|f| f.copilot);
- let mut inline_completion_provider = default_value
+ let mut edit_prediction_provider = default_value
.features
.as_ref()
- .and_then(|f| f.inline_completion_provider);
+ .and_then(|f| f.edit_prediction_provider);
+ let mut inline_completions_preview = default_value
+ .edit_predictions
+ .as_ref()
+ .map(|inline_completions| inline_completions.inline_preview)
+ .ok_or_else(Self::missing_default)?;
+
let mut completion_globs: HashSet<&String> = default_value
- .inline_completions
+ .edit_predictions
.as_ref()
.and_then(|c| c.disabled_globs.as_ref())
.map(|globs| globs.iter().collect())
@@ -1024,16 +1055,17 @@ impl settings::Settings for AllLanguageSettings {
if let Some(provider) = user_settings
.features
.as_ref()
- .and_then(|f| f.inline_completion_provider)
+ .and_then(|f| f.edit_prediction_provider)
{
- inline_completion_provider = Some(provider);
+ edit_prediction_provider = Some(provider);
}
- if let Some(globs) = user_settings
- .inline_completions
- .as_ref()
- .and_then(|f| f.disabled_globs.as_ref())
- {
- completion_globs.extend(globs.iter());
+
+ if let Some(inline_completions) = user_settings.edit_predictions.as_ref() {
+ inline_completions_preview = inline_completions.inline_preview;
+
+ if let Some(disabled_globs) = inline_completions.disabled_globs.as_ref() {
+ completion_globs.extend(disabled_globs.iter());
+ }
}
// A user's global settings override the default global settings and
@@ -1074,18 +1106,19 @@ impl settings::Settings for AllLanguageSettings {
}
Ok(Self {
- inline_completions: InlineCompletionSettings {
- provider: if let Some(provider) = inline_completion_provider {
+ edit_predictions: EditPredictionSettings {
+ provider: if let Some(provider) = edit_prediction_provider {
provider
} else if copilot_enabled.unwrap_or(true) {
- InlineCompletionProvider::Copilot
+ EditPredictionProvider::Copilot
} else {
- InlineCompletionProvider::None
+ EditPredictionProvider::None
},
disabled_globs: completion_globs
.iter()
.filter_map(|g| Some(globset::Glob::new(g).ok()?.compile_matcher()))
.collect(),
+ inline_preview: inline_completions_preview,
},
defaults,
languages,
@@ -1190,12 +1223,12 @@ fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent
);
merge(&mut settings.language_servers, src.language_servers.clone());
merge(
- &mut settings.show_inline_completions,
- src.show_inline_completions,
+ &mut settings.show_edit_predictions,
+ src.show_edit_predictions,
);
merge(
- &mut settings.inline_completions_disabled_in,
- src.inline_completions_disabled_in.clone(),
+ &mut settings.edit_predictions_disabled_in,
+ src.edit_predictions_disabled_in.clone(),
);
merge(&mut settings.show_whitespaces, src.show_whitespaces);
merge(
@@ -213,7 +213,7 @@ pub fn serialize_diagnostics<'a>(
group_id: entry.diagnostic.group_id as u64,
is_primary: entry.diagnostic.is_primary,
is_valid: true,
- code: entry.diagnostic.code.clone(),
+ code: entry.diagnostic.code.as_ref().map(|s| s.to_string()),
is_disk_based: entry.diagnostic.is_disk_based,
is_unnecessary: entry.diagnostic.is_unnecessary,
data: entry.diagnostic.data.as_ref().map(|data| data.to_string()),
@@ -419,7 +419,7 @@ pub fn deserialize_diagnostics(
},
message: diagnostic.message,
group_id: diagnostic.group_id as usize,
- code: diagnostic.code,
+ code: diagnostic.code.map(lsp::NumberOrString::from_string),
is_primary: diagnostic.is_primary,
is_disk_based: diagnostic.is_disk_based,
is_unnecessary: diagnostic.is_unnecessary,
@@ -20,16 +20,16 @@ anthropic = { workspace = true, features = ["schemars"] }
anyhow.workspace = true
base64.workspace = true
collections.workspace = true
+deepseek = { workspace = true, features = ["schemars"] }
futures.workspace = true
google_ai = { workspace = true, features = ["schemars"] }
gpui.workspace = true
http_client.workspace = true
image.workspace = true
+lmstudio = { workspace = true, features = ["schemars"] }
log.workspace = true
ollama = { workspace = true, features = ["schemars"] }
open_ai = { workspace = true, features = ["schemars"] }
-lmstudio = { workspace = true, features = ["schemars"] }
-deepseek = { workspace = true, features = ["schemars"] }
parking_lot.workspace = true
proto.workspace = true
schemars.workspace = true
@@ -90,7 +90,10 @@ impl CloudModel {
Self::Google(model) => match model {
google_ai::Model::Gemini15Pro
| google_ai::Model::Gemini15Flash
+ | google_ai::Model::Gemini20Pro
| google_ai::Model::Gemini20Flash
+ | google_ai::Model::Gemini20FlashThinking
+ | google_ai::Model::Gemini20FlashLite
| google_ai::Model::Custom { .. } => {
LanguageModelAvailability::RequiresPlan(Plan::ZedPro)
}
@@ -17,6 +17,7 @@ anyhow.workspace = true
client.workspace = true
collections.workspace = true
copilot = { workspace = true, features = ["schemars"] }
+deepseek = { workspace = true, features = ["schemars"] }
editor.workspace = true
feature_flags.workspace = true
fs.workspace = true
@@ -25,11 +26,10 @@ google_ai = { workspace = true, features = ["schemars"] }
gpui.workspace = true
http_client.workspace = true
language_model.workspace = true
+lmstudio = { workspace = true, features = ["schemars"] }
menu.workspace = true
ollama = { workspace = true, features = ["schemars"] }
-lmstudio = { workspace = true, features = ["schemars"] }
open_ai = { workspace = true, features = ["schemars"] }
-deepseek = { workspace = true, features = ["schemars"] }
project.workspace = true
proto.workspace = true
schemars.workspace = true
@@ -11,8 +11,7 @@ use ui::{
Window,
};
use ui::{Button, ButtonStyle};
-use workspace::Item;
-use workspace::Workspace;
+use workspace::{Item, SplitDirection, Workspace};
actions!(debug, [OpenKeyContextView]);
@@ -20,7 +19,12 @@ pub fn init(cx: &mut App) {
cx.observe_new(|workspace: &mut Workspace, _, _| {
workspace.register_action(|workspace, _: &OpenKeyContextView, window, cx| {
let key_context_view = cx.new(|cx| KeyContextView::new(window, cx));
- workspace.add_item_to_active_pane(Box::new(key_context_view), None, true, window, cx)
+ workspace.split_item(
+ SplitDirection::Right,
+ Box::new(key_context_view),
+ window,
+ cx,
+ )
});
})
.detach();
@@ -11,6 +11,7 @@ use lsp_log::LogKind;
use project::{FakeFs, Project};
use serde_json::json;
use settings::SettingsStore;
+use util::path;
#[gpui::test]
async fn test_lsp_logs(cx: &mut TestAppContext) {
@@ -22,7 +23,7 @@ async fn test_lsp_logs(cx: &mut TestAppContext) {
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/the-root",
+ path!("/the-root"),
json!({
"test.rs": "",
"package.json": "",
@@ -30,7 +31,7 @@ async fn test_lsp_logs(cx: &mut TestAppContext) {
)
.await;
- let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(Arc::new(Language::new(
@@ -57,7 +58,7 @@ async fn test_lsp_logs(cx: &mut TestAppContext) {
let _rust_buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/the-root/test.rs", cx)
+ project.open_local_buffer_with_lsp(path!("/the-root/test.rs"), cx)
})
.await
.unwrap();
@@ -13,11 +13,13 @@ test-support = [
"load-grammars"
]
load-grammars = [
+ "tree-sitter",
"tree-sitter-bash",
"tree-sitter-c",
"tree-sitter-cpp",
"tree-sitter-css",
"tree-sitter-diff",
+ "tree-sitter-gitcommit",
"tree-sitter-go",
"tree-sitter-go-mod",
"tree-sitter-gowork",
@@ -29,7 +31,6 @@ load-grammars = [
"tree-sitter-rust",
"tree-sitter-typescript",
"tree-sitter-yaml",
- "tree-sitter",
]
[dependencies]
@@ -46,12 +47,12 @@ log.workspace = true
lsp.workspace = true
node_runtime.workspace = true
paths.workspace = true
-pet.workspace = true
-pet-fs.workspace = true
-pet-core.workspace = true
pet-conda.workspace = true
+pet-core.workspace = true
+pet-fs.workspace = true
pet-poetry.workspace = true
pet-reporter.workspace = true
+pet.workspace = true
project.workspace = true
regex.workspace = true
rope.workspace = true
@@ -69,6 +70,7 @@ tree-sitter-c = { workspace = true, optional = true }
tree-sitter-cpp = { workspace = true, optional = true }
tree-sitter-css = { workspace = true, optional = true }
tree-sitter-diff = { workspace = true, optional = true }
+tree-sitter-gitcommit = { workspace = true, optional = true }
tree-sitter-go = { workspace = true, optional = true }
tree-sitter-go-mod = { workspace = true, optional = true }
tree-sitter-gowork = { workspace = true, optional = true }
@@ -83,15 +85,15 @@ tree-sitter-yaml = { workspace = true, optional = true }
util.workspace = true
[dev-dependencies]
-tree-sitter.workspace = true
+pretty_assertions.workspace = true
text.workspace = true
theme = { workspace = true, features = ["test-support"] }
-unindent.workspace = true
-workspace = { workspace = true, features = ["test-support"] }
-tree-sitter-typescript.workspace = true
-tree-sitter-python.workspace = true
-tree-sitter-go.workspace = true
+tree-sitter-bash.workspace = true
tree-sitter-c.workspace = true
tree-sitter-css.workspace = true
-tree-sitter-bash.workspace = true
-pretty_assertions.workspace = true
+tree-sitter-go.workspace = true
+tree-sitter-python.workspace = true
+tree-sitter-typescript.workspace = true
+tree-sitter.workspace = true
+unindent.workspace = true
+workspace = { workspace = true, features = ["test-support"] }
@@ -1,7 +1,8 @@
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use futures::StreamExt;
-use language::{LspAdapter, LspAdapterDelegate};
+use gpui::AsyncApp;
+use language::{LanguageToolchainStore, LspAdapter, LspAdapterDelegate};
use lsp::{LanguageServerBinary, LanguageServerName};
use node_runtime::NodeRuntime;
use project::Fs;
@@ -39,6 +40,24 @@ impl LspAdapter for CssLspAdapter {
LanguageServerName("vscode-css-language-server".into())
}
+ async fn check_if_user_installed(
+ &self,
+ delegate: &dyn LspAdapterDelegate,
+ _: Arc<dyn LanguageToolchainStore>,
+ _: &AsyncApp,
+ ) -> Option<LanguageServerBinary> {
+ let path = delegate
+ .which("vscode-css-language-server".as_ref())
+ .await?;
+ let env = delegate.shell_env().await;
+
+ Some(LanguageServerBinary {
+ path,
+ env: Some(env),
+ arguments: vec!["--stdio".into()],
+ })
+ }
+
async fn fetch_latest_server_version(
&self,
_: &dyn LspAdapterDelegate,
@@ -0,0 +1,18 @@
+name = "Git Commit"
+grammar = "git_commit"
+path_suffixes = [
+ "TAG_EDITMSG",
+ "MERGE_MSG",
+ "COMMIT_EDITMSG",
+ "NOTES_EDITMSG",
+ "EDIT_DESCRIPTION",
+]
+line_comments = ["#"]
+brackets = [
+ { start = "(", end = ")", close = true, newline = false },
+ { start = "`", end = "`", close = true, newline = false },
+ { start = "\"", end = "\"", close = true, newline = false },
+ { start = "'", end = "'", close = true, newline = false },
+ { start = "{", end = "}", close = true, newline = false },
+ { start = "[", end = "]", close = true, newline = false },
+]
@@ -0,0 +1,18 @@
+(subject) @markup.heading
+(path) @string.special.path
+(branch) @string.special.symbol
+(commit) @constant
+(item) @markup.link.url
+(header) @tag
+
+(change kind: "new file" @diff.plus)
+(change kind: "deleted" @diff.minus)
+(change kind: "modified" @diff.delta)
+(change kind: "renamed" @diff.delta.moved)
+
+(trailer
+ key: (trailer_key) @variable.other.member
+ value: (trailer_value) @string)
+
+[":" "=" "->" (scissors)] @punctuation.delimiter
+(comment) @comment
@@ -0,0 +1,5 @@
+((scissors) @content
+ (#set! "language" "diff"))
+
+((rebase_command) @content
+ (#set! "language" "git_rebase"))
@@ -149,6 +149,24 @@ impl LspAdapter for JsonLspAdapter {
LanguageServerName("json-language-server".into())
}
+ async fn check_if_user_installed(
+ &self,
+ delegate: &dyn LspAdapterDelegate,
+ _: Arc<dyn LanguageToolchainStore>,
+ _: &AsyncApp,
+ ) -> Option<LanguageServerBinary> {
+ let path = delegate
+ .which("vscode-json-language-server".as_ref())
+ .await?;
+ let env = delegate.shell_env().await;
+
+ Some(LanguageServerBinary {
+ path,
+ env: Some(env),
+ arguments: vec!["--stdio".into()],
+ })
+ }
+
async fn fetch_latest_server_version(
&self,
_: &dyn LspAdapterDelegate,
@@ -2,7 +2,6 @@ use anyhow::Context as _;
use gpui::{App, UpdateGlobal};
use json::json_task_context;
pub use language::*;
-use lsp::LanguageServerName;
use node_runtime::NodeRuntime;
use python::{PythonContextProvider, PythonToolchainProvider};
use rust_embed::RustEmbed;
@@ -31,6 +30,25 @@ mod yaml;
#[exclude = "*.rs"]
struct LanguageDir;
+/// A shared grammar for plain text, exposed for reuse by downstream crates.
+#[cfg(feature = "tree-sitter-gitcommit")]
+pub static LANGUAGE_GIT_COMMIT: std::sync::LazyLock<Arc<Language>> =
+ std::sync::LazyLock::new(|| {
+ Arc::new(Language::new(
+ LanguageConfig {
+ name: "Git Commit".into(),
+ soft_wrap: Some(language::language_settings::SoftWrap::EditorWidth),
+ matcher: LanguageMatcher {
+ path_suffixes: vec!["COMMIT_EDITMSG".to_owned()],
+ first_line_pattern: None,
+ },
+ line_comments: vec![Arc::from("#")],
+ ..LanguageConfig::default()
+ },
+ Some(tree_sitter_gitcommit::LANGUAGE.into()),
+ ))
+ });
+
pub fn init(languages: Arc<LanguageRegistry>, node_runtime: NodeRuntime, cx: &mut App) {
#[cfg(feature = "load-grammars")]
languages.register_native_grammars([
@@ -53,6 +71,7 @@ pub fn init(languages: Arc<LanguageRegistry>, node_runtime: NodeRuntime, cx: &mu
("tsx", tree_sitter_typescript::LANGUAGE_TSX),
("typescript", tree_sitter_typescript::LANGUAGE_TYPESCRIPT),
("yaml", tree_sitter_yaml::LANGUAGE),
+ ("gitcommit", tree_sitter_gitcommit::LANGUAGE),
]);
macro_rules! language {
@@ -83,19 +83,28 @@ impl LspAdapter for PythonLspAdapter {
_: Arc<dyn LanguageToolchainStore>,
_: &AsyncApp,
) -> Option<LanguageServerBinary> {
- let node = delegate.which("node".as_ref()).await?;
- let (node_modules_path, _) = delegate
- .npm_package_installed_version(Self::SERVER_NAME.as_ref())
- .await
- .log_err()??;
+ if let Some(pyright_bin) = delegate.which(Self::SERVER_NAME.as_ref()).await {
+ let env = delegate.shell_env().await;
+ Some(LanguageServerBinary {
+ path: pyright_bin,
+ env: Some(env),
+ arguments: vec!["--stdio".into()],
+ })
+ } else {
+ let node = delegate.which("node".as_ref()).await?;
+ let (node_modules_path, _) = delegate
+ .npm_package_installed_version(Self::SERVER_NAME.as_ref())
+ .await
+ .log_err()??;
- let path = node_modules_path.join(NODE_MODULE_RELATIVE_SERVER_PATH);
+ let path = node_modules_path.join(NODE_MODULE_RELATIVE_SERVER_PATH);
- Some(LanguageServerBinary {
- path: node,
- env: None,
- arguments: server_binary_arguments(&path),
- })
+ Some(LanguageServerBinary {
+ path: node,
+ env: None,
+ arguments: server_binary_arguments(&path),
+ })
+ }
}
async fn fetch_latest_server_version(
@@ -791,19 +800,28 @@ impl LspAdapter for PyLspAdapter {
toolchains: Arc<dyn LanguageToolchainStore>,
cx: &AsyncApp,
) -> Option<LanguageServerBinary> {
- let venv = toolchains
- .active_toolchain(
- delegate.worktree_id(),
- LanguageName::new("Python"),
- &mut cx.clone(),
- )
- .await?;
- let pylsp_path = Path::new(venv.path.as_ref()).parent()?.join("pylsp");
- pylsp_path.exists().then(|| LanguageServerBinary {
- path: venv.path.to_string().into(),
- arguments: vec![pylsp_path.into()],
- env: None,
- })
+ if let Some(pylsp_bin) = delegate.which(Self::SERVER_NAME.as_ref()).await {
+ let env = delegate.shell_env().await;
+ Some(LanguageServerBinary {
+ path: pylsp_bin,
+ env: Some(env),
+ arguments: vec![],
+ })
+ } else {
+ let venv = toolchains
+ .active_toolchain(
+ delegate.worktree_id(),
+ LanguageName::new("Python"),
+ &mut cx.clone(),
+ )
+ .await?;
+ let pylsp_path = Path::new(venv.path.as_ref()).parent()?.join("pylsp");
+ pylsp_path.exists().then(|| LanguageServerBinary {
+ path: venv.path.to_string().into(),
+ arguments: vec![pylsp_path.into()],
+ env: None,
+ })
+ }
}
async fn fetch_latest_server_version(
@@ -818,11 +818,12 @@ mod tests {
use lsp::CompletionItemLabelDetails;
use settings::SettingsStore;
use theme::SyntaxTheme;
+ use util::path;
#[gpui::test]
async fn test_process_rust_diagnostics() {
let mut params = lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path("/a").unwrap(),
+ uri: lsp::Url::from_file_path(path!("/a")).unwrap(),
version: None,
diagnostics: vec![
// no newlines
@@ -47,6 +47,22 @@ impl LspAdapter for TailwindLspAdapter {
Self::SERVER_NAME.clone()
}
+ async fn check_if_user_installed(
+ &self,
+ delegate: &dyn LspAdapterDelegate,
+ _: Arc<dyn LanguageToolchainStore>,
+ _: &AsyncApp,
+ ) -> Option<LanguageServerBinary> {
+ let path = delegate.which(Self::SERVER_NAME.as_ref()).await?;
+ let env = delegate.shell_env().await;
+
+ Some(LanguageServerBinary {
+ path,
+ env: Some(env),
+ arguments: vec!["--stdio".into()],
+ })
+ }
+
async fn fetch_latest_server_version(
&self,
_: &dyn LspAdapterDelegate,
@@ -22,7 +22,7 @@ collections.workspace = true
futures.workspace = true
gpui.workspace = true
log.workspace = true
-lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "72357d6f6d212bdffba3b5ef4b31d8ca856058e7" }
+lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "1fff0dd12e2071c5667327394cfec163d2a466ab" }
parking_lot.workspace = true
postage.workspace = true
serde.workspace = true
@@ -724,6 +724,9 @@ impl LanguageServer {
}),
rename: Some(RenameClientCapabilities {
prepare_support: Some(true),
+ prepare_support_default_behavior: Some(
+ PrepareSupportDefaultBehavior::IDENTIFIER,
+ ),
..Default::default()
}),
hover: Some(HoverClientCapabilities {
@@ -83,7 +83,6 @@ pub fn main() {
selection.fade_out(0.7);
selection
},
- break_style: Default::default(),
heading: Default::default(),
};
let markdown = cx.new(|cx| {
@@ -28,7 +28,6 @@ pub struct MarkdownStyle {
pub block_quote_border_color: Hsla,
pub syntax: Arc<SyntaxTheme>,
pub selection_background_color: Hsla,
- pub break_style: StyleRefinement,
pub heading: StyleRefinement,
}
@@ -44,11 +43,11 @@ impl Default for MarkdownStyle {
block_quote_border_color: Default::default(),
syntax: Arc::new(SyntaxTheme::default()),
selection_background_color: Default::default(),
- break_style: Default::default(),
heading: Default::default(),
}
}
}
+
pub struct Markdown {
source: String,
selection: Selection,
@@ -751,8 +750,8 @@ impl Element for MarkdownElement {
}
_ => log::error!("unsupported markdown tag end: {:?}", tag),
},
- MarkdownEvent::Text => {
- builder.push_text(&parsed_markdown.source[range.clone()], range.start);
+ MarkdownEvent::Text(parsed) => {
+ builder.push_text(parsed, range.start);
}
MarkdownEvent::Code => {
builder.push_text_style(self.style.inline_code.clone());
@@ -777,12 +776,7 @@ impl Element for MarkdownElement {
builder.pop_div()
}
MarkdownEvent::SoftBreak => builder.push_text(" ", range.start),
- MarkdownEvent::HardBreak => {
- let mut d = div().py_3();
- d.style().refine(&self.style.break_style);
- builder.push_div(d, range, markdown_end);
- builder.pop_div()
- }
+ MarkdownEvent::HardBreak => builder.push_text("\n", range.start),
_ => log::error!("unsupported markdown event {:?}", event),
}
}
@@ -37,9 +37,10 @@ pub fn parse_markdown(text: &str) -> Vec<(Range<usize>, MarkdownEvent)> {
}
events.push((range, MarkdownEvent::End(tag)));
}
- pulldown_cmark::Event::Text(_) => {
+ pulldown_cmark::Event::Text(parsed) => {
// Automatically detect links in text if we're not already within a markdown
// link.
+ let mut parsed = parsed.as_ref();
if !within_link {
let mut finder = LinkFinder::new();
finder.kinds(&[linkify::LinkKind::Url]);
@@ -49,7 +50,12 @@ pub fn parse_markdown(text: &str) -> Vec<(Range<usize>, MarkdownEvent)> {
text_range.start + link.start()..text_range.start + link.end();
if link_range.start > range.start {
- events.push((range.start..link_range.start, MarkdownEvent::Text));
+ let (text, tail) = parsed.split_at(link_range.start - range.start);
+ events.push((
+ range.start..link_range.start,
+ MarkdownEvent::Text(SharedString::new(text)),
+ ));
+ parsed = tail;
}
events.push((
@@ -61,15 +67,20 @@ pub fn parse_markdown(text: &str) -> Vec<(Range<usize>, MarkdownEvent)> {
id: SharedString::default(),
}),
));
- events.push((link_range.clone(), MarkdownEvent::Text));
+
+ let (link_text, tail) = parsed.split_at(link_range.end - link_range.start);
+ events.push((
+ link_range.clone(),
+ MarkdownEvent::Text(SharedString::new(link_text)),
+ ));
events.push((link_range.clone(), MarkdownEvent::End(MarkdownTagEnd::Link)));
range.start = link_range.end;
+ parsed = tail;
}
}
-
if range.start < range.end {
- events.push((range, MarkdownEvent::Text));
+ events.push((range, MarkdownEvent::Text(SharedString::new(parsed))));
}
}
pulldown_cmark::Event::Code(_) => {
@@ -94,7 +105,7 @@ pub fn parse_markdown(text: &str) -> Vec<(Range<usize>, MarkdownEvent)> {
events
}
-pub fn parse_links_only(text: &str) -> Vec<(Range<usize>, MarkdownEvent)> {
+pub fn parse_links_only(mut text: &str) -> Vec<(Range<usize>, MarkdownEvent)> {
let mut events = Vec::new();
let mut finder = LinkFinder::new();
finder.kinds(&[linkify::LinkKind::Url]);
@@ -106,9 +117,15 @@ pub fn parse_links_only(text: &str) -> Vec<(Range<usize>, MarkdownEvent)> {
let link_range = link.start()..link.end();
if link_range.start > text_range.start {
- events.push((text_range.start..link_range.start, MarkdownEvent::Text));
+ let (head, tail) = text.split_at(link_range.start - text_range.start);
+ events.push((
+ text_range.start..link_range.start,
+ MarkdownEvent::Text(SharedString::new(head)),
+ ));
+ text = tail;
}
+ let (link_text, tail) = text.split_at(link_range.end - link_range.start);
events.push((
link_range.clone(),
MarkdownEvent::Start(MarkdownTag::Link {
@@ -118,14 +135,18 @@ pub fn parse_links_only(text: &str) -> Vec<(Range<usize>, MarkdownEvent)> {
id: SharedString::default(),
}),
));
- events.push((link_range.clone(), MarkdownEvent::Text));
+ events.push((
+ link_range.clone(),
+ MarkdownEvent::Text(SharedString::new(link_text)),
+ ));
events.push((link_range.clone(), MarkdownEvent::End(MarkdownTagEnd::Link)));
text_range.start = link_range.end;
+ text = tail;
}
if text_range.end > text_range.start {
- events.push((text_range, MarkdownEvent::Text));
+ events.push((text_range, MarkdownEvent::Text(SharedString::new(text))));
}
events
@@ -142,7 +163,7 @@ pub enum MarkdownEvent {
/// End of a tagged element.
End(MarkdownTagEnd),
/// A text node.
- Text,
+ Text(SharedString),
/// An inline code node.
Code,
/// An HTML node.
@@ -0,0 +1,22 @@
+[package]
+name = "migrator"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/migrator.rs"
+doctest = false
+
+[dependencies]
+collections.workspace = true
+convert_case.workspace = true
+tree-sitter-json.workspace = true
+tree-sitter.workspace = true
+
+[dev-dependencies]
+pretty_assertions.workspace = true
@@ -0,0 +1 @@
+../../LICENSE-GPL
@@ -0,0 +1,863 @@
+use collections::HashMap;
+use convert_case::{Case, Casing};
+use std::{cmp::Reverse, ops::Range, sync::LazyLock};
+use tree_sitter::{Query, QueryMatch};
+
+/// Parses `text` as JSON and applies every matching migration pattern,
+/// returning the rewritten text, or `None` if no pattern produced an edit.
+fn migrate(text: &str, patterns: MigrationPatterns, query: &Query) -> Option<String> {
+    let mut parser = tree_sitter::Parser::new();
+    parser
+        .set_language(&tree_sitter_json::LANGUAGE.into())
+        .unwrap();
+    let syntax_tree = parser.parse(&text, None).unwrap();
+
+    let mut cursor = tree_sitter::QueryCursor::new();
+    let matches = cursor.matches(query, syntax_tree.root_node(), text.as_bytes());
+
+    // Callbacks are looked up by `pattern_index`, so `patterns` must be in
+    // the same order as the patterns concatenated into `query`.
+    let mut edits = vec![];
+    for mat in matches {
+        if let Some((_, callback)) = patterns.get(mat.pattern_index) {
+            edits.extend(callback(&text, &mat, query));
+        }
+    }
+
+    // Sort by start ascending then end descending, and drop any edit that
+    // overlaps the previously retained (outer) edit so edits never conflict.
+    edits.sort_by_key(|(range, _)| (range.start, Reverse(range.end)));
+    edits.dedup_by(|(range_b, _), (range_a, _)| {
+        range_a.contains(&range_b.start) || range_a.contains(&range_b.end)
+    });
+
+    if edits.is_empty() {
+        None
+    } else {
+        // Apply edits back-to-front so earlier byte ranges remain valid.
+        let mut text = text.to_string();
+        for (range, replacement) in edits.into_iter().rev() {
+            text.replace_range(range, &replacement);
+        }
+        Some(text)
+    }
+}
+
+/// Migrates a user keymap JSON file to the current action/context names.
+///
+/// Runs two passes: first the structural transformations (argument arrays
+/// collapsed to single actions, action renames, context-key renames), then
+/// the replacement pass (snake_casing action-argument keys/values) over the
+/// transformed text. Returns `None` when neither pass changed anything.
+pub fn migrate_keymap(text: &str) -> Option<String> {
+    let transformed_text = migrate(
+        text,
+        KEYMAP_MIGRATION_TRANSFORMATION_PATTERNS,
+        &KEYMAP_MIGRATION_TRANSFORMATION_QUERY,
+    );
+    // `as_deref` borrows the transformed text when the first pass changed
+    // something and falls back to the original input otherwise, avoiding the
+    // throwaway `String` allocation of `unwrap_or(&text.to_string())`.
+    let replacement_text = migrate(
+        transformed_text.as_deref().unwrap_or(text),
+        KEYMAP_MIGRATION_REPLACEMENT_PATTERNS,
+        &KEYMAP_MIGRATION_REPLACEMENT_QUERY,
+    );
+    replacement_text.or(transformed_text)
+}
+
+/// Migrates a user settings JSON file to the current setting names,
+/// returning the rewritten text when any pattern matched, or `None`
+/// when nothing needed to change.
+pub fn migrate_settings(text: &str) -> Option<String> {
+    migrate(text, SETTINGS_MIGRATION_PATTERNS, &SETTINGS_MIGRATION_QUERY)
+}
+
+/// A migration is a tree-sitter query pattern paired with a callback that
+/// converts each match into an optional (byte range, replacement) edit.
+type MigrationPatterns = &'static [(
+    &'static str,
+    fn(&str, &QueryMatch, &Query) -> Option<(Range<usize>, String)>,
+)];
+
+// First keymap pass: structural rewrites of bindings. Slice order must match
+// the pattern order compiled into KEYMAP_MIGRATION_TRANSFORMATION_QUERY.
+static KEYMAP_MIGRATION_TRANSFORMATION_PATTERNS: MigrationPatterns = &[
+    (ACTION_ARRAY_PATTERN, replace_array_with_single_string),
+    (
+        ACTION_ARGUMENT_OBJECT_PATTERN,
+        replace_action_argument_object_with_single_value,
+    ),
+    (ACTION_STRING_PATTERN, rename_string_action),
+    (CONTEXT_PREDICATE_PATTERN, rename_context_key),
+];
+
+// The query is built by concatenating the pattern strings above, preserving
+// their order so `pattern_index` maps back to the callback slice.
+static KEYMAP_MIGRATION_TRANSFORMATION_QUERY: LazyLock<Query> = LazyLock::new(|| {
+    Query::new(
+        &tree_sitter_json::LANGUAGE.into(),
+        &KEYMAP_MIGRATION_TRANSFORMATION_PATTERNS
+            .iter()
+            .map(|pattern| pattern.0)
+            .collect::<String>(),
+    )
+    .unwrap()
+});
+
+// Matches bindings of the form `"key": ["Action", "argument"]` inside a
+// "bindings" object.
+const ACTION_ARRAY_PATTERN: &str = r#"(document
+    (array
+        (object
+            (pair
+                key: (string (string_content) @name)
+                value: (
+                    (object
+                        (pair
+                            key: (string)
+                            value: ((array
+                                . (string (string_content) @action_name)
+                                . (string (string_content) @argument)
+                            .)) @array
+                        )
+                    )
+                )
+            )
+        )
+    )
+    (#eq? @name "bindings")
+)"#;
+
+/// `["workspace::ActivatePaneInDirection", "Up"]` -> `"workspace::ActivatePaneUp"`
+///
+/// Returns the byte range of the whole `["Action", "arg"]` array plus the
+/// quoted replacement action, or `None` when the pair is not in
+/// `TRANSFORM_ARRAY`.
+fn replace_array_with_single_string(
+    contents: &str,
+    mat: &QueryMatch,
+    query: &Query,
+) -> Option<(Range<usize>, String)> {
+    let array_ix = query.capture_index_for_name("array").unwrap();
+    let action_name_ix = query.capture_index_for_name("action_name").unwrap();
+    let argument_ix = query.capture_index_for_name("argument").unwrap();
+
+    // Slices `contents` at the first node captured under `ix`.
+    let node_text = |ix| {
+        let node = mat.nodes_for_capture_index(ix).next()?;
+        contents.get(node.byte_range())
+    };
+
+    let action_name = node_text(action_name_ix)?;
+    let argument = node_text(argument_ix)?;
+    let replacement = TRANSFORM_ARRAY.get(&(action_name, argument))?;
+
+    let array_range = mat.nodes_for_capture_index(array_ix).next()?.byte_range();
+    Some((array_range, format!("\"{replacement}\"")))
+}
+
+// Maps (old action name, string argument) pairs to the new argument-free
+// action name used by `replace_array_with_single_string`.
+#[rustfmt::skip]
+static TRANSFORM_ARRAY: LazyLock<HashMap<(&str, &str), &str>> = LazyLock::new(|| {
+    HashMap::from_iter([
+        // activate
+        (("workspace::ActivatePaneInDirection", "Up"), "workspace::ActivatePaneUp"),
+        (("workspace::ActivatePaneInDirection", "Down"), "workspace::ActivatePaneDown"),
+        (("workspace::ActivatePaneInDirection", "Left"), "workspace::ActivatePaneLeft"),
+        (("workspace::ActivatePaneInDirection", "Right"), "workspace::ActivatePaneRight"),
+        // swap
+        (("workspace::SwapPaneInDirection", "Up"), "workspace::SwapPaneUp"),
+        (("workspace::SwapPaneInDirection", "Down"), "workspace::SwapPaneDown"),
+        (("workspace::SwapPaneInDirection", "Left"), "workspace::SwapPaneLeft"),
+        (("workspace::SwapPaneInDirection", "Right"), "workspace::SwapPaneRight"),
+        // menu
+        (("app_menu::NavigateApplicationMenuInDirection", "Left"), "app_menu::ActivateMenuLeft"),
+        (("app_menu::NavigateApplicationMenuInDirection", "Right"), "app_menu::ActivateMenuRight"),
+        // vim push
+        (("vim::PushOperator", "Change"), "vim::PushChange"),
+        (("vim::PushOperator", "Delete"), "vim::PushDelete"),
+        (("vim::PushOperator", "Yank"), "vim::PushYank"),
+        (("vim::PushOperator", "Replace"), "vim::PushReplace"),
+        (("vim::PushOperator", "DeleteSurrounds"), "vim::PushDeleteSurrounds"),
+        (("vim::PushOperator", "Mark"), "vim::PushMark"),
+        (("vim::PushOperator", "Indent"), "vim::PushIndent"),
+        (("vim::PushOperator", "Outdent"), "vim::PushOutdent"),
+        (("vim::PushOperator", "AutoIndent"), "vim::PushAutoIndent"),
+        (("vim::PushOperator", "Rewrap"), "vim::PushRewrap"),
+        (("vim::PushOperator", "ShellCommand"), "vim::PushShellCommand"),
+        (("vim::PushOperator", "Lowercase"), "vim::PushLowercase"),
+        (("vim::PushOperator", "Uppercase"), "vim::PushUppercase"),
+        (("vim::PushOperator", "OppositeCase"), "vim::PushOppositeCase"),
+        (("vim::PushOperator", "Register"), "vim::PushRegister"),
+        (("vim::PushOperator", "RecordRegister"), "vim::PushRecordRegister"),
+        (("vim::PushOperator", "ReplayRegister"), "vim::PushReplayRegister"),
+        (("vim::PushOperator", "ReplaceWithRegister"), "vim::PushReplaceWithRegister"),
+        (("vim::PushOperator", "ToggleComments"), "vim::PushToggleComments"),
+        // vim switch
+        (("vim::SwitchMode", "Normal"), "vim::SwitchToNormalMode"),
+        (("vim::SwitchMode", "Insert"), "vim::SwitchToInsertMode"),
+        (("vim::SwitchMode", "Replace"), "vim::SwitchToReplaceMode"),
+        (("vim::SwitchMode", "Visual"), "vim::SwitchToVisualMode"),
+        (("vim::SwitchMode", "VisualLine"), "vim::SwitchToVisualLineMode"),
+        (("vim::SwitchMode", "VisualBlock"), "vim::SwitchToVisualBlockMode"),
+        (("vim::SwitchMode", "HelixNormal"), "vim::SwitchToHelixNormalMode"),
+        // vim resize
+        (("vim::ResizePane", "Widen"), "vim::ResizePaneRight"),
+        (("vim::ResizePane", "Narrow"), "vim::ResizePaneLeft"),
+        (("vim::ResizePane", "Shorten"), "vim::ResizePaneDown"),
+        (("vim::ResizePane", "Lengthen"), "vim::ResizePaneUp"),
+    ])
+});
+
+// Matches bindings of the form `"key": ["Action", { "Key": value }]` inside
+// a "bindings" object, capturing the argument object's single key and value.
+const ACTION_ARGUMENT_OBJECT_PATTERN: &str = r#"(document
+    (array
+        (object
+            (pair
+                key: (string (string_content) @name)
+                value: (
+                    (object
+                        (pair
+                            key: (string)
+                            value: ((array
+                                . (string (string_content) @action_name)
+                                . (object
+                                    (pair
+                                        key: (string (string_content) @action_key)
+                                        value: (_) @argument))
+                                . ) @array
+                            ))
+                        )
+                    )
+                )
+            )
+        )
+    (#eq? @name "bindings")
+)"#;
+
+/// [ "editor::FoldAtLevel", { "level": 1 } ] -> [ "editor::FoldAtLevel", 1 ]
+///
+/// Returns the byte range of the whole binding array plus its rewritten
+/// text, or `None` when the action/key pair is not in `UNWRAP_OBJECTS`.
+fn replace_action_argument_object_with_single_value(
+    contents: &str,
+    mat: &QueryMatch,
+    query: &Query,
+) -> Option<(Range<usize>, String)> {
+    let array_ix = query.capture_index_for_name("array").unwrap();
+    let action_name_ix = query.capture_index_for_name("action_name").unwrap();
+    let action_key_ix = query.capture_index_for_name("action_key").unwrap();
+    let argument_ix = query.capture_index_for_name("argument").unwrap();
+
+    let action_name = contents.get(
+        mat.nodes_for_capture_index(action_name_ix)
+            .next()?
+            .byte_range(),
+    )?;
+    let action_key = contents.get(
+        mat.nodes_for_capture_index(action_key_ix)
+            .next()?
+            .byte_range(),
+    )?;
+    let argument = contents.get(
+        mat.nodes_for_capture_index(argument_ix)
+            .next()?
+            .byte_range(),
+    )?;
+
+    // Only actions listed in UNWRAP_OBJECTS are migrated; the lookup also
+    // yields the (possibly renamed) action name to emit.
+    let new_action_name = UNWRAP_OBJECTS.get(&action_name)?.get(&action_key)?;
+
+    // The argument's raw JSON text is preserved verbatim inside the new array.
+    let range_to_replace = mat.nodes_for_capture_index(array_ix).next()?.byte_range();
+    let replacement = format!("[\"{}\", {}]", new_action_name, argument);
+    Some((range_to_replace, replacement))
+}
+
+// "ctrl-k ctrl-1": ["vim::PushOperator", { "Object": {} }] -> ["vim::PushObject", {}]
+// Outer key: old action name; inner map: argument-object key -> new action name.
+static UNWRAP_OBJECTS: LazyLock<HashMap<&str, HashMap<&str, &str>>> = LazyLock::new(|| {
+    HashMap::from_iter([
+        (
+            "editor::FoldAtLevel",
+            HashMap::from_iter([("level", "editor::FoldAtLevel")]),
+        ),
+        (
+            "vim::PushOperator",
+            HashMap::from_iter([
+                ("Object", "vim::PushObject"),
+                ("FindForward", "vim::PushFindForward"),
+                ("FindBackward", "vim::PushFindBackward"),
+                ("Sneak", "vim::PushSneak"),
+                ("SneakBackward", "vim::PushSneakBackward"),
+                ("AddSurrounds", "vim::PushAddSurrounds"),
+                ("ChangeSurrounds", "vim::PushChangeSurrounds"),
+                ("Jump", "vim::PushJump"),
+                ("Digraph", "vim::PushDigraph"),
+                ("Literal", "vim::PushLiteral"),
+            ]),
+        ),
+    ])
+});
+
+// Second keymap pass, run over the output of the transformation pass:
+// normalizes action-argument casing.
+static KEYMAP_MIGRATION_REPLACEMENT_PATTERNS: MigrationPatterns = &[(
+    ACTION_ARGUMENT_SNAKE_CASE_PATTERN,
+    action_argument_snake_case,
+)];
+
+static KEYMAP_MIGRATION_REPLACEMENT_QUERY: LazyLock<Query> = LazyLock::new(|| {
+    Query::new(
+        &tree_sitter_json::LANGUAGE.into(),
+        &KEYMAP_MIGRATION_REPLACEMENT_PATTERNS
+            .iter()
+            .map(|pattern| pattern.0)
+            .collect::<String>(),
+    )
+    .unwrap()
+});
+
+// Matches bindings of the form `"key": "Action"` (plain string action).
+// NOTE: despite its position, this pattern belongs to the transformation
+// pass (see KEYMAP_MIGRATION_TRANSFORMATION_PATTERNS above).
+const ACTION_STRING_PATTERN: &str = r#"(document
+    (array
+        (object
+            (pair
+                key: (string (string_content) @name)
+                value: (
+                    (object
+                        (pair
+                            key: (string)
+                            value: (string (string_content) @action_name)
+                        )
+                    )
+                )
+            )
+        )
+    )
+    (#eq? @name "bindings")
+)"#;
+
+/// Replaces a deprecated action name bound directly as a string with its
+/// current equivalent, e.g. `"inline_completion::ToggleMenu"` becomes
+/// `"edit_prediction::ToggleMenu"`. Returns `None` when the action is not
+/// listed in `STRING_REPLACE`.
+fn rename_string_action(
+    contents: &str,
+    mat: &QueryMatch,
+    query: &Query,
+) -> Option<(Range<usize>, String)> {
+    let capture_ix = query.capture_index_for_name("action_name").unwrap();
+    let node = mat.nodes_for_capture_index(capture_ix).next()?;
+    let range = node.byte_range();
+    let old_name = contents.get(range.clone())?;
+    let new_name = STRING_REPLACE.get(&old_name)?;
+    Some((range, new_name.to_string()))
+}
+
+// "ctrl-k ctrl-1": "inline_completion::ToggleMenu" -> "edit_prediction::ToggleMenu"
+#[rustfmt::skip]
+static STRING_REPLACE: LazyLock<HashMap<&str, &str>> = LazyLock::new(|| {
+    HashMap::from_iter([
+        ("inline_completion::ToggleMenu", "edit_prediction::ToggleMenu"),
+        ("editor::NextInlineCompletion", "editor::NextEditPrediction"),
+        ("editor::PreviousInlineCompletion", "editor::PreviousEditPrediction"),
+        ("editor::AcceptPartialInlineCompletion", "editor::AcceptPartialEditPrediction"),
+        ("editor::ShowInlineCompletion", "editor::ShowEditPrediction"),
+        ("editor::AcceptInlineCompletion", "editor::AcceptEditPrediction"),
+        ("editor::ToggleInlineCompletions", "editor::ToggleEditPrediction"),
+    ])
+});
+
+// Matches `"context": "<predicate>"` entries in the keymap's section objects.
+// NOTE(review): unlike the other patterns, the #eq? predicate here sits
+// outside the (array ...) s-expression — verify it is actually applied to
+// the pattern rather than parsed as a separate top-level form.
+const CONTEXT_PREDICATE_PATTERN: &str = r#"
+(array
+    (object
+        (pair
+            key: (string (string_content) @name)
+            value: (string (string_content) @context_predicate)
+        )
+    )
+)
+(#eq? @name "context")
+"#;
+
+/// Rewrites deprecated identifiers inside a binding's `"context"` predicate
+/// (e.g. `inline_completion` -> `edit_prediction`).
+///
+/// Returns the byte range of the predicate plus its rewritten text, or
+/// `None` when no deprecated identifier occurred in the predicate.
+fn rename_context_key(
+    contents: &str,
+    mat: &QueryMatch,
+    query: &Query,
+) -> Option<(Range<usize>, String)> {
+    let context_predicate_ix = query.capture_index_for_name("context_predicate").unwrap();
+    let context_predicate_range = mat
+        .nodes_for_capture_index(context_predicate_ix)
+        .next()?
+        .byte_range();
+    // Borrow the old predicate instead of cloning it; only the rewritten
+    // copy needs to be owned.
+    let old_predicate = contents.get(context_predicate_range.clone())?;
+    let mut new_predicate = old_predicate.to_string();
+    // NOTE(review): plain substring replacement, and HashMap iteration order
+    // is nondeterministic — the current CONTEXT_REPLACE entries yield the
+    // same result in either order, but keep that invariant when adding keys.
+    for (old_key, new_key) in CONTEXT_REPLACE.iter() {
+        new_predicate = new_predicate.replace(old_key, new_key);
+    }
+    if new_predicate != old_predicate {
+        Some((context_predicate_range, new_predicate))
+    } else {
+        None
+    }
+}
+
+// Matches bindings of the form `"key": ["Action", { "Key": value }]` so the
+// argument object's key (and string value) casing can be normalized.
+const ACTION_ARGUMENT_SNAKE_CASE_PATTERN: &str = r#"(document
+    (array
+        (object
+            (pair
+                key: (string (string_content) @name)
+                value: (
+                    (object
+                        (pair
+                            key: (string)
+                            value: ((array
+                                . (string (string_content) @action_name)
+                                . (object
+                                    (pair
+                                        key: (string (string_content) @argument_key)
+                                        value: (_) @argument_value))
+                                . ) @array
+                            ))
+                        )
+                    )
+                )
+            )
+        )
+    (#eq? @name "bindings")
+)"#;
+
+/// Returns true when `text` is already in snake_case.
+fn is_snake_case(text: &str) -> bool {
+    text == text.to_case(Case::Snake)
+}
+
+/// Converts `text` to snake_case, e.g. "SomeKey" -> "some_key".
+fn to_snake_case(text: &str) -> String {
+    text.to_case(Case::Snake)
+}
+
+/// [ "editor::FoldAtLevel", { "SomeKey": "Value" } ] -> [ "editor::FoldAtLevel", { "some_key" : "value" } ]
+///
+/// Returns the byte range of the whole binding array plus its rewritten
+/// text, or `None` when key and value are already snake_case.
+fn action_argument_snake_case(
+    contents: &str,
+    mat: &QueryMatch,
+    query: &Query,
+) -> Option<(Range<usize>, String)> {
+    let array_ix = query.capture_index_for_name("array").unwrap();
+    let action_name_ix = query.capture_index_for_name("action_name").unwrap();
+    let argument_key_ix = query.capture_index_for_name("argument_key").unwrap();
+    let argument_value_ix = query.capture_index_for_name("argument_value").unwrap();
+    let action_name = contents.get(
+        mat.nodes_for_capture_index(action_name_ix)
+            .next()?
+            .byte_range(),
+    )?;
+
+    let argument_key = contents.get(
+        mat.nodes_for_capture_index(argument_key_ix)
+            .next()?
+            .byte_range(),
+    )?;
+
+    let argument_value_node = mat.nodes_for_capture_index(argument_value_ix).next()?;
+    let argument_value = contents.get(argument_value_node.byte_range())?;
+
+    // Normalize the argument key to snake_case if needed.
+    let mut needs_replacement = false;
+    let mut new_key = argument_key.to_string();
+    if !is_snake_case(argument_key) {
+        new_key = to_snake_case(argument_key);
+        needs_replacement = true;
+    }
+
+    // Only string values are re-cased; booleans and numbers pass through.
+    let mut new_value = argument_value.to_string();
+    if argument_value_node.kind() == "string" {
+        let inner_value = argument_value.trim_matches('"');
+        if !is_snake_case(inner_value) {
+            new_value = format!("\"{}\"", to_snake_case(inner_value));
+            needs_replacement = true;
+        }
+    }
+
+    if !needs_replacement {
+        return None;
+    }
+
+    // Re-emit the whole array in canonical single-line form.
+    let range_to_replace = mat.nodes_for_capture_index(array_ix).next()?.byte_range();
+    let replacement = format!(
+        "[\"{}\", {{ \"{}\": {} }}]",
+        action_name, new_key, new_value
+    );
+
+    Some((range_to_replace, replacement))
+}
+
+// "context": "Editor && inline_completion && !showing_completions" -> "Editor && edit_prediction && !showing_completions"
+pub static CONTEXT_REPLACE: LazyLock<HashMap<&str, &str>> = LazyLock::new(|| {
+    HashMap::from_iter([
+        ("inline_completion", "edit_prediction"),
+        (
+            "inline_completion_requires_modifier",
+            "edit_prediction_requires_modifier",
+        ),
+    ])
+});
+
+// Settings pass: slice order must match the pattern order compiled into
+// SETTINGS_MIGRATION_QUERY.
+static SETTINGS_MIGRATION_PATTERNS: MigrationPatterns = &[
+    (SETTINGS_STRING_REPLACE_QUERY, replace_setting_name),
+    (SETTINGS_REPLACE_NESTED_KEY, replace_setting_nested_key),
+    (
+        SETTINGS_REPLACE_IN_LANGUAGES_QUERY,
+        replace_setting_in_languages,
+    ),
+];
+
+static SETTINGS_MIGRATION_QUERY: LazyLock<Query> = LazyLock::new(|| {
+    Query::new(
+        &tree_sitter_json::LANGUAGE.into(),
+        &SETTINGS_MIGRATION_PATTERNS
+            .iter()
+            .map(|pattern| pattern.0)
+            .collect::<String>(),
+    )
+    .unwrap()
+});
+
+// Matches every top-level `"key": value` pair in the settings document.
+static SETTINGS_STRING_REPLACE_QUERY: &str = r#"(document
+    (object
+        (pair
+            key: (string (string_content) @name)
+            value: (_)
+        )
+    )
+)"#;
+
+/// Renames a deprecated top-level setting key (see SETTINGS_STRING_REPLACE),
+/// returning the key's byte range and its new name, or `None` when the key
+/// is not deprecated.
+fn replace_setting_name(
+    contents: &str,
+    mat: &QueryMatch,
+    query: &Query,
+) -> Option<(Range<usize>, String)> {
+    let capture_ix = query.capture_index_for_name("name").unwrap();
+    let node = mat.nodes_for_capture_index(capture_ix).next()?;
+    let range = node.byte_range();
+    let old_name = contents.get(range.clone())?;
+    let new_name = SETTINGS_STRING_REPLACE.get(&old_name)?;
+    Some((range, new_name.to_string()))
+}
+
+// Old top-level setting name -> new name. Ordered longest-prefix first in
+// source for readability; lookup is exact-match so order does not matter.
+#[rustfmt::skip]
+pub static SETTINGS_STRING_REPLACE: LazyLock<HashMap<&'static str, &'static str>> = LazyLock::new(|| {
+    HashMap::from_iter([
+        ("show_inline_completions_in_menu", "show_edit_predictions_in_menu"),
+        ("show_inline_completions", "show_edit_predictions"),
+        ("inline_completions_disabled_in", "edit_predictions_disabled_in"),
+        ("inline_completions", "edit_predictions")
+    ])
+});
+
+// Matches a `"parent": { "child": value }` pair anywhere in the document,
+// capturing both the parent key and the nested setting name.
+static SETTINGS_REPLACE_NESTED_KEY: &str = r#"
+(object
+    (pair
+        key: (string (string_content) @parent_key)
+        value: (object
+            (pair
+                key: (string (string_content) @setting_name)
+                value: (_) @value
+            )
+        )
+    )
+)
+"#;
+
+/// Renames a deprecated setting that lives one level deep, e.g.
+/// `"features": { "inline_completion_provider": ... }` becomes
+/// `"features": { "edit_prediction_provider": ... }`. Returns `None` when
+/// the (parent, child) pair is not in SETTINGS_NESTED_STRING_REPLACE.
+fn replace_setting_nested_key(
+    contents: &str,
+    mat: &QueryMatch,
+    query: &Query,
+) -> Option<(Range<usize>, String)> {
+    let parent_ix = query.capture_index_for_name("parent_key").unwrap();
+    let parent_node = mat.nodes_for_capture_index(parent_ix).next()?;
+    let parent_name = contents.get(parent_node.byte_range())?;
+
+    let setting_ix = query.capture_index_for_name("setting_name").unwrap();
+    let setting_node = mat.nodes_for_capture_index(setting_ix).next()?;
+    let setting_range = setting_node.byte_range();
+    let setting_name = contents.get(setting_range.clone())?;
+
+    let replacement = SETTINGS_NESTED_STRING_REPLACE
+        .get(&parent_name)?
+        .get(setting_name)?;
+    Some((setting_range, replacement.to_string()))
+}
+
+// "features": {
+//     "inline_completion_provider": "copilot"
+// },
+// Outer key: parent object name; inner map: old nested key -> new key.
+pub static SETTINGS_NESTED_STRING_REPLACE: LazyLock<
+    HashMap<&'static str, HashMap<&'static str, &'static str>>,
+> = LazyLock::new(|| {
+    HashMap::from_iter([(
+        "features",
+        HashMap::from_iter([("inline_completion_provider", "edit_prediction_provider")]),
+    )])
+});
+
+// Matches per-language overrides: `"languages": { "<Lang>": { "key": value } }`.
+// NOTE(review): the #eq? predicate sits outside the (object ...) pattern —
+// verify it is applied to the pattern rather than parsed as a separate form.
+static SETTINGS_REPLACE_IN_LANGUAGES_QUERY: &str = r#"
+(object
+    (pair
+        key: (string (string_content) @languages)
+        value: (object
+            (pair
+                key: (string)
+                value: (object
+                    (pair
+                        key: (string (string_content) @setting_name)
+                        value: (_) @value
+                    )
+                )
+            )
+        ))
+    )
+)
+(#eq? @languages "languages")
+"#;
+
+/// Renames a deprecated setting inside a per-language override block (see
+/// LANGUAGE_SETTINGS_REPLACE), returning the key's byte range and its new
+/// name, or `None` when the key is not deprecated.
+fn replace_setting_in_languages(
+    contents: &str,
+    mat: &QueryMatch,
+    query: &Query,
+) -> Option<(Range<usize>, String)> {
+    let capture_ix = query.capture_index_for_name("setting_name").unwrap();
+    let node = mat.nodes_for_capture_index(capture_ix).next()?;
+    let range = node.byte_range();
+    let old_name = contents.get(range.clone())?;
+    let new_name = LANGUAGE_SETTINGS_REPLACE.get(&old_name)?;
+    Some((range, new_name.to_string()))
+}
+
+// Old per-language setting name -> new name.
+#[rustfmt::skip]
+static LANGUAGE_SETTINGS_REPLACE: LazyLock<
+    HashMap<&'static str, &'static str>,
+> = LazyLock::new(|| {
+    HashMap::from_iter([
+        ("show_inline_completions", "show_edit_predictions"),
+        ("inline_completions_disabled_in", "edit_predictions_disabled_in"),
+    ])
+});
+
+// Fixture-driven tests: each feeds a JSON snippet through the public entry
+// points and compares against the exact rewritten text (`None` means "no
+// migration applied"). The fixtures' whitespace is significant.
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    /// Asserts that `migrate_keymap(input)` produces exactly `output`.
+    fn assert_migrate_keymap(input: &str, output: Option<&str>) {
+        let migrated = migrate_keymap(&input);
+        pretty_assertions::assert_eq!(migrated.as_deref(), output);
+    }
+
+    /// Asserts that `migrate_settings(input)` produces exactly `output`.
+    fn assert_migrate_settings(input: &str, output: Option<&str>) {
+        let migrated = migrate_settings(&input);
+        pretty_assertions::assert_eq!(migrated.as_deref(), output);
+    }
+
+    #[test]
+    fn test_replace_array_with_single_string() {
+        assert_migrate_keymap(
+            r#"
+            [
+                {
+                    "bindings": {
+                        "cmd-1": ["workspace::ActivatePaneInDirection", "Up"]
+                    }
+                }
+            ]
+            "#,
+            Some(
+                r#"
+            [
+                {
+                    "bindings": {
+                        "cmd-1": "workspace::ActivatePaneUp"
+                    }
+                }
+            ]
+            "#,
+            ),
+        )
+    }
+
+    #[test]
+    fn test_replace_action_argument_object_with_single_value() {
+        assert_migrate_keymap(
+            r#"
+            [
+                {
+                    "bindings": {
+                        "cmd-1": ["editor::FoldAtLevel", { "level": 1 }]
+                    }
+                }
+            ]
+            "#,
+            Some(
+                r#"
+            [
+                {
+                    "bindings": {
+                        "cmd-1": ["editor::FoldAtLevel", 1]
+                    }
+                }
+            ]
+            "#,
+            ),
+        )
+    }
+
+    #[test]
+    fn test_replace_action_argument_object_with_single_value_2() {
+        assert_migrate_keymap(
+            r#"
+            [
+                {
+                    "bindings": {
+                        "cmd-1": ["vim::PushOperator", { "Object": { "some" : "value" } }]
+                    }
+                }
+            ]
+            "#,
+            Some(
+                r#"
+            [
+                {
+                    "bindings": {
+                        "cmd-1": ["vim::PushObject", { "some" : "value" }]
+                    }
+                }
+            ]
+            "#,
+            ),
+        )
+    }
+
+    #[test]
+    fn test_rename_string_action() {
+        assert_migrate_keymap(
+            r#"
+            [
+                {
+                    "bindings": {
+                        "cmd-1": "inline_completion::ToggleMenu"
+                    }
+                }
+            ]
+            "#,
+            Some(
+                r#"
+            [
+                {
+                    "bindings": {
+                        "cmd-1": "edit_prediction::ToggleMenu"
+                    }
+                }
+            ]
+            "#,
+            ),
+        )
+    }
+
+    #[test]
+    fn test_rename_context_key() {
+        assert_migrate_keymap(
+            r#"
+            [
+                {
+                    "context": "Editor && inline_completion && !showing_completions"
+                }
+            ]
+            "#,
+            Some(
+                r#"
+            [
+                {
+                    "context": "Editor && edit_prediction && !showing_completions"
+                }
+            ]
+            "#,
+            ),
+        )
+    }
+
+    #[test]
+    fn test_action_argument_snake_case() {
+        // First performs transformations, then replacements
+        assert_migrate_keymap(
+            r#"
+            [
+                {
+                    "bindings": {
+                        "cmd-1": ["vim::PushOperator", { "Object": { "SomeKey": "Value" } }],
+                        "cmd-2": ["vim::SomeOtherAction", { "OtherKey": "Value" }],
+                        "cmd-3": ["vim::SomeDifferentAction", { "OtherKey": true }],
+                        "cmd-4": ["vim::OneMore", { "OtherKey": 4 }]
+                    }
+                }
+            ]
+            "#,
+            Some(
+                r#"
+            [
+                {
+                    "bindings": {
+                        "cmd-1": ["vim::PushObject", { "some_key": "value" }],
+                        "cmd-2": ["vim::SomeOtherAction", { "other_key": "value" }],
+                        "cmd-3": ["vim::SomeDifferentAction", { "other_key": true }],
+                        "cmd-4": ["vim::OneMore", { "other_key": 4 }]
+                    }
+                }
+            ]
+            "#,
+            ),
+        )
+    }
+
+    #[test]
+    fn test_replace_setting_name() {
+        assert_migrate_settings(
+            r#"
+            {
+                "show_inline_completions_in_menu": true,
+                "show_inline_completions": true,
+                "inline_completions_disabled_in": ["string"],
+                "inline_completions": { "some" : "value" }
+            }
+            "#,
+            Some(
+                r#"
+            {
+                "show_edit_predictions_in_menu": true,
+                "show_edit_predictions": true,
+                "edit_predictions_disabled_in": ["string"],
+                "edit_predictions": { "some" : "value" }
+            }
+            "#,
+            ),
+        )
+    }
+
+    #[test]
+    fn test_nested_string_replace_for_settings() {
+        assert_migrate_settings(
+            r#"
+            {
+                "features": {
+                    "inline_completion_provider": "zed"
+                },
+            }
+            "#,
+            Some(
+                r#"
+            {
+                "features": {
+                    "edit_prediction_provider": "zed"
+                },
+            }
+            "#,
+            ),
+        )
+    }
+
+    #[test]
+    fn test_replace_settings_in_languages() {
+        assert_migrate_settings(
+            r#"
+            {
+                "languages": {
+                    "Astro": {
+                        "show_inline_completions": true
+                    }
+                }
+            }
+            "#,
+            Some(
+                r#"
+            {
+                "languages": {
+                    "Astro": {
+                        "show_edit_predictions": true
+                    }
+                }
+            }
+            "#,
+            ),
+        )
+    }
+}
@@ -14,9 +14,10 @@ doctest = false
[features]
test-support = [
- "text/test-support",
- "language/test-support",
+ "diff/test-support",
"gpui/test-support",
+ "language/test-support",
+ "text/test-support",
"util/test-support",
]
@@ -25,15 +26,14 @@ anyhow.workspace = true
clock.workspace = true
collections.workspace = true
ctor.workspace = true
+diff.workspace = true
env_logger.workspace = true
futures.workspace = true
-git.workspace = true
gpui.workspace = true
itertools.workspace = true
language.workspace = true
log.workspace = true
parking_lot.workspace = true
-project.workspace = true
rand.workspace = true
rope.workspace = true
smol.workspace = true
@@ -47,12 +47,13 @@ tree-sitter.workspace = true
util.workspace = true
[dev-dependencies]
+diff = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
+indoc.workspace = true
language = { workspace = true, features = ["test-support"] }
+pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
rand.workspace = true
settings = { workspace = true, features = ["test-support"] }
text = { workspace = true, features = ["test-support"] }
util = { workspace = true, features = ["test-support"] }
-pretty_assertions.workspace = true
-indoc.workspace = true
@@ -70,15 +70,15 @@ impl Anchor {
return text_cmp;
}
if self.diff_base_anchor.is_some() || other.diff_base_anchor.is_some() {
- if let Some(diff_base) = snapshot.diffs.get(&excerpt.buffer_id) {
- let self_anchor = self
- .diff_base_anchor
- .filter(|a| diff_base.base_text.can_resolve(a));
- let other_anchor = other
- .diff_base_anchor
- .filter(|a| diff_base.base_text.can_resolve(a));
+ if let Some(base_text) = snapshot
+ .diffs
+ .get(&excerpt.buffer_id)
+ .and_then(|diff| diff.base_text.as_ref())
+ {
+ let self_anchor = self.diff_base_anchor.filter(|a| base_text.can_resolve(a));
+ let other_anchor = other.diff_base_anchor.filter(|a| base_text.can_resolve(a));
return match (self_anchor, other_anchor) {
- (Some(a), Some(b)) => a.cmp(&b, &diff_base.base_text),
+ (Some(a), Some(b)) => a.cmp(&b, base_text),
(Some(_), None) => match other.text_anchor.bias {
Bias::Left => Ordering::Greater,
Bias::Right => Ordering::Less,
@@ -107,9 +107,13 @@ impl Anchor {
excerpt_id: self.excerpt_id,
text_anchor: self.text_anchor.bias_left(&excerpt.buffer),
diff_base_anchor: self.diff_base_anchor.map(|a| {
- if let Some(base) = snapshot.diffs.get(&excerpt.buffer_id) {
- if a.buffer_id == Some(base.base_text.remote_id()) {
- return a.bias_left(&base.base_text);
+ if let Some(base_text) = snapshot
+ .diffs
+ .get(&excerpt.buffer_id)
+ .and_then(|diff| diff.base_text.as_ref())
+ {
+ if a.buffer_id == Some(base_text.remote_id()) {
+ return a.bias_left(base_text);
}
}
a
@@ -128,9 +132,13 @@ impl Anchor {
excerpt_id: self.excerpt_id,
text_anchor: self.text_anchor.bias_right(&excerpt.buffer),
diff_base_anchor: self.diff_base_anchor.map(|a| {
- if let Some(base) = snapshot.diffs.get(&excerpt.buffer_id) {
- if a.buffer_id == Some(base.base_text.remote_id()) {
- return a.bias_right(&base.base_text);
+ if let Some(base_text) = snapshot
+ .diffs
+ .get(&excerpt.buffer_id)
+ .and_then(|diff| diff.base_text.as_ref())
+ {
+ if a.buffer_id == Some(base_text.remote_id()) {
+ return a.bias_right(&base_text);
}
}
a
@@ -9,8 +9,8 @@ pub use position::{TypedOffset, TypedPoint, TypedRow};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use collections::{BTreeMap, Bound, HashMap, HashSet};
+use diff::{BufferDiff, BufferDiffEvent, BufferDiffSnapshot, DiffHunkStatus};
use futures::{channel::mpsc, SinkExt};
-use git::diff::DiffHunkStatus;
use gpui::{App, Context, Entity, EntityId, EventEmitter, Task};
use itertools::Itertools;
use language::{
@@ -21,14 +21,14 @@ use language::{
TextDimension, TextObject, ToOffset as _, ToPoint as _, TransactionId, TreeSitterOptions,
Unclipped,
};
-use project::buffer_store::{BufferChangeSet, BufferChangeSetEvent};
+
use rope::DimensionPair;
use smallvec::SmallVec;
use smol::future::yield_now;
use std::{
any::type_name,
borrow::Cow,
- cell::{Ref, RefCell, RefMut},
+ cell::{Ref, RefCell},
cmp, fmt,
future::Future,
io,
@@ -67,8 +67,8 @@ pub struct MultiBuffer {
/// Contains the state of the buffers being edited
buffers: RefCell<HashMap<BufferId, BufferState>>,
// only used by consumers using `set_excerpts_for_buffer`
- buffers_by_path: BTreeMap<Arc<Path>, Vec<ExcerptId>>,
- diff_bases: HashMap<BufferId, ChangeSetState>,
+ buffers_by_path: BTreeMap<PathKey, Vec<ExcerptId>>,
+ diffs: HashMap<BufferId, DiffState>,
all_diff_hunks_expanded: bool,
subscriptions: Topic,
/// If true, the multi-buffer only contains a single [`Buffer`] and a single [`Excerpt`]
@@ -143,6 +143,15 @@ impl MultiBufferDiffHunk {
}
}
+#[derive(PartialEq, Eq, Ord, PartialOrd, Clone, Hash, Debug)]
+pub struct PathKey(String);
+
+impl PathKey {
+ pub fn namespaced(namespace: &str, path: &Path) -> Self {
+ Self(format!("{}/{}", namespace, path.to_string_lossy()))
+ }
+}
+
pub type MultiBufferPoint = Point;
type ExcerptOffset = TypedOffset<Excerpt>;
type ExcerptPoint = TypedPoint<Excerpt>;
@@ -206,18 +215,32 @@ struct BufferState {
_subscriptions: [gpui::Subscription; 2],
}
-struct ChangeSetState {
- change_set: Entity<BufferChangeSet>,
+struct DiffState {
+ diff: Entity<BufferDiff>,
_subscription: gpui::Subscription,
}
+impl DiffState {
+ fn new(diff: Entity<BufferDiff>, cx: &mut Context<MultiBuffer>) -> Self {
+ DiffState {
+ _subscription: cx.subscribe(&diff, |this, diff, event, cx| match event {
+ BufferDiffEvent::DiffChanged { changed_range } => {
+ this.buffer_diff_changed(diff, changed_range.clone(), cx)
+ }
+ BufferDiffEvent::LanguageChanged => this.buffer_diff_language_changed(diff, cx),
+ }),
+ diff,
+ }
+ }
+}
+
/// The contents of a [`MultiBuffer`] at a single point in time.
#[derive(Clone, Default)]
pub struct MultiBufferSnapshot {
singleton: bool,
excerpts: SumTree<Excerpt>,
excerpt_ids: SumTree<ExcerptIdMapping>,
- diffs: TreeMap<BufferId, DiffSnapshot>,
+ diffs: TreeMap<BufferId, BufferDiffSnapshot>,
pub diff_transforms: SumTree<DiffTransform>,
trailing_excerpt_update_count: usize,
non_text_state_update_count: usize,
@@ -243,12 +266,6 @@ pub enum DiffTransform {
},
}
-#[derive(Clone)]
-struct DiffSnapshot {
- diff: git::diff::BufferDiff,
- base_text: language::BufferSnapshot,
-}
-
#[derive(Clone)]
pub struct ExcerptInfo {
pub id: ExcerptId,
@@ -290,9 +307,10 @@ impl ExcerptBoundary {
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub struct RowInfo {
+ pub buffer_id: Option<BufferId>,
pub buffer_row: Option<u32>,
pub multibuffer_row: Option<MultiBufferRow>,
- pub diff_status: Option<git::diff::DiffHunkStatus>,
+ pub diff_status: Option<diff::DiffHunkStatus>,
}
/// A slice into a [`Buffer`] that is being edited in a [`MultiBuffer`].
@@ -371,7 +389,7 @@ pub struct MultiBufferRows<'a> {
pub struct MultiBufferChunks<'a> {
excerpts: Cursor<'a, Excerpt, ExcerptOffset>,
diff_transforms: Cursor<'a, DiffTransform, (usize, ExcerptOffset)>,
- diffs: &'a TreeMap<BufferId, DiffSnapshot>,
+ diffs: &'a TreeMap<BufferId, BufferDiffSnapshot>,
diff_base_chunks: Option<(BufferId, BufferChunks<'a>)>,
buffer_chunk: Option<Chunk<'a>>,
range: Range<usize>,
@@ -405,7 +423,7 @@ pub struct ReversedMultiBufferBytes<'a> {
struct MultiBufferCursor<'a, D: TextDimension> {
excerpts: Cursor<'a, Excerpt, ExcerptDimension<D>>,
diff_transforms: Cursor<'a, DiffTransform, (OutputDimension<D>, ExcerptDimension<D>)>,
- diffs: &'a TreeMap<BufferId, DiffSnapshot>,
+ diffs: &'a TreeMap<BufferId, BufferDiffSnapshot>,
cached_region: Option<MultiBufferRegion<'a, D>>,
}
@@ -491,7 +509,7 @@ impl MultiBuffer {
..MultiBufferSnapshot::default()
}),
buffers: RefCell::default(),
- diff_bases: HashMap::default(),
+ diffs: HashMap::default(),
all_diff_hunks_expanded: false,
subscriptions: Topic::default(),
singleton: false,
@@ -513,7 +531,7 @@ impl MultiBuffer {
snapshot: Default::default(),
buffers: Default::default(),
buffers_by_path: Default::default(),
- diff_bases: HashMap::default(),
+ diffs: HashMap::default(),
all_diff_hunks_expanded: false,
subscriptions: Default::default(),
singleton: false,
@@ -547,27 +565,14 @@ impl MultiBuffer {
);
}
let mut diff_bases = HashMap::default();
- for (buffer_id, change_set_state) in self.diff_bases.iter() {
- diff_bases.insert(
- *buffer_id,
- ChangeSetState {
- _subscription: new_cx.subscribe(
- &change_set_state.change_set,
- |this, change_set, event, cx| match event {
- BufferChangeSetEvent::DiffChanged { changed_range } => {
- this.buffer_diff_changed(change_set, changed_range.clone(), cx)
- }
- },
- ),
- change_set: change_set_state.change_set.clone(),
- },
- );
+ for (buffer_id, diff) in self.diffs.iter() {
+ diff_bases.insert(*buffer_id, DiffState::new(diff.diff.clone(), new_cx));
}
Self {
snapshot: RefCell::new(self.snapshot.borrow().clone()),
buffers: RefCell::new(buffers),
buffers_by_path: Default::default(),
- diff_bases,
+ diffs: diff_bases,
all_diff_hunks_expanded: self.all_diff_hunks_expanded,
subscriptions: Default::default(),
singleton: self.singleton,
@@ -1394,7 +1399,7 @@ impl MultiBuffer {
anchor_ranges
}
- pub fn location_for_path(&self, path: &Arc<Path>, cx: &App) -> Option<Anchor> {
+ pub fn location_for_path(&self, path: &PathKey, cx: &App) -> Option<Anchor> {
let excerpt_id = self.buffers_by_path.get(path)?.first()?;
let snapshot = self.snapshot(cx);
let excerpt = snapshot.excerpt(*excerpt_id)?;
@@ -1407,31 +1412,26 @@ impl MultiBuffer {
pub fn set_excerpts_for_path(
&mut self,
- path: Arc<Path>,
+ path: PathKey,
buffer: Entity<Buffer>,
ranges: Vec<Range<Point>>,
context_line_count: u32,
cx: &mut Context<Self>,
) {
let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
- let (mut insert_after, excerpt_ids) =
- if let Some(existing) = self.buffers_by_path.get(&path) {
- (*existing.last().unwrap(), existing.clone())
- } else {
- (
- self.buffers_by_path
- .range(..path.clone())
- .next_back()
- .map(|(_, value)| *value.last().unwrap())
- .unwrap_or(ExcerptId::min()),
- Vec::default(),
- )
- };
+
+ let mut insert_after = self
+ .buffers_by_path
+ .range(..path.clone())
+ .next_back()
+ .map(|(_, value)| *value.last().unwrap())
+ .unwrap_or(ExcerptId::min());
+ let existing = self.buffers_by_path.get(&path).cloned().unwrap_or_default();
let (new, _) = build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);
let mut new_iter = new.into_iter().peekable();
- let mut existing_iter = excerpt_ids.into_iter().peekable();
+ let mut existing_iter = existing.into_iter().peekable();
let mut new_excerpt_ids = Vec::new();
let mut to_remove = Vec::new();
@@ -1485,7 +1485,6 @@ impl MultiBuffer {
// maybe merge overlapping excerpts?
// it's hard to distinguish between a manually expanded excerpt, and one that
// got smaller because of a missing diff.
- //
if existing_start == new.context.start && existing_end == new.context.end {
new_excerpt_ids.append(&mut self.insert_excerpts_after(
insert_after,
@@ -1516,11 +1515,11 @@ impl MultiBuffer {
}
}
- pub fn paths(&self) -> impl Iterator<Item = Arc<Path>> + '_ {
+ pub fn paths(&self) -> impl Iterator<Item = PathKey> + '_ {
self.buffers_by_path.keys().cloned()
}
- pub fn remove_excerpts_for_path(&mut self, path: Arc<Path>, cx: &mut Context<Self>) {
+ pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context<Self>) {
if let Some(to_remove) = self.buffers_by_path.remove(&path) {
self.remove_excerpts(to_remove, cx)
}
@@ -1742,7 +1741,7 @@ impl MultiBuffer {
}
self.sync_diff_transforms(
- snapshot,
+ &mut snapshot,
vec![Edit {
old: edit_start..edit_start,
new: edit_start..edit_end,
@@ -1775,7 +1774,7 @@ impl MultiBuffer {
snapshot.has_conflict = false;
self.sync_diff_transforms(
- snapshot,
+ &mut snapshot,
vec![Edit {
old: start..prev_len,
new: start..start,
@@ -2053,7 +2052,7 @@ impl MultiBuffer {
snapshot.trailing_excerpt_update_count += 1;
}
- self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited);
+ self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited);
cx.emit(Event::Edited {
singleton_buffer_edited: false,
edited_buffer: None,
@@ -2142,38 +2141,49 @@ impl MultiBuffer {
});
}
+ fn buffer_diff_language_changed(&mut self, diff: Entity<BufferDiff>, cx: &mut Context<Self>) {
+ self.sync(cx);
+ let mut snapshot = self.snapshot.borrow_mut();
+ let diff = diff.read(cx);
+ let buffer_id = diff.buffer_id;
+ let diff = diff.snapshot.clone();
+ snapshot.diffs.insert(buffer_id, diff);
+ }
+
fn buffer_diff_changed(
&mut self,
- change_set: Entity<BufferChangeSet>,
+ diff: Entity<BufferDiff>,
range: Range<text::Anchor>,
cx: &mut Context<Self>,
) {
- let change_set = change_set.read(cx);
- let buffer_id = change_set.buffer_id;
- let diff = change_set.diff_to_buffer.clone();
- let base_text = change_set.base_text.clone();
self.sync(cx);
- let mut snapshot = self.snapshot.borrow_mut();
- let base_text_changed = snapshot
- .diffs
- .get(&buffer_id)
- .map_or(true, |diff_snapshot| {
- change_set.base_text.as_ref().map_or(true, |base_text| {
- base_text.remote_id() != diff_snapshot.base_text.remote_id()
- })
- });
- if let Some(base_text) = base_text {
- snapshot.diffs.insert(
- buffer_id,
- DiffSnapshot {
- diff: diff.clone(),
- base_text,
- },
- );
- } else {
- snapshot.diffs.remove(&buffer_id);
+ let diff = diff.read(cx);
+ let buffer_id = diff.buffer_id;
+ let mut diff = diff.snapshot.clone();
+ if diff.base_text.is_none() && self.all_diff_hunks_expanded {
+ diff = BufferDiffSnapshot::new_with_single_insertion(cx);
}
+
+ let mut snapshot = self.snapshot.borrow_mut();
+ let base_text_changed =
+ snapshot
+ .diffs
+ .get(&buffer_id)
+ .map_or(true, |diff_snapshot| {
+ match (&diff_snapshot.base_text, &diff.base_text) {
+ (None, None) => false,
+ (None, Some(_)) => true,
+ (Some(_), None) => true,
+ (Some(old), Some(new)) => {
+ let (old_id, old_empty) = (old.remote_id(), old.is_empty());
+ let (new_id, new_empty) = (new.remote_id(), new.is_empty());
+ new_id != old_id && (!new_empty || !old_empty)
+ }
+ }
+ });
+ snapshot.diffs.insert(buffer_id, diff);
+
let buffers = self.buffers.borrow();
let Some(buffer_state) = buffers.get(&buffer_id) else {
return;
@@ -2218,7 +2228,7 @@ impl MultiBuffer {
}
self.sync_diff_transforms(
- snapshot,
+ &mut snapshot,
excerpt_edits,
DiffChangeKind::DiffUpdated {
base_changed: base_text_changed,
@@ -2309,29 +2319,14 @@ impl MultiBuffer {
self.as_singleton().unwrap().read(cx).is_parsing()
}
- pub fn add_change_set(&mut self, change_set: Entity<BufferChangeSet>, cx: &mut Context<Self>) {
- let buffer_id = change_set.read(cx).buffer_id;
- self.buffer_diff_changed(change_set.clone(), text::Anchor::MIN..text::Anchor::MAX, cx);
- self.diff_bases.insert(
- buffer_id,
- ChangeSetState {
- _subscription: cx.subscribe(
- &change_set,
- |this, change_set, event, cx| match event {
- BufferChangeSetEvent::DiffChanged { changed_range } => {
- this.buffer_diff_changed(change_set, changed_range.clone(), cx);
- }
- },
- ),
- change_set,
- },
- );
+ pub fn add_diff(&mut self, diff: Entity<BufferDiff>, cx: &mut Context<Self>) {
+ let buffer_id = diff.read(cx).buffer_id;
+ self.buffer_diff_changed(diff.clone(), text::Anchor::MIN..text::Anchor::MAX, cx);
+ self.diffs.insert(buffer_id, DiffState::new(diff, cx));
}
- pub fn change_set_for(&self, buffer_id: BufferId) -> Option<Entity<BufferChangeSet>> {
- self.diff_bases
- .get(&buffer_id)
- .map(|state| state.change_set.clone())
+ pub fn diff_for(&self, buffer_id: BufferId) -> Option<Entity<BufferDiff>> {
+ self.diffs.get(&buffer_id).map(|state| state.diff.clone())
}
pub fn expand_diff_hunks(&mut self, ranges: Vec<Range<Anchor>>, cx: &mut Context<Self>) {
@@ -2381,24 +2376,17 @@ impl MultiBuffer {
false
}
- fn expand_or_collapse_diff_hunks(
+ fn expand_or_collapse_diff_hunks_internal(
&mut self,
- ranges: Vec<Range<Anchor>>,
+ ranges: impl Iterator<Item = (Range<Point>, ExcerptId)>,
expand: bool,
cx: &mut Context<Self>,
) {
self.sync(cx);
- let snapshot = self.snapshot.borrow_mut();
+ let mut snapshot = self.snapshot.borrow_mut();
let mut excerpt_edits = Vec::new();
- for range in ranges.iter() {
- let end_excerpt_id = range.end.excerpt_id;
- let range = range.to_point(&snapshot);
- let mut peek_end = range.end;
- if range.end.row < snapshot.max_row().0 {
- peek_end = Point::new(range.end.row + 1, 0);
- };
-
- for diff_hunk in snapshot.diff_hunks_in_range(range.start..peek_end) {
+ for (range, end_excerpt_id) in ranges {
+ for diff_hunk in snapshot.diff_hunks_in_range(range) {
if diff_hunk.excerpt_id.cmp(&end_excerpt_id, &snapshot).is_gt() {
continue;
}
@@ -2422,7 +2410,7 @@ impl MultiBuffer {
}
self.sync_diff_transforms(
- snapshot,
+ &mut snapshot,
excerpt_edits,
DiffChangeKind::ExpandOrCollapseHunks { expand },
);
@@ -2433,6 +2421,44 @@ impl MultiBuffer {
});
}
+ pub fn expand_or_collapse_diff_hunks_narrow(
+ &mut self,
+ ranges: Vec<Range<Anchor>>,
+ expand: bool,
+ cx: &mut Context<Self>,
+ ) {
+ let snapshot = self.snapshot.borrow().clone();
+ self.expand_or_collapse_diff_hunks_internal(
+ ranges
+ .iter()
+ .map(move |range| (range.to_point(&snapshot), range.end.excerpt_id)),
+ expand,
+ cx,
+ );
+ }
+
+ pub fn expand_or_collapse_diff_hunks(
+ &mut self,
+ ranges: Vec<Range<Anchor>>,
+ expand: bool,
+ cx: &mut Context<Self>,
+ ) {
+ let snapshot = self.snapshot.borrow().clone();
+ self.expand_or_collapse_diff_hunks_internal(
+ ranges.iter().map(move |range| {
+ let end_excerpt_id = range.end.excerpt_id;
+ let range = range.to_point(&snapshot);
+ let mut peek_end = range.end;
+ if range.end.row < snapshot.max_row().0 {
+ peek_end = Point::new(range.end.row + 1, 0);
+ };
+ (range.start..peek_end, end_excerpt_id)
+ }),
+ expand,
+ cx,
+ );
+ }
+
pub fn resize_excerpt(
&mut self,
id: ExcerptId,
@@ -2491,7 +2517,7 @@ impl MultiBuffer {
drop(cursor);
snapshot.excerpts = new_excerpts;
- self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited);
+ self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited);
cx.emit(Event::Edited {
singleton_buffer_edited: false,
edited_buffer: None,
@@ -2592,7 +2618,7 @@ impl MultiBuffer {
drop(cursor);
snapshot.excerpts = new_excerpts;
- self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited);
+ self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited);
cx.emit(Event::Edited {
singleton_buffer_edited: false,
edited_buffer: None,
@@ -2705,12 +2731,12 @@ impl MultiBuffer {
drop(cursor);
snapshot.excerpts = new_excerpts;
- self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited);
+ self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited);
}
fn sync_diff_transforms(
&self,
- mut snapshot: RefMut<MultiBufferSnapshot>,
+ snapshot: &mut MultiBufferSnapshot,
excerpt_edits: Vec<text::Edit<ExcerptOffset>>,
change_kind: DiffChangeKind,
) {
@@ -2791,11 +2817,23 @@ impl MultiBuffer {
if excerpt_edits.peek().map_or(true, |next_edit| {
next_edit.old.start >= old_diff_transforms.end(&()).0
}) {
+ let keep_next_old_transform = (old_diff_transforms.start().0 >= edit.old.end)
+ && match old_diff_transforms.item() {
+ Some(DiffTransform::BufferContent {
+ inserted_hunk_anchor: Some(hunk_anchor),
+ ..
+ }) => excerpts
+ .item()
+ .is_some_and(|excerpt| hunk_anchor.1.is_valid(&excerpt.buffer)),
+ _ => true,
+ };
+
let mut excerpt_offset = edit.new.end;
- if old_diff_transforms.start().0 < edit.old.end {
+ if !keep_next_old_transform {
excerpt_offset += old_diff_transforms.end(&()).0 - edit.old.end;
old_diff_transforms.next(&());
}
+
old_expanded_hunks.clear();
self.push_buffer_content_transform(
&snapshot,
@@ -2877,9 +2915,11 @@ impl MultiBuffer {
while let Some(excerpt) = excerpts.item() {
// Recompute the expanded hunks in the portion of the excerpt that
// intersects the edit.
- if let Some(diff_state) = snapshot.diffs.get(&excerpt.buffer_id) {
- let diff = &diff_state.diff;
- let base_text = &diff_state.base_text;
+ if let Some((diff, base_text)) = snapshot
+ .diffs
+ .get(&excerpt.buffer_id)
+ .and_then(|diff| Some((diff, diff.base_text.as_ref()?)))
+ {
let buffer = &excerpt.buffer;
let excerpt_start = *excerpts.start();
let excerpt_end = excerpt_start + ExcerptOffset::new(excerpt.text_summary.len);
@@ -2894,12 +2934,14 @@ impl MultiBuffer {
buffer.anchor_before(edit_buffer_start)..buffer.anchor_after(edit_buffer_end);
for hunk in diff.hunks_intersecting_range(edit_anchor_range, buffer) {
+ let hunk_buffer_range = hunk.buffer_range.to_offset(buffer);
+
let hunk_anchor = (excerpt.id, hunk.buffer_range.start);
- if !hunk_anchor.1.is_valid(buffer) {
+ if hunk_buffer_range.start < excerpt_buffer_start {
+ log::trace!("skipping hunk that starts before excerpt");
continue;
}
- let hunk_buffer_range = hunk.buffer_range.to_offset(buffer);
let hunk_excerpt_start = excerpt_start
+ ExcerptOffset::new(
hunk_buffer_range.start.saturating_sub(excerpt_buffer_start),
@@ -2941,8 +2983,9 @@ impl MultiBuffer {
if should_expand_hunk {
did_expand_hunks = true;
log::trace!(
- "expanding hunk {:?}",
+ "expanding hunk {:?}, excerpt:{:?}",
hunk_excerpt_start.value..hunk_excerpt_end.value,
+ excerpt.id
);
if !hunk.diff_base_byte_range.is_empty()
@@ -3389,18 +3432,17 @@ impl MultiBufferSnapshot {
self.diff_hunks_in_range(Anchor::min()..Anchor::max())
}
- pub fn diff_hunks_in_range<T: ToOffset>(
+ pub fn diff_hunks_in_range<T: ToPoint>(
&self,
range: Range<T>,
) -> impl Iterator<Item = MultiBufferDiffHunk> + '_ {
- let range = range.start.to_offset(self)..range.end.to_offset(self);
- self.lift_buffer_metadata(range.clone(), move |buffer, buffer_range| {
+ let query_range = range.start.to_point(self)..range.end.to_point(self);
+ self.lift_buffer_metadata(query_range.clone(), move |buffer, buffer_range| {
let diff = self.diffs.get(&buffer.remote_id())?;
let buffer_start = buffer.anchor_before(buffer_range.start);
let buffer_end = buffer.anchor_after(buffer_range.end);
Some(
- diff.diff
- .hunks_intersecting_range(buffer_start..buffer_end, buffer)
+ diff.hunks_intersecting_range(buffer_start..buffer_end, buffer)
.map(|hunk| {
(
Point::new(hunk.row_range.start, 0)..Point::new(hunk.row_range.end, 0),
@@ -3409,19 +3451,25 @@ impl MultiBufferSnapshot {
}),
)
})
- .map(|(range, hunk, excerpt)| {
+ .filter_map(move |(range, hunk, excerpt)| {
+ if range.start != range.end
+ && range.end == query_range.start
+ && !hunk.row_range.is_empty()
+ {
+ return None;
+ }
let end_row = if range.end.column == 0 {
range.end.row
} else {
range.end.row + 1
};
- MultiBufferDiffHunk {
+ Some(MultiBufferDiffHunk {
row_range: MultiBufferRow(range.start.row)..MultiBufferRow(end_row),
buffer_id: excerpt.buffer_id,
excerpt_id: excerpt.id,
buffer_range: hunk.buffer_range.clone(),
diff_base_byte_range: hunk.diff_base_byte_range.clone(),
- }
+ })
})
}
@@ -3560,8 +3608,8 @@ impl MultiBufferSnapshot {
/// multi-buffer coordinates.
fn lift_buffer_metadata<'a, D, M, I>(
&'a self,
- range: Range<usize>,
- get_buffer_metadata: impl 'a + Fn(&'a BufferSnapshot, Range<usize>) -> Option<I>,
+ query_range: Range<D>,
+ get_buffer_metadata: impl 'a + Fn(&'a BufferSnapshot, Range<D>) -> Option<I>,
) -> impl Iterator<Item = (Range<D>, M, &'a Excerpt)> + 'a
where
I: Iterator<Item = (Range<D>, M)> + 'a,
@@ -3569,18 +3617,19 @@ impl MultiBufferSnapshot {
{
let max_position = D::from_text_summary(&self.text_summary());
let mut current_excerpt_metadata: Option<(ExcerptId, I)> = None;
- let mut cursor = self.cursor::<DimensionPair<usize, D>>();
+ let mut cursor = self.cursor::<D>();
// Find the excerpt and buffer offset where the given range ends.
- cursor.seek(&DimensionPair {
- key: range.end,
- value: None,
- });
+ cursor.seek(&query_range.end);
let mut range_end = None;
while let Some(region) = cursor.region() {
if region.is_main_buffer {
- let mut buffer_end = region.buffer_range.start.key;
- let overshoot = range.end.saturating_sub(region.range.start.key);
+ let mut buffer_end = region.buffer_range.start;
+ let overshoot = if query_range.end > region.range.start {
+ query_range.end - region.range.start
+ } else {
+ D::default()
+ };
buffer_end.add_assign(&overshoot);
range_end = Some((region.excerpt.id, buffer_end));
break;
@@ -3588,13 +3637,10 @@ impl MultiBufferSnapshot {
cursor.next();
}
- cursor.seek(&DimensionPair {
- key: range.start,
- value: None,
- });
+ cursor.seek(&query_range.start);
if let Some(region) = cursor.region().filter(|region| !region.is_main_buffer) {
- if region.range.start.key > 0 {
+ if region.range.start > D::zero(&()) {
cursor.prev()
}
}
@@ -3613,14 +3659,18 @@ impl MultiBufferSnapshot {
// and retrieve the metadata for the resulting range.
else {
let region = cursor.region()?;
- let buffer_start = if region.is_main_buffer {
- let start_overshoot = range.start.saturating_sub(region.range.start.key);
- (region.buffer_range.start.key + start_overshoot)
- .min(region.buffer_range.end.key)
+ let mut buffer_start;
+ if region.is_main_buffer {
+ buffer_start = region.buffer_range.start;
+ if query_range.start > region.range.start {
+ let overshoot = query_range.start - region.range.start;
+ buffer_start.add_assign(&overshoot);
+ }
+ buffer_start = buffer_start.min(region.buffer_range.end);
} else {
- cursor.main_buffer_position()?.key
+ buffer_start = cursor.main_buffer_position()?;
};
- let mut buffer_end = excerpt.range.context.end.to_offset(&excerpt.buffer);
+ let mut buffer_end = excerpt.range.context.end.summary::<D>(&excerpt.buffer);
if let Some((end_excerpt_id, end_buffer_offset)) = range_end {
if excerpt.id == end_excerpt_id {
buffer_end = buffer_end.min(end_buffer_offset);
@@ -3637,53 +3687,56 @@ impl MultiBufferSnapshot {
};
// Visit each metadata item.
- if let Some((range, metadata)) = metadata_iter.and_then(Iterator::next) {
+ if let Some((metadata_buffer_range, metadata)) = metadata_iter.and_then(Iterator::next)
+ {
// Find the multibuffer regions that contain the start and end of
// the metadata item's range.
- if range.start > D::default() {
+ if metadata_buffer_range.start > D::default() {
while let Some(region) = cursor.region() {
- if !region.is_main_buffer
- || region.buffer.remote_id() == excerpt.buffer_id
- && region.buffer_range.end.value.unwrap() < range.start
+ if region.is_main_buffer
+ && (region.buffer_range.end >= metadata_buffer_range.start
+ || cursor.is_at_end_of_excerpt())
{
- cursor.next();
- } else {
break;
}
+ cursor.next();
}
}
let start_region = cursor.region()?;
while let Some(region) = cursor.region() {
- if !region.is_main_buffer
- || region.buffer.remote_id() == excerpt.buffer_id
- && region.buffer_range.end.value.unwrap() <= range.end
+ if region.is_main_buffer
+ && (region.buffer_range.end > metadata_buffer_range.end
+ || cursor.is_at_end_of_excerpt())
{
- cursor.next();
- } else {
break;
}
+ cursor.next();
}
- let end_region = cursor
- .region()
- .filter(|region| region.buffer.remote_id() == excerpt.buffer_id);
+ let end_region = cursor.region();
// Convert the metadata item's range into multibuffer coordinates.
- let mut start = start_region.range.start.value.unwrap();
- let region_buffer_start = start_region.buffer_range.start.value.unwrap();
- if start_region.is_main_buffer && range.start > region_buffer_start {
- start.add_assign(&(range.start - region_buffer_start));
+ let mut start_position = start_region.range.start;
+ let region_buffer_start = start_region.buffer_range.start;
+ if start_region.is_main_buffer && metadata_buffer_range.start > region_buffer_start
+ {
+ start_position.add_assign(&(metadata_buffer_range.start - region_buffer_start));
+ start_position = start_position.min(start_region.range.end);
}
- let mut end = max_position;
- if let Some(end_region) = end_region {
- end = end_region.range.start.value.unwrap();
+
+ let mut end_position = max_position;
+ if let Some(end_region) = &end_region {
+ end_position = end_region.range.start;
debug_assert!(end_region.is_main_buffer);
- let region_buffer_start = end_region.buffer_range.start.value.unwrap();
- if range.end > region_buffer_start {
- end.add_assign(&(range.end - region_buffer_start));
+ let region_buffer_start = end_region.buffer_range.start;
+ if metadata_buffer_range.end > region_buffer_start {
+ end_position.add_assign(&(metadata_buffer_range.end - region_buffer_start));
}
+ end_position = end_position.min(end_region.range.end);
}
- return Some((start..end, metadata, excerpt));
+ if start_position <= query_range.end && end_position >= query_range.start {
+ return Some((start_position..end_position, metadata, excerpt));
+ }
}
// When there are no more metadata items for this excerpt, move to the next excerpt.
else {
@@ -3725,8 +3778,8 @@ impl MultiBufferSnapshot {
let buffer_end = excerpt.buffer.anchor_before(buffer_offset);
let buffer_end_row = buffer_end.to_point(&excerpt.buffer).row;
- if let Some(diff_state) = self.diffs.get(&excerpt.buffer_id) {
- for hunk in diff_state.diff.hunks_intersecting_range_rev(
+ if let Some(diff) = self.diffs.get(&excerpt.buffer_id) {
+ for hunk in diff.hunks_intersecting_range_rev(
excerpt.range.context.start..buffer_end,
&excerpt.buffer,
) {
@@ -3794,7 +3847,7 @@ impl MultiBufferSnapshot {
}
pub fn has_diff_hunks(&self) -> bool {
- self.diffs.values().any(|diff| !diff.diff.is_empty())
+ self.diffs.values().any(|diff| !diff.is_empty())
}
pub fn surrounding_word<T: ToOffset>(
@@ -4256,7 +4309,11 @@ impl MultiBufferSnapshot {
} => {
let buffer_start = base_text_byte_range.start + start_overshoot;
let mut buffer_end = base_text_byte_range.start + end_overshoot;
- let Some(buffer_diff) = self.diffs.get(buffer_id) else {
+ let Some(base_text) = self
+ .diffs
+ .get(buffer_id)
+ .and_then(|diff| diff.base_text.as_ref())
+ else {
panic!("{:?} is in non-existent deleted hunk", range.start)
};
@@ -4266,9 +4323,8 @@ impl MultiBufferSnapshot {
buffer_end -= 1;
}
- let mut summary = buffer_diff
- .base_text
- .text_summary_for_range::<D, _>(buffer_start..buffer_end);
+ let mut summary =
+ base_text.text_summary_for_range::<D, _>(buffer_start..buffer_end);
if include_trailing_newline {
summary.add_assign(&D::from_text_summary(&TextSummary::newline()))
@@ -4305,12 +4361,15 @@ impl MultiBufferSnapshot {
..
} => {
let buffer_end = base_text_byte_range.start + overshoot;
- let Some(buffer_diff) = self.diffs.get(buffer_id) else {
- panic!("{:?} is in non-extant deleted hunk", range.end)
+ let Some(base_text) = self
+ .diffs
+ .get(buffer_id)
+ .and_then(|diff| diff.base_text.as_ref())
+ else {
+ panic!("{:?} is in non-existent deleted hunk", range.end)
};
- let mut suffix = buffer_diff
- .base_text
+ let mut suffix = base_text
.text_summary_for_range::<D, _>(base_text_byte_range.start..buffer_end);
if *has_trailing_newline && buffer_end == base_text_byte_range.end + 1 {
suffix.add_assign(&D::from_text_summary(&TextSummary::newline()))
@@ -4410,14 +4469,18 @@ impl MultiBufferSnapshot {
}) => {
let mut in_deleted_hunk = false;
if let Some(diff_base_anchor) = &anchor.diff_base_anchor {
- if let Some(diff) = self.diffs.get(buffer_id) {
- if diff.base_text.can_resolve(&diff_base_anchor) {
- let base_text_offset = diff_base_anchor.to_offset(&diff.base_text);
+ if let Some(base_text) = self
+ .diffs
+ .get(buffer_id)
+ .and_then(|diff| diff.base_text.as_ref())
+ {
+ if base_text.can_resolve(&diff_base_anchor) {
+ let base_text_offset = diff_base_anchor.to_offset(&base_text);
if base_text_offset >= base_text_byte_range.start
&& base_text_offset <= base_text_byte_range.end
{
- let position_in_hunk =
- diff.base_text.text_summary_for_range::<D, _>(
+ let position_in_hunk = base_text
+ .text_summary_for_range::<D, _>(
base_text_byte_range.start..base_text_offset,
);
position.add_assign(&position_in_hunk);
@@ -4509,7 +4572,16 @@ impl MultiBufferSnapshot {
}
let excerpt_start_position = D::from_text_summary(&cursor.start().text);
- if let Some(excerpt) = cursor.item().filter(|excerpt| excerpt.id == excerpt_id) {
+ if let Some(excerpt) = cursor.item() {
+ if excerpt.id != excerpt_id {
+ let position = self.resolve_summary_for_anchor(
+ &Anchor::min(),
+ excerpt_start_position,
+ &mut diff_transforms_cursor,
+ );
+ summaries.extend(excerpt_anchors.map(|_| position));
+ continue;
+ }
let excerpt_buffer_start =
excerpt.range.context.start.summary::<D>(&excerpt.buffer);
let excerpt_buffer_end = excerpt.range.context.end.summary::<D>(&excerpt.buffer);
@@ -4734,15 +4806,17 @@ impl MultiBufferSnapshot {
..
}) = diff_transforms.item()
{
- let diff_base = self.diffs.get(buffer_id).expect("missing diff base");
+ let base_text = self
+ .diffs
+ .get(buffer_id)
+ .and_then(|diff| diff.base_text.as_ref())
+ .expect("missing diff base");
if offset_in_transform > base_text_byte_range.len() {
debug_assert!(*has_trailing_newline);
bias = Bias::Right;
} else {
diff_base_anchor = Some(
- diff_base
- .base_text
- .anchor_at(base_text_byte_range.start + offset_in_transform, bias),
+ base_text.anchor_at(base_text_byte_range.start + offset_in_transform, bias),
);
bias = Bias::Left;
}
@@ -5525,7 +5599,7 @@ impl MultiBufferSnapshot {
buffer_id: BufferId,
group_id: usize,
) -> impl Iterator<Item = DiagnosticEntry<Point>> + '_ {
- self.lift_buffer_metadata(0..self.len(), move |buffer, _| {
+ self.lift_buffer_metadata(Point::zero()..self.max_point(), move |buffer, _| {
if buffer.remote_id() != buffer_id {
return None;
};
@@ -5538,15 +5612,19 @@ impl MultiBufferSnapshot {
.map(|(range, diagnostic, _)| DiagnosticEntry { diagnostic, range })
}
- pub fn diagnostics_in_range<'a, T, O>(
+ pub fn diagnostics_in_range<'a, T>(
&'a self,
range: Range<T>,
- ) -> impl Iterator<Item = DiagnosticEntry<O>> + 'a
+ ) -> impl Iterator<Item = DiagnosticEntry<T>> + 'a
where
- T: 'a + ToOffset,
- O: 'a + text::FromAnchor + Copy + TextDimension + Ord + Sub<O, Output = O> + fmt::Debug,
+ T: 'a
+ + text::ToOffset
+ + text::FromAnchor
+ + TextDimension
+ + Ord
+ + Sub<T, Output = T>
+ + fmt::Debug,
{
- let range = range.start.to_offset(self)..range.end.to_offset(self);
self.lift_buffer_metadata(range, move |buffer, buffer_range| {
Some(
buffer
@@ -6036,6 +6114,24 @@ where
self.cached_region.clone()
}
+ fn is_at_end_of_excerpt(&mut self) -> bool {
+ if self.diff_transforms.end(&()).1 < self.excerpts.end(&()) {
+ return false;
+ } else if self.diff_transforms.end(&()).1 > self.excerpts.end(&())
+ || self.diff_transforms.item().is_none()
+ {
+ return true;
+ }
+
+ self.diff_transforms.next(&());
+ let next_transform = self.diff_transforms.item();
+ self.diff_transforms.prev(&());
+
+ next_transform.map_or(true, |next_transform| {
+ matches!(next_transform, DiffTransform::BufferContent { .. })
+ })
+ }
+
fn main_buffer_position(&self) -> Option<D> {
let excerpt = self.excerpts.item()?;
let buffer = &excerpt.buffer;
@@ -6056,7 +6152,7 @@ where
..
} => {
let diff = self.diffs.get(&buffer_id)?;
- let buffer = &diff.base_text;
+ let buffer = diff.base_text.as_ref()?;
let mut rope_cursor = buffer.as_rope().cursor(0);
let buffer_start = rope_cursor.summary::<D>(base_text_byte_range.start);
let buffer_range_len = rope_cursor.summary::<D>(base_text_byte_range.end);
@@ -6879,6 +6975,7 @@ impl<'a> Iterator for MultiBufferRows<'a> {
if self.is_empty && self.point.row == 0 {
self.point += Point::new(1, 0);
return Some(RowInfo {
+ buffer_id: None,
buffer_row: Some(0),
multibuffer_row: Some(MultiBufferRow(0)),
diff_status: None,
@@ -1,12 +1,12 @@
use super::*;
-use git::diff::DiffHunkStatus;
+use diff::DiffHunkStatus;
use gpui::{App, TestAppContext};
use indoc::indoc;
use language::{Buffer, Rope};
use parking_lot::RwLock;
use rand::prelude::*;
use settings::SettingsStore;
-use std::{env, path::PathBuf};
+use std::env;
use util::test::sample_text;
#[ctor::ctor]
@@ -19,12 +19,14 @@ fn init_logger() {
#[gpui::test]
fn test_empty_singleton(cx: &mut App) {
let buffer = cx.new(|cx| Buffer::local("", cx));
+ let buffer_id = buffer.read(cx).remote_id();
let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx));
let snapshot = multibuffer.read(cx).snapshot(cx);
assert_eq!(snapshot.text(), "");
assert_eq!(
snapshot.row_infos(MultiBufferRow(0)).collect::<Vec<_>>(),
[RowInfo {
+ buffer_id: Some(buffer_id),
buffer_row: Some(0),
multibuffer_row: Some(MultiBufferRow(0)),
diff_status: None
@@ -359,17 +361,9 @@ fn test_diff_boundary_anchors(cx: &mut TestAppContext) {
let base_text = "one\ntwo\nthree\n";
let text = "one\nthree\n";
let buffer = cx.new(|cx| Buffer::local(text, cx));
- let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
- let change_set = cx.new(|cx| {
- let mut change_set = BufferChangeSet::new(&buffer, cx);
- let _ = change_set.set_base_text(base_text.into(), snapshot.text, cx);
- change_set
- });
- cx.run_until_parked();
+ let diff = cx.new(|cx| BufferDiff::new_with_base_text(base_text, &buffer, cx));
let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
- multibuffer.update(cx, |multibuffer, cx| {
- multibuffer.add_change_set(change_set, cx)
- });
+ multibuffer.update(cx, |multibuffer, cx| multibuffer.add_diff(diff, cx));
let (before, after) = multibuffer.update(cx, |multibuffer, cx| {
let before = multibuffer.snapshot(cx).anchor_before(Point::new(1, 0));
@@ -382,7 +376,7 @@ fn test_diff_boundary_anchors(cx: &mut TestAppContext) {
let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx));
let actual_text = snapshot.text();
let actual_row_infos = snapshot.row_infos(MultiBufferRow(0)).collect::<Vec<_>>();
- let actual_diff = format_diff(&actual_text, &actual_row_infos, &Default::default());
+ let actual_diff = format_diff(&actual_text, &actual_row_infos, &Default::default(), None);
pretty_assertions::assert_eq!(
actual_diff,
indoc! {
@@ -409,20 +403,14 @@ fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
let base_text = "one\ntwo\nthree\nfour\nfive\nsix\nseven\neight\n";
let text = "one\nfour\nseven\n";
let buffer = cx.new(|cx| Buffer::local(text, cx));
- let change_set = cx.new(|cx| {
- let mut change_set = BufferChangeSet::new(&buffer, cx);
- let snapshot = buffer.read(cx).snapshot();
- let _ = change_set.set_base_text(base_text.into(), snapshot.text, cx);
- change_set
- });
- cx.run_until_parked();
+ let diff = cx.new(|cx| BufferDiff::new_with_base_text(base_text, &buffer, cx));
let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
let (mut snapshot, mut subscription) = multibuffer.update(cx, |multibuffer, cx| {
(multibuffer.snapshot(cx), multibuffer.subscribe())
});
multibuffer.update(cx, |multibuffer, cx| {
- multibuffer.add_change_set(change_set, cx);
+ multibuffer.add_diff(diff, cx);
multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx);
});
@@ -508,17 +496,11 @@ fn test_editing_text_in_diff_hunks(cx: &mut TestAppContext) {
let base_text = "one\ntwo\nfour\nfive\nsix\nseven\n";
let text = "one\ntwo\nTHREE\nfour\nfive\nseven\n";
let buffer = cx.new(|cx| Buffer::local(text, cx));
- let change_set = cx.new(|cx| {
- let mut change_set = BufferChangeSet::new(&buffer, cx);
- let snapshot = buffer.read(cx).text_snapshot();
- let _ = change_set.set_base_text(base_text.into(), snapshot, cx);
- change_set
- });
- cx.run_until_parked();
+ let diff = cx.new(|cx| BufferDiff::new_with_base_text(&base_text, &buffer, cx));
let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx));
let (mut snapshot, mut subscription) = multibuffer.update(cx, |multibuffer, cx| {
- multibuffer.add_change_set(change_set.clone(), cx);
+ multibuffer.add_diff(diff.clone(), cx);
(multibuffer.snapshot(cx), multibuffer.subscribe())
});
@@ -995,15 +977,10 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) {
let buffer = cx.new(|cx| Buffer::local("", cx));
let base_text = "a\nb\nc";
- let change_set = cx.new(|cx| {
- let snapshot = buffer.read(cx).snapshot();
- let mut change_set = BufferChangeSet::new(&buffer, cx);
- let _ = change_set.set_base_text(base_text.into(), snapshot.text, cx);
- change_set
- });
+ let diff = cx.new(|cx| BufferDiff::new_with_base_text(base_text, &buffer, cx));
multibuffer.update(cx, |multibuffer, cx| {
multibuffer.set_all_diff_hunks_expanded(cx);
- multibuffer.add_change_set(change_set.clone(), cx);
+ multibuffer.add_diff(diff.clone(), cx);
multibuffer.push_excerpts(
buffer.clone(),
[ExcerptRange {
@@ -1039,8 +1016,8 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) {
buffer.update(cx, |buffer, cx| {
buffer.edit([(0..0, "a\nb\nc")], None, cx);
- change_set.update(cx, |change_set, cx| {
- let _ = change_set.recalculate_diff(buffer.snapshot().text, cx);
+ diff.update(cx, |diff, cx| {
+ diff.recalculate_diff_sync(buffer.snapshot().text, cx);
});
assert_eq!(buffer.text(), "a\nb\nc")
});
@@ -1051,8 +1028,8 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) {
buffer.update(cx, |buffer, cx| {
buffer.undo(cx);
- change_set.update(cx, |change_set, cx| {
- let _ = change_set.recalculate_diff(buffer.snapshot().text, cx);
+ diff.update(cx, |diff, cx| {
+ diff.recalculate_diff_sync(buffer.snapshot().text, cx);
});
assert_eq!(buffer.text(), "")
});
@@ -1294,13 +1271,12 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) {
);
let buffer = cx.new(|cx| Buffer::local(text, cx));
- let change_set =
- cx.new(|cx| BufferChangeSet::new_with_base_text(base_text.to_string(), &buffer, cx));
+ let diff = cx.new(|cx| BufferDiff::new_with_base_text(base_text, &buffer, cx));
cx.run_until_parked();
let multibuffer = cx.new(|cx| {
let mut multibuffer = MultiBuffer::singleton(buffer.clone(), cx);
- multibuffer.add_change_set(change_set.clone(), cx);
+ multibuffer.add_diff(diff.clone(), cx);
multibuffer
});
@@ -1485,8 +1461,8 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) {
assert_line_indents(&snapshot);
// Recalculate the diff, changing the first diff hunk.
- let _ = change_set.update(cx, |change_set, cx| {
- change_set.recalculate_diff(buffer.read(cx).text_snapshot(), cx)
+ diff.update(cx, |diff, cx| {
+ diff.recalculate_diff_sync(buffer.read(cx).text_snapshot(), cx);
});
cx.run_until_parked();
assert_new_snapshot(
@@ -1538,13 +1514,12 @@ fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) {
);
let buffer = cx.new(|cx| Buffer::local(text, cx));
- let change_set =
- cx.new(|cx| BufferChangeSet::new_with_base_text(base_text.to_string(), &buffer, cx));
+ let diff = cx.new(|cx| BufferDiff::new_with_base_text(base_text, &buffer, cx));
cx.run_until_parked();
let multibuffer = cx.new(|cx| {
let mut multibuffer = MultiBuffer::singleton(buffer.clone(), cx);
- multibuffer.add_change_set(change_set.clone(), cx);
+ multibuffer.add_diff(diff.clone(), cx);
multibuffer
});
@@ -1601,6 +1576,107 @@ fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) {
);
}
+#[gpui::test]
+fn test_set_excerpts_for_buffer_ordering(cx: &mut TestAppContext) {
+ let buf1 = cx.new(|cx| {
+ Buffer::local(
+ indoc! {
+ "zero
+ one
+ two
+ two.five
+ three
+ four
+ five
+ six
+ seven
+ eight
+ nine
+ ten
+ eleven
+ ",
+ },
+ cx,
+ )
+ });
+ let path1: PathKey = PathKey::namespaced("0", Path::new("/"));
+
+ let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.set_excerpts_for_path(
+ path1.clone(),
+ buf1.clone(),
+ vec![
+ Point::row_range(1..2),
+ Point::row_range(6..7),
+ Point::row_range(11..12),
+ ],
+ 1,
+ cx,
+ );
+ });
+
+ assert_excerpts_match(
+ &multibuffer,
+ cx,
+ indoc! {
+ "-----
+ zero
+ one
+ two
+ two.five
+ -----
+ four
+ five
+ six
+ seven
+ -----
+ nine
+ ten
+ eleven
+ "
+ },
+ );
+
+ buf1.update(cx, |buffer, cx| buffer.edit([(0..5, "")], None, cx));
+
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.set_excerpts_for_path(
+ path1.clone(),
+ buf1.clone(),
+ vec![
+ Point::row_range(0..2),
+ Point::row_range(5..6),
+ Point::row_range(10..11),
+ ],
+ 1,
+ cx,
+ );
+ });
+
+ assert_excerpts_match(
+ &multibuffer,
+ cx,
+ indoc! {
+ "-----
+ one
+ two
+ two.five
+ three
+ -----
+ four
+ five
+ six
+ seven
+ -----
+ nine
+ ten
+ eleven
+ "
+ },
+ );
+}
+
#[gpui::test]
fn test_set_excerpts_for_buffer(cx: &mut TestAppContext) {
let buf1 = cx.new(|cx| {
@@ -1619,7 +1695,7 @@ fn test_set_excerpts_for_buffer(cx: &mut TestAppContext) {
cx,
)
});
- let path1: Arc<Path> = Arc::from(PathBuf::from("path1"));
+ let path1: PathKey = PathKey::namespaced("0", Path::new("/"));
let buf2 = cx.new(|cx| {
Buffer::local(
indoc! {
@@ -1638,7 +1714,7 @@ fn test_set_excerpts_for_buffer(cx: &mut TestAppContext) {
cx,
)
});
- let path2: Arc<Path> = Arc::from(PathBuf::from("path2"));
+ let path2 = PathKey::namespaced("x", Path::new("/"));
let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
multibuffer.update(cx, |multibuffer, cx| {
@@ -1840,10 +1916,8 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
let buffer_1 = cx.new(|cx| Buffer::local(text_1, cx));
let buffer_2 = cx.new(|cx| Buffer::local(text_2, cx));
- let change_set_1 =
- cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_1.to_string(), &buffer_1, cx));
- let change_set_2 =
- cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_2.to_string(), &buffer_2, cx));
+ let diff_1 = cx.new(|cx| BufferDiff::new_with_base_text(base_text_1, &buffer_1, cx));
+ let diff_2 = cx.new(|cx| BufferDiff::new_with_base_text(base_text_2, &buffer_2, cx));
cx.run_until_parked();
let multibuffer = cx.new(|cx| {
@@ -1864,8 +1938,8 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
}],
cx,
);
- multibuffer.add_change_set(change_set_1.clone(), cx);
- multibuffer.add_change_set(change_set_2.clone(), cx);
+ multibuffer.add_diff(diff_1.clone(), cx);
+ multibuffer.add_diff(diff_2.clone(), cx);
multibuffer
});
@@ -1925,11 +1999,11 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
let id_1 = buffer_1.read_with(cx, |buffer, _| buffer.remote_id());
let id_2 = buffer_2.read_with(cx, |buffer, _| buffer.remote_id());
- let base_id_1 = change_set_1.read_with(cx, |change_set, _| {
- change_set.base_text.as_ref().unwrap().remote_id()
+ let base_id_1 = diff_1.read_with(cx, |diff, _| {
+ diff.snapshot.base_text.as_ref().unwrap().remote_id()
});
- let base_id_2 = change_set_2.read_with(cx, |change_set, _| {
- change_set.base_text.as_ref().unwrap().remote_id()
+ let base_id_2 = diff_2.read_with(cx, |diff, _| {
+ diff.snapshot.base_text.as_ref().unwrap().remote_id()
});
let buffer_lines = (0..=snapshot.max_row().0)
@@ -2025,9 +2099,10 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
#[derive(Default)]
struct ReferenceMultibuffer {
excerpts: Vec<ReferenceExcerpt>,
- change_sets: HashMap<BufferId, Entity<BufferChangeSet>>,
+ diffs: HashMap<BufferId, Entity<BufferDiff>>,
}
+#[derive(Debug)]
struct ReferenceExcerpt {
id: ExcerptId,
buffer: Entity<Buffer>,
@@ -2037,6 +2112,7 @@ struct ReferenceExcerpt {
#[derive(Debug)]
struct ReferenceRegion {
+ buffer_id: Option<BufferId>,
range: Range<usize>,
buffer_start: Option<Point>,
status: Option<DiffHunkStatus>,
@@ -2112,42 +2188,31 @@ impl ReferenceMultibuffer {
.unwrap();
let buffer = excerpt.buffer.read(cx).snapshot();
let buffer_id = buffer.remote_id();
- let Some(change_set) = self.change_sets.get(&buffer_id) else {
+ let Some(diff) = self.diffs.get(&buffer_id) else {
return;
};
- let diff = change_set.read(cx).diff_to_buffer.clone();
+ let diff = diff.read(cx).snapshot.clone();
let excerpt_range = excerpt.range.to_offset(&buffer);
- if excerpt_range.is_empty() {
- return;
- }
for hunk in diff.hunks_intersecting_range(range, &buffer) {
let hunk_range = hunk.buffer_range.to_offset(&buffer);
- let hunk_precedes_excerpt = hunk
- .buffer_range
- .end
- .cmp(&excerpt.range.start, &buffer)
- .is_lt();
- let hunk_follows_excerpt = hunk
- .buffer_range
- .start
- .cmp(&excerpt.range.end, &buffer)
- .is_ge();
- if hunk_precedes_excerpt || hunk_follows_excerpt {
+ if hunk_range.start < excerpt_range.start || hunk_range.start > excerpt_range.end {
continue;
}
-
if let Err(ix) = excerpt
.expanded_diff_hunks
.binary_search_by(|anchor| anchor.cmp(&hunk.buffer_range.start, &buffer))
{
log::info!(
- "expanding diff hunk {:?}. excerpt: {:?}",
+ "expanding diff hunk {:?}. excerpt:{:?}, excerpt range:{:?}",
hunk_range,
+ excerpt_id,
excerpt_range
);
excerpt
.expanded_diff_hunks
.insert(ix, hunk.buffer_range.start);
+ } else {
+ log::trace!("hunk {hunk_range:?} already expanded in excerpt {excerpt_id:?}");
}
}
}
@@ -2160,9 +2225,9 @@ impl ReferenceMultibuffer {
excerpt_boundary_rows.insert(MultiBufferRow(text.matches('\n').count() as u32));
let buffer = excerpt.buffer.read(cx);
let buffer_range = excerpt.range.to_offset(buffer);
- let change_set = self.change_sets.get(&buffer.remote_id()).unwrap().read(cx);
- let diff = change_set.diff_to_buffer.clone();
- let base_buffer = change_set.base_text.as_ref().unwrap();
+ let diff = self.diffs.get(&buffer.remote_id()).unwrap().read(cx);
+ let diff = diff.snapshot.clone();
+ let base_buffer = diff.base_text.as_ref().unwrap();
let mut offset = buffer_range.start;
let mut hunks = diff
@@ -2170,17 +2235,12 @@ impl ReferenceMultibuffer {
.peekable();
while let Some(hunk) = hunks.next() {
- if !hunk.buffer_range.start.is_valid(&buffer) {
- continue;
- }
-
// Ignore hunks that are outside the excerpt range.
let mut hunk_range = hunk.buffer_range.to_offset(buffer);
+
hunk_range.end = hunk_range.end.min(buffer_range.end);
- if hunk_range.start > buffer_range.end
- || hunk_range.end < buffer_range.start
- || buffer_range.is_empty()
- {
+ if hunk_range.start > buffer_range.end || hunk_range.start < buffer_range.start {
+ log::trace!("skipping hunk outside excerpt range");
continue;
}
@@ -2188,6 +2248,12 @@ impl ReferenceMultibuffer {
expanded_anchor.to_offset(&buffer).max(buffer_range.start)
== hunk_range.start.max(buffer_range.start)
}) {
+ log::trace!("skipping a hunk that's not marked as expanded");
+ continue;
+ }
+
+ if !hunk.buffer_range.start.is_valid(&buffer) {
+ log::trace!("skipping hunk with deleted start: {:?}", hunk.row_range);
continue;
}
@@ -2196,6 +2262,7 @@ impl ReferenceMultibuffer {
let len = text.len();
text.extend(buffer.text_for_range(offset..hunk_range.start));
regions.push(ReferenceRegion {
+ buffer_id: Some(buffer.remote_id()),
range: len..text.len(),
buffer_start: Some(buffer.offset_to_point(offset)),
status: None,
@@ -2212,6 +2279,7 @@ impl ReferenceMultibuffer {
let len = text.len();
text.push_str(&base_text);
regions.push(ReferenceRegion {
+ buffer_id: Some(base_buffer.remote_id()),
range: len..text.len(),
buffer_start: Some(
base_buffer.offset_to_point(hunk.diff_base_byte_range.start),
@@ -2228,6 +2296,7 @@ impl ReferenceMultibuffer {
let len = text.len();
text.extend(buffer.text_for_range(offset..hunk_range.end));
regions.push(ReferenceRegion {
+ buffer_id: Some(buffer.remote_id()),
range: len..text.len(),
buffer_start: Some(buffer.offset_to_point(offset)),
status: Some(DiffHunkStatus::Added),
@@ -2241,6 +2310,7 @@ impl ReferenceMultibuffer {
text.extend(buffer.text_for_range(offset..buffer_range.end));
text.push('\n');
regions.push(ReferenceRegion {
+ buffer_id: Some(buffer.remote_id()),
range: len..text.len(),
buffer_start: Some(buffer.offset_to_point(offset)),
status: None,
@@ -2250,6 +2320,7 @@ impl ReferenceMultibuffer {
// Remove final trailing newline.
if self.excerpts.is_empty() {
regions.push(ReferenceRegion {
+ buffer_id: None,
range: 0..1,
buffer_start: Some(Point::new(0, 0)),
status: None,
@@ -2273,6 +2344,7 @@ impl ReferenceMultibuffer {
+ text[region.range.start..ix].matches('\n').count() as u32
});
RowInfo {
+ buffer_id: region.buffer_id,
diff_status: region.status,
buffer_row,
multibuffer_row: Some(MultiBufferRow(
@@ -2293,12 +2365,7 @@ impl ReferenceMultibuffer {
let buffer = excerpt.buffer.read(cx).snapshot();
let excerpt_range = excerpt.range.to_offset(&buffer);
let buffer_id = buffer.remote_id();
- let diff = &self
- .change_sets
- .get(&buffer_id)
- .unwrap()
- .read(cx)
- .diff_to_buffer;
+ let diff = &self.diffs.get(&buffer_id).unwrap().read(cx).snapshot;
let mut hunks = diff.hunks_in_row_range(0..u32::MAX, &buffer).peekable();
excerpt.expanded_diff_hunks.retain(|hunk_anchor| {
if !hunk_anchor.is_valid(&buffer) {
@@ -2322,9 +2389,9 @@ impl ReferenceMultibuffer {
}
}
- fn add_change_set(&mut self, change_set: Entity<BufferChangeSet>, cx: &mut App) {
- let buffer_id = change_set.read(cx).buffer_id;
- self.change_sets.insert(buffer_id, change_set);
+ fn add_diff(&mut self, diff: Entity<BufferDiff>, cx: &mut App) {
+ let buffer_id = diff.read(cx).buffer_id;
+ self.diffs.insert(buffer_id, diff);
}
}
@@ -2348,6 +2415,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
buffer.update(cx, |buf, cx| {
let edit_count = rng.gen_range(1..5);
buf.randomly_edit(&mut rng, edit_count, cx);
+ log::info!("buffer text:\n{}", buf.text());
needs_diff_calculation = true;
});
cx.update(|cx| reference.diffs_updated(cx));
@@ -2440,7 +2508,11 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
let range = snapshot.anchor_in_excerpt(excerpt.id, start).unwrap()
..snapshot.anchor_in_excerpt(excerpt.id, end).unwrap();
- log::info!("expanding diff hunks for excerpt {:?}", excerpt_ix);
+ log::info!(
+ "expanding diff hunks in range {:?} (excerpt id {:?}) index {excerpt_ix:?})",
+ range.to_offset(&snapshot),
+ excerpt.id
+ );
reference.expand_diff_hunks(excerpt.id, start..end, cx);
multibuffer.expand_diff_hunks(vec![range], cx);
});
@@ -2449,16 +2521,16 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
multibuffer.update(cx, |multibuffer, cx| {
for buffer in multibuffer.all_buffers() {
let snapshot = buffer.read(cx).snapshot();
- let _ = multibuffer
- .change_set_for(snapshot.remote_id())
- .unwrap()
- .update(cx, |change_set, cx| {
+ let _ = multibuffer.diff_for(snapshot.remote_id()).unwrap().update(
+ cx,
+ |diff, cx| {
log::info!(
"recalculating diff for buffer {:?}",
snapshot.remote_id(),
);
- change_set.recalculate_diff(snapshot.text, cx)
- });
+ diff.recalculate_diff_sync(snapshot.text, cx);
+ },
+ );
}
reference.diffs_updated(cx);
needs_diff_calculation = false;
@@ -2471,18 +2543,11 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
.collect::<String>();
let buffer = cx.new(|cx| Buffer::local(base_text.clone(), cx));
- let change_set = cx.new(|cx| BufferChangeSet::new(&buffer, cx));
- change_set
- .update(cx, |change_set, cx| {
- let snapshot = buffer.read(cx).snapshot();
- change_set.set_base_text(base_text, snapshot.text, cx)
- })
- .await
- .unwrap();
+ let diff = cx.new(|cx| BufferDiff::new_with_base_text(&base_text, &buffer, cx));
multibuffer.update(cx, |multibuffer, cx| {
- reference.add_change_set(change_set.clone(), cx);
- multibuffer.add_change_set(change_set, cx)
+ reference.add_diff(diff.clone(), cx);
+ multibuffer.add_diff(diff, cx)
});
buffers.push(buffer);
buffers.last().unwrap()
@@ -2553,12 +2618,28 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
.filter_map(|b| if b.next.is_some() { Some(b.row) } else { None })
.collect::<HashSet<_>>();
let actual_row_infos = snapshot.row_infos(MultiBufferRow(0)).collect::<Vec<_>>();
- let actual_diff = format_diff(&actual_text, &actual_row_infos, &actual_boundary_rows);
let (expected_text, expected_row_infos, expected_boundary_rows) =
cx.update(|cx| reference.expected_content(cx));
- let expected_diff =
- format_diff(&expected_text, &expected_row_infos, &expected_boundary_rows);
+
+ let has_diff = actual_row_infos
+ .iter()
+ .any(|info| info.diff_status.is_some())
+ || expected_row_infos
+ .iter()
+ .any(|info| info.diff_status.is_some());
+ let actual_diff = format_diff(
+ &actual_text,
+ &actual_row_infos,
+ &actual_boundary_rows,
+ Some(has_diff),
+ );
+ let expected_diff = format_diff(
+ &expected_text,
+ &expected_row_infos,
+ &expected_boundary_rows,
+ Some(has_diff),
+ );
log::info!("Multibuffer content:\n{}", actual_diff);
@@ -2569,8 +2650,8 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
actual_text.split('\n').count()
);
pretty_assertions::assert_eq!(actual_diff, expected_diff);
- pretty_assertions::assert_eq!(actual_row_infos, expected_row_infos);
pretty_assertions::assert_eq!(actual_text, expected_text);
+ pretty_assertions::assert_eq!(actual_row_infos, expected_row_infos);
for _ in 0..5 {
let start_row = rng.gen_range(0..=expected_row_infos.len());
@@ -2937,8 +3018,10 @@ fn format_diff(
text: &str,
row_infos: &Vec<RowInfo>,
boundary_rows: &HashSet<MultiBufferRow>,
+ has_diff: Option<bool>,
) -> String {
- let has_diff = row_infos.iter().any(|info| info.diff_status.is_some());
+ let has_diff =
+ has_diff.unwrap_or_else(|| row_infos.iter().any(|info| info.diff_status.is_some()));
text.split('\n')
.enumerate()
.zip(row_infos)
@@ -3002,7 +3085,7 @@ fn assert_new_snapshot(
let line_infos = new_snapshot
.row_infos(MultiBufferRow(0))
.collect::<Vec<_>>();
- let actual_diff = format_diff(&actual_text, &line_infos, &Default::default());
+ let actual_diff = format_diff(&actual_text, &line_infos, &Default::default(), None);
pretty_assertions::assert_eq!(actual_diff, expected_diff);
check_edits(
snapshot,
@@ -103,6 +103,7 @@ impl Model {
"o1" => Ok(Self::O1),
"o1-preview" => Ok(Self::O1Preview),
"o1-mini" => Ok(Self::O1Mini),
+ "o3-mini" => Ok(Self::O3Mini),
_ => Err(anyhow!("invalid model id")),
}
}
@@ -0,0 +1,17 @@
+[package]
+name = "panel"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/panel.rs"
+
+[dependencies]
+gpui.workspace = true
+ui.workspace = true
+workspace.workspace = true
@@ -0,0 +1 @@
+../../LICENSE-GPL
@@ -0,0 +1,66 @@
+//! # panel
+use gpui::actions;
+use ui::{prelude::*, Tab};
+
+actions!(panel, [NextPanelTab, PreviousPanelTab]);
+
+pub trait PanelHeader: workspace::Panel {
+ fn header_height(&self, cx: &mut App) -> Pixels {
+ Tab::container_height(cx)
+ }
+
+ fn panel_header_container(&self, _window: &mut Window, cx: &mut App) -> Div {
+ h_flex()
+ .h(self.header_height(cx))
+ .w_full()
+ .px_1()
+ .flex_none()
+ .border_b_1()
+ .border_color(cx.theme().colors().border)
+ }
+}
+
+/// Implement this trait to enable a panel to have tabs.
+pub trait PanelTabs: PanelHeader {
+ /// Returns the index of the currently selected tab.
+ fn selected_tab(&self, cx: &mut App) -> usize;
+ /// Selects the tab at the given index.
+ fn select_tab(&self, cx: &mut App, index: usize);
+ /// Moves to the next tab.
+ fn next_tab(&self, _: NextPanelTab, cx: &mut App) -> Self;
+ /// Moves to the previous tab.
+ fn previous_tab(&self, _: PreviousPanelTab, cx: &mut App) -> Self;
+}
+
+#[derive(IntoElement)]
+pub struct PanelTab {}
+
+impl RenderOnce for PanelTab {
+ fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement {
+ div()
+ }
+}
+
+pub fn panel_button(label: impl Into<SharedString>) -> ui::Button {
+ let label = label.into();
+ let id = ElementId::Name(label.clone().to_lowercase().replace(' ', "_").into());
+ ui::Button::new(id, label)
+ .label_size(ui::LabelSize::Small)
+ .layer(ui::ElevationIndex::Surface)
+ .size(ui::ButtonSize::Compact)
+}
+
+pub fn panel_filled_button(label: impl Into<SharedString>) -> ui::Button {
+ panel_button(label).style(ui::ButtonStyle::Filled)
+}
+
+pub fn panel_icon_button(id: impl Into<SharedString>, icon: IconName) -> ui::IconButton {
+ let id = ElementId::Name(id.into());
+ ui::IconButton::new(id, icon)
+ .layer(ui::ElevationIndex::Surface)
+ .size(ui::ButtonSize::Compact)
+}
+
+pub fn panel_filled_icon_button(id: impl Into<SharedString>, icon: IconName) -> ui::IconButton {
+ panel_icon_button(id, icon).style(ui::ButtonStyle::Filled)
+}
@@ -26,6 +26,7 @@ actions!(picker, [ConfirmCompletion]);
/// ConfirmInput is an alternative editor action which - instead of selecting active picker entry - treats pickers editor input literally,
/// performing some kind of action on it.
#[derive(Clone, PartialEq, Deserialize, JsonSchema, Default)]
+#[serde(deny_unknown_fields)]
pub struct ConfirmInput {
pub secondary: bool,
}
@@ -946,7 +946,7 @@ mod tests {
.await {
Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"),
Err(e) => {
- let message = e.to_string();
+ let message = e.to_string().replace("\\\\", "/");
assert!(message.contains("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader"), "Error message should mention which project had prettier defined");
assert!(message.contains("/root/work/full-stack-foundations"), "Error message should mention potential candidates without prettier node_modules contents");
},
@@ -30,6 +30,7 @@ async-trait.workspace = true
client.workspace = true
clock.workspace = true
collections.workspace = true
+diff.workspace = true
fs.workspace = true
futures.workspace = true
fuzzy.workspace = true
@@ -77,6 +78,7 @@ fancy-regex.workspace = true
[dev-dependencies]
client = { workspace = true, features = ["test-support"] }
collections = { workspace = true, features = ["test-support"] }
+diff = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
fs = { workspace = true, features = ["test-support"] }
git2.workspace = true
@@ -8,9 +8,10 @@ use ::git::{parse_git_remote_url, BuildPermalinkParams, GitHostingProviderRegist
use anyhow::{anyhow, bail, Context as _, Result};
use client::Client;
use collections::{hash_map, HashMap, HashSet};
+use diff::{BufferDiff, BufferDiffEvent, BufferDiffSnapshot};
use fs::Fs;
use futures::{channel::oneshot, future::Shared, Future, FutureExt as _, StreamExt};
-use git::{blame::Blame, diff::BufferDiff, repository::RepoPath};
+use git::{blame::Blame, repository::RepoPath};
use gpui::{
App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity,
};
@@ -34,18 +35,24 @@ use std::{
sync::Arc,
time::Instant,
};
-use text::{BufferId, LineEnding, Rope};
+use text::BufferId;
use util::{debug_panic, maybe, ResultExt as _, TryFutureExt};
use worktree::{File, PathChange, ProjectEntryId, UpdatedGitRepositoriesSet, Worktree, WorktreeId};
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+enum DiffKind {
+ Unstaged,
+ Uncommitted,
+}
+
/// A set of open buffers.
pub struct BufferStore {
state: BufferStoreState,
#[allow(clippy::type_complexity)]
loading_buffers: HashMap<ProjectPath, Shared<Task<Result<Entity<Buffer>, Arc<anyhow::Error>>>>>,
#[allow(clippy::type_complexity)]
- loading_change_sets:
- HashMap<BufferId, Shared<Task<Result<Entity<BufferChangeSet>, Arc<anyhow::Error>>>>>,
+ loading_diffs:
+ HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
worktree_store: Entity<WorktreeStore>,
opened_buffers: HashMap<BufferId, OpenBuffer>,
downstream_client: Option<(AnyProtoClient, u64)>,
@@ -55,22 +62,232 @@ pub struct BufferStore {
#[derive(Hash, Eq, PartialEq, Clone)]
struct SharedBuffer {
buffer: Entity<Buffer>,
- unstaged_changes: Option<Entity<BufferChangeSet>>,
+ diff: Option<Entity<BufferDiff>>,
lsp_handle: Option<OpenLspBufferHandle>,
}
-pub struct BufferChangeSet {
- pub buffer_id: BufferId,
- pub base_text: Option<language::BufferSnapshot>,
- pub language: Option<Arc<Language>>,
- pub diff_to_buffer: git::diff::BufferDiff,
- pub recalculate_diff_task: Option<Task<Result<()>>>,
- pub diff_updated_futures: Vec<oneshot::Sender<()>>,
- pub language_registry: Option<Arc<LanguageRegistry>>,
+#[derive(Default)]
+struct BufferDiffState {
+ unstaged_diff: Option<WeakEntity<BufferDiff>>,
+ uncommitted_diff: Option<WeakEntity<BufferDiff>>,
+ recalculate_diff_task: Option<Task<Result<()>>>,
+ language: Option<Arc<Language>>,
+ language_registry: Option<Arc<LanguageRegistry>>,
+ diff_updated_futures: Vec<oneshot::Sender<()>>,
+
+ head_text: Option<Arc<String>>,
+ index_text: Option<Arc<String>>,
+ head_changed: bool,
+ index_changed: bool,
+ language_changed: bool,
}
-pub enum BufferChangeSetEvent {
- DiffChanged { changed_range: Range<text::Anchor> },
+#[derive(Clone, Debug)]
+enum DiffBasesChange {
+ SetIndex(Option<String>),
+ SetHead(Option<String>),
+ SetEach {
+ index: Option<String>,
+ head: Option<String>,
+ },
+ SetBoth(Option<String>),
+}
+
+impl BufferDiffState {
+ fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
+ self.language = buffer.read(cx).language().cloned();
+ self.language_changed = true;
+ let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
+ }
+
+ fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
+ self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
+ }
+
+ fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
+ self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
+ }
+
+ fn handle_base_texts_updated(
+ &mut self,
+ buffer: text::BufferSnapshot,
+ message: proto::UpdateDiffBases,
+ cx: &mut Context<Self>,
+ ) {
+ use proto::update_diff_bases::Mode;
+
+ let Some(mode) = Mode::from_i32(message.mode) else {
+ return;
+ };
+
+ let diff_bases_change = match mode {
+ Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
+ Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
+ Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
+ Mode::IndexAndHead => DiffBasesChange::SetEach {
+ index: message.staged_text,
+ head: message.committed_text,
+ },
+ };
+
+ let _ = self.diff_bases_changed(buffer, diff_bases_change, cx);
+ }
+
+ fn diff_bases_changed(
+ &mut self,
+ buffer: text::BufferSnapshot,
+ diff_bases_change: DiffBasesChange,
+ cx: &mut Context<Self>,
+ ) -> oneshot::Receiver<()> {
+ match diff_bases_change {
+ DiffBasesChange::SetIndex(index) => {
+ self.index_text = index.map(|mut index| {
+ text::LineEnding::normalize(&mut index);
+ Arc::new(index)
+ });
+ self.index_changed = true;
+ }
+ DiffBasesChange::SetHead(head) => {
+ self.head_text = head.map(|mut head| {
+ text::LineEnding::normalize(&mut head);
+ Arc::new(head)
+ });
+ self.head_changed = true;
+ }
+ DiffBasesChange::SetBoth(text) => {
+ let text = text.map(|mut text| {
+ text::LineEnding::normalize(&mut text);
+ Arc::new(text)
+ });
+ self.head_text = text.clone();
+ self.index_text = text;
+ self.head_changed = true;
+ self.index_changed = true;
+ }
+ DiffBasesChange::SetEach { index, head } => {
+ self.index_text = index.map(|mut index| {
+ text::LineEnding::normalize(&mut index);
+ Arc::new(index)
+ });
+ self.index_changed = true;
+ self.head_text = head.map(|mut head| {
+ text::LineEnding::normalize(&mut head);
+ Arc::new(head)
+ });
+ self.head_changed = true;
+ }
+ }
+
+ self.recalculate_diffs(buffer, cx)
+ }
+
+ fn recalculate_diffs(
+ &mut self,
+ buffer: text::BufferSnapshot,
+ cx: &mut Context<Self>,
+ ) -> oneshot::Receiver<()> {
+ let (tx, rx) = oneshot::channel();
+ self.diff_updated_futures.push(tx);
+
+ let language = self.language.clone();
+ let language_registry = self.language_registry.clone();
+ let unstaged_diff = self.unstaged_diff();
+ let uncommitted_diff = self.uncommitted_diff();
+ let head = self.head_text.clone();
+ let index = self.index_text.clone();
+ let index_changed = self.index_changed;
+ let head_changed = self.head_changed;
+ let language_changed = self.language_changed;
+ let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
+ (Some(index), Some(head)) => Arc::ptr_eq(index, head),
+ (None, None) => true,
+ _ => false,
+ };
+ self.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move {
+ if let Some(unstaged_diff) = &unstaged_diff {
+ let snapshot = if index_changed || language_changed {
+ cx.update(|cx| {
+ BufferDiffSnapshot::build(
+ buffer.clone(),
+ index,
+ language.clone(),
+ language_registry.clone(),
+ cx,
+ )
+ })?
+ .await
+ } else {
+ unstaged_diff
+ .read_with(&cx, |changes, cx| {
+ BufferDiffSnapshot::build_with_base_buffer(
+ buffer.clone(),
+ index,
+ changes.snapshot.base_text.clone(),
+ cx,
+ )
+ })?
+ .await
+ };
+
+ unstaged_diff.update(&mut cx, |unstaged_diff, cx| {
+ unstaged_diff.set_state(snapshot, &buffer, cx);
+ if language_changed {
+ cx.emit(BufferDiffEvent::LanguageChanged);
+ }
+ })?;
+ }
+
+ if let Some(uncommitted_diff) = &uncommitted_diff {
+ let snapshot =
+ if let (Some(unstaged_diff), true) = (&unstaged_diff, index_matches_head) {
+ unstaged_diff.read_with(&cx, |diff, _| diff.snapshot.clone())?
+ } else if head_changed || language_changed {
+ cx.update(|cx| {
+ BufferDiffSnapshot::build(
+ buffer.clone(),
+ head,
+ language.clone(),
+ language_registry.clone(),
+ cx,
+ )
+ })?
+ .await
+ } else {
+ uncommitted_diff
+ .read_with(&cx, |changes, cx| {
+ BufferDiffSnapshot::build_with_base_buffer(
+ buffer.clone(),
+ head,
+ changes.snapshot.base_text.clone(),
+ cx,
+ )
+ })?
+ .await
+ };
+
+ uncommitted_diff.update(&mut cx, |diff, cx| {
+ diff.set_state(snapshot, &buffer, cx);
+ if language_changed {
+ cx.emit(BufferDiffEvent::LanguageChanged);
+ }
+ })?;
+ }
+
+ if let Some(this) = this.upgrade() {
+ this.update(&mut cx, |this, _| {
+ this.index_changed = false;
+ this.head_changed = false;
+ for tx in this.diff_updated_futures.drain(..) {
+ tx.send(()).ok();
+ }
+ })?;
+ }
+
+ Ok(())
+ }));
+
+ rx
+ }
}
enum BufferStoreState {
@@ -98,7 +315,7 @@ struct LocalBufferStore {
enum OpenBuffer {
Complete {
buffer: WeakEntity<Buffer>,
- unstaged_changes: Option<WeakEntity<BufferChangeSet>>,
+ diff_state: Entity<BufferDiffState>,
},
Operations(Vec<Operation>),
}
@@ -118,19 +335,48 @@ pub struct ProjectTransaction(pub HashMap<Entity<Buffer>, language::Transaction>
impl EventEmitter<BufferStoreEvent> for BufferStore {}
impl RemoteBufferStore {
- fn load_staged_text(&self, buffer_id: BufferId, cx: &App) -> Task<Result<Option<String>>> {
+ fn open_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Task<Result<Option<String>>> {
let project_id = self.project_id;
let client = self.upstream_client.clone();
cx.background_executor().spawn(async move {
- Ok(client
- .request(proto::GetStagedText {
+ let response = client
+ .request(proto::OpenUnstagedDiff {
project_id,
buffer_id: buffer_id.to_proto(),
})
- .await?
- .staged_text)
+ .await?;
+ Ok(response.staged_text)
+ })
+ }
+
+ fn open_uncommitted_diff(
+ &self,
+ buffer_id: BufferId,
+ cx: &App,
+ ) -> Task<Result<DiffBasesChange>> {
+ use proto::open_uncommitted_diff_response::Mode;
+
+ let project_id = self.project_id;
+ let client = self.upstream_client.clone();
+ cx.background_executor().spawn(async move {
+ let response = client
+ .request(proto::OpenUncommittedDiff {
+ project_id,
+ buffer_id: buffer_id.to_proto(),
+ })
+ .await?;
+ let mode = Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
+ let bases = match mode {
+ Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
+ Mode::IndexAndHead => DiffBasesChange::SetEach {
+ head: response.committed_text,
+ index: response.staged_text,
+ },
+ };
+ Ok(bases)
})
}
+
pub fn wait_for_remote_buffer(
&mut self,
id: BufferId,
@@ -398,21 +644,39 @@ impl RemoteBufferStore {
}
impl LocalBufferStore {
- fn load_staged_text(&self, buffer: &Entity<Buffer>, cx: &App) -> Task<Result<Option<String>>> {
- let Some(file) = buffer.read(cx).file() else {
- return Task::ready(Ok(None));
- };
+ fn worktree_for_buffer(
+ &self,
+ buffer: &Entity<Buffer>,
+ cx: &App,
+ ) -> Option<(Entity<Worktree>, Arc<Path>)> {
+ let file = buffer.read(cx).file()?;
let worktree_id = file.worktree_id(cx);
let path = file.path().clone();
- let Some(worktree) = self
+ let worktree = self
.worktree_store
.read(cx)
- .worktree_for_id(worktree_id, cx)
- else {
+ .worktree_for_id(worktree_id, cx)?;
+ Some((worktree, path))
+ }
+
+ fn load_staged_text(&self, buffer: &Entity<Buffer>, cx: &App) -> Task<Result<Option<String>>> {
+ if let Some((worktree, path)) = self.worktree_for_buffer(buffer, cx) {
+ worktree.read(cx).load_staged_file(path.as_ref(), cx)
+ } else {
return Task::ready(Err(anyhow!("no such worktree")));
- };
+ }
+ }
- worktree.read(cx).load_staged_file(path.as_ref(), cx)
+ fn load_committed_text(
+ &self,
+ buffer: &Entity<Buffer>,
+ cx: &App,
+ ) -> Task<Result<Option<String>>> {
+ if let Some((worktree, path)) = self.worktree_for_buffer(buffer, cx) {
+ worktree.read(cx).load_committed_file(path.as_ref(), cx)
+ } else {
+ Task::ready(Err(anyhow!("no such worktree")))
+ }
}
fn save_local_buffer(
@@ -526,74 +790,137 @@ impl LocalBufferStore {
) {
debug_assert!(worktree_handle.read(cx).is_local());
- let buffer_change_sets = this
- .opened_buffers
- .values()
- .filter_map(|buffer| {
- if let OpenBuffer::Complete {
- buffer,
- unstaged_changes,
- } = buffer
- {
- let buffer = buffer.upgrade()?.read(cx);
- let file = File::from_dyn(buffer.file())?;
- if file.worktree != worktree_handle {
- return None;
- }
- changed_repos
- .iter()
- .find(|(work_dir, _)| file.path.starts_with(work_dir))?;
- let unstaged_changes = unstaged_changes.as_ref()?.upgrade()?;
- let snapshot = buffer.text_snapshot();
- Some((unstaged_changes, snapshot, file.path.clone()))
- } else {
- None
- }
- })
- .collect::<Vec<_>>();
+ let mut diff_state_updates = Vec::new();
+ for buffer in this.opened_buffers.values() {
+ let OpenBuffer::Complete { buffer, diff_state } = buffer else {
+ continue;
+ };
+ let Some(buffer) = buffer.upgrade() else {
+ continue;
+ };
+ let buffer = buffer.read(cx);
+ let Some(file) = File::from_dyn(buffer.file()) else {
+ continue;
+ };
+ if file.worktree != worktree_handle {
+ continue;
+ }
+ let diff_state = diff_state.read(cx);
+ if changed_repos
+ .iter()
+ .any(|(work_dir, _)| file.path.starts_with(work_dir))
+ {
+ let snapshot = buffer.text_snapshot();
+ diff_state_updates.push((
+ snapshot.clone(),
+ file.path.clone(),
+ diff_state
+ .unstaged_diff
+ .as_ref()
+ .and_then(|set| set.upgrade())
+ .is_some(),
+ diff_state
+ .uncommitted_diff
+ .as_ref()
+ .and_then(|set| set.upgrade())
+ .is_some(),
+ ))
+ }
+ }
- if buffer_change_sets.is_empty() {
+ if diff_state_updates.is_empty() {
return;
}
cx.spawn(move |this, mut cx| async move {
let snapshot =
worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
- let diff_bases_by_buffer = cx
+ let diff_bases_changes_by_buffer = cx
.background_executor()
.spawn(async move {
- buffer_change_sets
+ diff_state_updates
.into_iter()
- .filter_map(|(change_set, buffer_snapshot, path)| {
- let local_repo = snapshot.local_repo_for_path(&path)?;
- let relative_path = local_repo.relativize(&path).ok()?;
- let base_text = local_repo.repo().load_index_text(&relative_path);
- Some((change_set, buffer_snapshot, base_text))
- })
+ .filter_map(
+ |(buffer_snapshot, path, needs_staged_text, needs_committed_text)| {
+ let local_repo = snapshot.local_repo_for_path(&path)?;
+ let relative_path = local_repo.relativize(&path).ok()?;
+ let staged_text = if needs_staged_text {
+ local_repo.repo().load_index_text(&relative_path)
+ } else {
+ None
+ };
+ let committed_text = if needs_committed_text {
+ local_repo.repo().load_committed_text(&relative_path)
+ } else {
+ None
+ };
+ let diff_bases_change =
+ match (needs_staged_text, needs_committed_text) {
+ (true, true) => Some(if staged_text == committed_text {
+ DiffBasesChange::SetBoth(committed_text)
+ } else {
+ DiffBasesChange::SetEach {
+ index: staged_text,
+ head: committed_text,
+ }
+ }),
+ (true, false) => {
+ Some(DiffBasesChange::SetIndex(staged_text))
+ }
+ (false, true) => {
+ Some(DiffBasesChange::SetHead(committed_text))
+ }
+ (false, false) => None,
+ };
+ Some((buffer_snapshot, diff_bases_change))
+ },
+ )
.collect::<Vec<_>>()
})
.await;
this.update(&mut cx, |this, cx| {
- for (change_set, buffer_snapshot, staged_text) in diff_bases_by_buffer {
- change_set.update(cx, |change_set, cx| {
- if let Some(staged_text) = staged_text.clone() {
- let _ =
- change_set.set_base_text(staged_text, buffer_snapshot.clone(), cx);
- } else {
- change_set.unset_base_text(buffer_snapshot.clone(), cx);
- }
- });
+ for (buffer_snapshot, diff_bases_change) in diff_bases_changes_by_buffer {
+ let Some(OpenBuffer::Complete { diff_state, .. }) =
+ this.opened_buffers.get_mut(&buffer_snapshot.remote_id())
+ else {
+ continue;
+ };
+ let Some(diff_bases_change) = diff_bases_change else {
+ continue;
+ };
- if let Some((client, project_id)) = &this.downstream_client.clone() {
- client
- .send(proto::UpdateDiffBase {
+ diff_state.update(cx, |diff_state, cx| {
+ use proto::update_diff_bases::Mode;
+
+ if let Some((client, project_id)) = this.downstream_client.as_ref() {
+ let buffer_id = buffer_snapshot.remote_id().to_proto();
+ let (staged_text, committed_text, mode) = match diff_bases_change
+ .clone()
+ {
+ DiffBasesChange::SetIndex(index) => (index, None, Mode::IndexOnly),
+ DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
+ DiffBasesChange::SetEach { index, head } => {
+ (index, head, Mode::IndexAndHead)
+ }
+ DiffBasesChange::SetBoth(text) => {
+ (None, text, Mode::IndexMatchesHead)
+ }
+ };
+ let message = proto::UpdateDiffBases {
project_id: *project_id,
- buffer_id: buffer_snapshot.remote_id().to_proto(),
+ buffer_id,
staged_text,
- })
- .log_err();
- }
+ committed_text,
+ mode: mode as i32,
+ };
+
+ client.send(message).log_err();
+ }
+
+ let _ =
+ diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
+ });
}
})
})
@@ -898,8 +1225,9 @@ impl BufferStore {
client.add_entity_request_handler(Self::handle_blame_buffer);
client.add_entity_request_handler(Self::handle_reload_buffers);
client.add_entity_request_handler(Self::handle_get_permalink_to_line);
- client.add_entity_request_handler(Self::handle_get_staged_text);
- client.add_entity_message_handler(Self::handle_update_diff_base);
+ client.add_entity_request_handler(Self::handle_open_unstaged_diff);
+ client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
+ client.add_entity_message_handler(Self::handle_update_diff_bases);
}
/// Creates a buffer store, optionally retaining its buffers.
@@ -920,7 +1248,7 @@ impl BufferStore {
opened_buffers: Default::default(),
shared_buffers: Default::default(),
loading_buffers: Default::default(),
- loading_change_sets: Default::default(),
+ loading_diffs: Default::default(),
worktree_store,
}
}
@@ -943,7 +1271,7 @@ impl BufferStore {
downstream_client: None,
opened_buffers: Default::default(),
loading_buffers: Default::default(),
- loading_change_sets: Default::default(),
+ loading_diffs: Default::default(),
shared_buffers: Default::default(),
worktree_store,
}
@@ -1016,30 +1344,36 @@ impl BufferStore {
.spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
}
- pub fn open_unstaged_changes(
+ pub fn open_unstaged_diff(
&mut self,
buffer: Entity<Buffer>,
cx: &mut Context<Self>,
- ) -> Task<Result<Entity<BufferChangeSet>>> {
+ ) -> Task<Result<Entity<BufferDiff>>> {
let buffer_id = buffer.read(cx).remote_id();
- if let Some(change_set) = self.get_unstaged_changes(buffer_id) {
- return Task::ready(Ok(change_set));
+ if let Some(diff) = self.get_unstaged_diff(buffer_id, cx) {
+ return Task::ready(Ok(diff));
}
- let task = match self.loading_change_sets.entry(buffer_id) {
+ let task = match self.loading_diffs.entry((buffer_id, DiffKind::Unstaged)) {
hash_map::Entry::Occupied(e) => e.get().clone(),
hash_map::Entry::Vacant(entry) => {
- let load = match &self.state {
+ let staged_text = match &self.state {
BufferStoreState::Local(this) => this.load_staged_text(&buffer, cx),
- BufferStoreState::Remote(this) => this.load_staged_text(buffer_id, cx),
+ BufferStoreState::Remote(this) => this.open_unstaged_diff(buffer_id, cx),
};
entry
.insert(
cx.spawn(move |this, cx| async move {
- Self::open_unstaged_changes_internal(this, load.await, buffer, cx)
- .await
- .map_err(Arc::new)
+ Self::open_diff_internal(
+ this,
+ DiffKind::Unstaged,
+ staged_text.await.map(DiffBasesChange::SetIndex),
+ buffer,
+ cx,
+ )
+ .await
+ .map_err(Arc::new)
})
.shared(),
)
@@ -1051,53 +1385,136 @@ impl BufferStore {
.spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
}
- #[cfg(any(test, feature = "test-support"))]
- pub fn set_change_set(&mut self, buffer_id: BufferId, change_set: Entity<BufferChangeSet>) {
- self.loading_change_sets
- .insert(buffer_id, Task::ready(Ok(change_set)).shared());
+ pub fn open_uncommitted_diff(
+ &mut self,
+ buffer: Entity<Buffer>,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Entity<BufferDiff>>> {
+ let buffer_id = buffer.read(cx).remote_id();
+ if let Some(diff) = self.get_uncommitted_diff(buffer_id, cx) {
+ return Task::ready(Ok(diff));
+ }
+
+ let task = match self.loading_diffs.entry((buffer_id, DiffKind::Uncommitted)) {
+ hash_map::Entry::Occupied(e) => e.get().clone(),
+ hash_map::Entry::Vacant(entry) => {
+ let changes = match &self.state {
+ BufferStoreState::Local(this) => {
+ let committed_text = this.load_committed_text(&buffer, cx);
+ let staged_text = this.load_staged_text(&buffer, cx);
+ cx.background_executor().spawn(async move {
+ let committed_text = committed_text.await?;
+ let staged_text = staged_text.await?;
+ let diff_bases_change = if committed_text == staged_text {
+ DiffBasesChange::SetBoth(committed_text)
+ } else {
+ DiffBasesChange::SetEach {
+ index: staged_text,
+ head: committed_text,
+ }
+ };
+ Ok(diff_bases_change)
+ })
+ }
+ BufferStoreState::Remote(this) => this.open_uncommitted_diff(buffer_id, cx),
+ };
+
+ entry
+ .insert(
+ cx.spawn(move |this, cx| async move {
+ Self::open_diff_internal(
+ this,
+ DiffKind::Uncommitted,
+ changes.await,
+ buffer,
+ cx,
+ )
+ .await
+ .map_err(Arc::new)
+ })
+ .shared(),
+ )
+ .clone()
+ }
+ };
+
+ cx.background_executor()
+ .spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
}
- pub async fn open_unstaged_changes_internal(
+ async fn open_diff_internal(
this: WeakEntity<Self>,
- text: Result<Option<String>>,
- buffer: Entity<Buffer>,
+ kind: DiffKind,
+ texts: Result<DiffBasesChange>,
+ buffer_entity: Entity<Buffer>,
mut cx: AsyncApp,
- ) -> Result<Entity<BufferChangeSet>> {
- let text = match text {
+ ) -> Result<Entity<BufferDiff>> {
+ let diff_bases_change = match texts {
Err(e) => {
this.update(&mut cx, |this, cx| {
- let buffer_id = buffer.read(cx).remote_id();
- this.loading_change_sets.remove(&buffer_id);
+ let buffer = buffer_entity.read(cx);
+ let buffer_id = buffer.remote_id();
+ this.loading_diffs.remove(&(buffer_id, kind));
})?;
return Err(e);
}
- Ok(text) => text,
+ Ok(change) => change,
};
- let change_set = cx.new(|cx| BufferChangeSet::new(&buffer, cx)).unwrap();
-
- if let Some(text) = text {
- change_set
- .update(&mut cx, |change_set, cx| {
- let snapshot = buffer.read(cx).text_snapshot();
- change_set.set_base_text(text, snapshot, cx)
- })?
- .await
- .ok();
- }
-
this.update(&mut cx, |this, cx| {
- let buffer_id = buffer.read(cx).remote_id();
- this.loading_change_sets.remove(&buffer_id);
- if let Some(OpenBuffer::Complete {
- unstaged_changes, ..
- }) = this.opened_buffers.get_mut(&buffer.read(cx).remote_id())
+ let buffer = buffer_entity.read(cx);
+ let buffer_id = buffer.remote_id();
+ let language = buffer.language().cloned();
+ let language_registry = buffer.language_registry();
+ let text_snapshot = buffer.text_snapshot();
+ this.loading_diffs.remove(&(buffer_id, kind));
+
+ if let Some(OpenBuffer::Complete { diff_state, .. }) =
+ this.opened_buffers.get_mut(&buffer_id)
{
- *unstaged_changes = Some(change_set.downgrade());
- }
- })?;
+ diff_state.update(cx, |diff_state, cx| {
+ diff_state.language = language;
+ diff_state.language_registry = language_registry;
+
+ let diff = cx.new(|_| BufferDiff {
+ buffer_id,
+ snapshot: BufferDiffSnapshot::new(&text_snapshot),
+ unstaged_diff: None,
+ });
+ match kind {
+ DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
+ DiffKind::Uncommitted => {
+ let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
+ diff
+ } else {
+ let unstaged_diff = cx.new(|_| BufferDiff {
+ buffer_id,
+ snapshot: BufferDiffSnapshot::new(&text_snapshot),
+ unstaged_diff: None,
+ });
+ diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
+ unstaged_diff
+ };
+
+ diff.update(cx, |diff, _| {
+ diff.unstaged_diff = Some(unstaged_diff);
+ });
+ diff_state.uncommitted_diff = Some(diff.downgrade())
+ }
+ };
- Ok(change_set)
+ let rx = diff_state.diff_bases_changed(text_snapshot, diff_bases_change, cx);
+
+ Ok(async move {
+ rx.await.ok();
+ Ok(diff)
+ })
+ })
+ } else {
+ Err(anyhow!("buffer was closed"))
+ }
+ })??
+ .await
}
pub fn create_buffer(&mut self, cx: &mut Context<Self>) -> Task<Result<Entity<Buffer>>> {
@@ -1298,16 +1715,23 @@ impl BufferStore {
}
}
- fn add_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) -> Result<()> {
- let remote_id = buffer.read(cx).remote_id();
- let is_remote = buffer.read(cx).replica_id() != 0;
+ fn add_buffer(&mut self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) -> Result<()> {
+ let buffer = buffer_entity.read(cx);
+ let language = buffer.language().cloned();
+ let language_registry = buffer.language_registry();
+ let remote_id = buffer.remote_id();
+ let is_remote = buffer.replica_id() != 0;
let open_buffer = OpenBuffer::Complete {
- buffer: buffer.downgrade(),
- unstaged_changes: None,
+ buffer: buffer_entity.downgrade(),
+ diff_state: cx.new(|_| BufferDiffState {
+ language,
+ language_registry,
+ ..Default::default()
+ }),
};
let handle = cx.entity().downgrade();
- buffer.update(cx, move |_, cx| {
+ buffer_entity.update(cx, move |_, cx| {
cx.on_release(move |buffer, cx| {
handle
.update(cx, |_, cx| {
@@ -1324,7 +1748,7 @@ impl BufferStore {
}
hash_map::Entry::Occupied(mut entry) => {
if let OpenBuffer::Operations(operations) = entry.get_mut() {
- buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx));
+ buffer_entity.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx));
} else if entry.get().upgrade().is_some() {
if is_remote {
return Ok(());
@@ -1337,8 +1761,8 @@ impl BufferStore {
}
}
- cx.subscribe(&buffer, Self::on_buffer_event).detach();
- cx.emit(BufferStoreEvent::BufferAdded(buffer));
+ cx.subscribe(&buffer_entity, Self::on_buffer_event).detach();
+ cx.emit(BufferStoreEvent::BufferAdded(buffer_entity));
Ok(())
}
@@ -1384,12 +1808,21 @@ impl BufferStore {
})
}
- pub fn get_unstaged_changes(&self, buffer_id: BufferId) -> Option<Entity<BufferChangeSet>> {
- if let OpenBuffer::Complete {
- unstaged_changes, ..
- } = self.opened_buffers.get(&buffer_id)?
- {
- unstaged_changes.as_ref()?.upgrade()
+ pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
+ if let OpenBuffer::Complete { diff_state, .. } = self.opened_buffers.get(&buffer_id)? {
+ diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
+ } else {
+ None
+ }
+ }
+
+ pub fn get_uncommitted_diff(
+ &self,
+ buffer_id: BufferId,
+ cx: &App,
+ ) -> Option<Entity<BufferDiff>> {
+ if let OpenBuffer::Complete { diff_state, .. } = self.opened_buffers.get(&buffer_id)? {
+ diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
} else {
None
}
@@ -1509,21 +1942,13 @@ impl BufferStore {
) -> impl Future<Output = ()> {
let mut futures = Vec::new();
for buffer in buffers {
- let buffer = buffer.read(cx).text_snapshot();
- if let Some(OpenBuffer::Complete {
- unstaged_changes, ..
- }) = self.opened_buffers.get_mut(&buffer.remote_id())
+ if let Some(OpenBuffer::Complete { diff_state, .. }) =
+ self.opened_buffers.get_mut(&buffer.read(cx).remote_id())
{
- if let Some(unstaged_changes) = unstaged_changes
- .as_ref()
- .and_then(|changes| changes.upgrade())
- {
- unstaged_changes.update(cx, |unstaged_changes, cx| {
- futures.push(unstaged_changes.recalculate_diff(buffer.clone(), cx));
- });
- } else {
- unstaged_changes.take();
- }
+ let buffer = buffer.read(cx).text_snapshot();
+ futures.push(diff_state.update(cx, |diff_state, cx| {
+ diff_state.recalculate_diffs(buffer, cx)
+ }));
}
}
async move {
@@ -1558,6 +1983,16 @@ impl BufferStore {
})
.log_err();
}
+ BufferEvent::LanguageChanged => {
+ let buffer_id = buffer.read(cx).remote_id();
+ if let Some(OpenBuffer::Complete { diff_state, .. }) =
+ self.opened_buffers.get(&buffer_id)
+ {
+ diff_state.update(cx, |diff_state, cx| {
+ diff_state.buffer_language_changed(buffer, cx);
+ });
+ }
+ }
_ => {}
}
}
@@ -1632,7 +2067,7 @@ impl BufferStore {
.entry(buffer_id)
.or_insert_with(|| SharedBuffer {
buffer: buffer.clone(),
- unstaged_changes: None,
+ diff: None,
lsp_handle: None,
});
@@ -1937,16 +2372,16 @@ impl BufferStore {
})
}
- pub async fn handle_get_staged_text(
+ pub async fn handle_open_unstaged_diff(
this: Entity<Self>,
- request: TypedEnvelope<proto::GetStagedText>,
+ request: TypedEnvelope<proto::OpenUnstagedDiff>,
mut cx: AsyncApp,
- ) -> Result<proto::GetStagedTextResponse> {
+ ) -> Result<proto::OpenUnstagedDiffResponse> {
let buffer_id = BufferId::new(request.payload.buffer_id)?;
- let change_set = this
+ let diff = this
.update(&mut cx, |this, cx| {
let buffer = this.get(buffer_id)?;
- Some(this.open_unstaged_changes(buffer, cx))
+ Some(this.open_unstaged_diff(buffer, cx))
})?
.ok_or_else(|| anyhow!("no such buffer"))?
.await?;
@@ -1957,43 +2392,95 @@ impl BufferStore {
.or_default();
debug_assert!(shared_buffers.contains_key(&buffer_id));
if let Some(shared) = shared_buffers.get_mut(&buffer_id) {
- shared.unstaged_changes = Some(change_set.clone());
+ shared.diff = Some(diff.clone());
}
})?;
- let staged_text = change_set.read_with(&cx, |change_set, _| {
- change_set.base_text.as_ref().map(|buffer| buffer.text())
+ let staged_text = diff.read_with(&cx, |diff, _| {
+ diff.snapshot.base_text.as_ref().map(|buffer| buffer.text())
})?;
- Ok(proto::GetStagedTextResponse { staged_text })
+ Ok(proto::OpenUnstagedDiffResponse { staged_text })
}
- pub async fn handle_update_diff_base(
+ pub async fn handle_open_uncommitted_diff(
this: Entity<Self>,
- request: TypedEnvelope<proto::UpdateDiffBase>,
+ request: TypedEnvelope<proto::OpenUncommittedDiff>,
mut cx: AsyncApp,
- ) -> Result<()> {
+ ) -> Result<proto::OpenUncommittedDiffResponse> {
let buffer_id = BufferId::new(request.payload.buffer_id)?;
- let Some((buffer, change_set)) = this.update(&mut cx, |this, _| {
- if let OpenBuffer::Complete {
- unstaged_changes,
- buffer,
- } = this.opened_buffers.get(&buffer_id)?
- {
- Some((buffer.upgrade()?, unstaged_changes.as_ref()?.upgrade()?))
- } else {
- None
+ let diff = this
+ .update(&mut cx, |this, cx| {
+ let buffer = this.get(buffer_id)?;
+ Some(this.open_uncommitted_diff(buffer, cx))
+ })?
+ .ok_or_else(|| anyhow!("no such buffer"))?
+ .await?;
+ this.update(&mut cx, |this, _| {
+ let shared_buffers = this
+ .shared_buffers
+ .entry(request.original_sender_id.unwrap_or(request.sender_id))
+ .or_default();
+ debug_assert!(shared_buffers.contains_key(&buffer_id));
+ if let Some(shared) = shared_buffers.get_mut(&buffer_id) {
+ shared.diff = Some(diff.clone());
}
- })?
- else {
- return Ok(());
- };
- change_set.update(&mut cx, |change_set, cx| {
- if let Some(staged_text) = request.payload.staged_text {
- let _ = change_set.set_base_text(staged_text, buffer.read(cx).text_snapshot(), cx);
+ })?;
+ diff.read_with(&cx, |diff, cx| {
+ use proto::open_uncommitted_diff_response::Mode;
+
+ let staged_buffer = diff
+ .unstaged_diff
+ .as_ref()
+ .and_then(|diff| diff.read(cx).snapshot.base_text.as_ref());
+
+ let mode;
+ let staged_text;
+ let committed_text;
+ if let Some(committed_buffer) = &diff.snapshot.base_text {
+ committed_text = Some(committed_buffer.text());
+ if let Some(staged_buffer) = staged_buffer {
+ if staged_buffer.remote_id() == committed_buffer.remote_id() {
+ mode = Mode::IndexMatchesHead;
+ staged_text = None;
+ } else {
+ mode = Mode::IndexAndHead;
+ staged_text = Some(staged_buffer.text());
+ }
+ } else {
+ mode = Mode::IndexAndHead;
+ staged_text = None;
+ }
} else {
- change_set.unset_base_text(buffer.read(cx).text_snapshot(), cx)
+ mode = Mode::IndexAndHead;
+ committed_text = None;
+ staged_text = staged_buffer.as_ref().map(|buffer| buffer.text());
}
- })?;
- Ok(())
+
+ proto::OpenUncommittedDiffResponse {
+ committed_text,
+ staged_text,
+ mode: mode.into(),
+ }
+ })
+ }
+
+ pub async fn handle_update_diff_bases(
+ this: Entity<Self>,
+ request: TypedEnvelope<proto::UpdateDiffBases>,
+ mut cx: AsyncApp,
+ ) -> Result<()> {
+ let buffer_id = BufferId::new(request.payload.buffer_id)?;
+ this.update(&mut cx, |this, cx| {
+ if let Some(OpenBuffer::Complete { diff_state, buffer }) =
+ this.opened_buffers.get_mut(&buffer_id)
+ {
+ if let Some(buffer) = buffer.upgrade() {
+ let buffer = buffer.read(cx).text_snapshot();
+ diff_state.update(cx, |diff_state, cx| {
+ diff_state.handle_base_texts_updated(buffer, request.payload, cx);
+ })
+ }
+ }
+ })
}
pub fn reload_buffers(
@@ -1,6 +1,7 @@
+use crate::buffer_store::BufferStore;
use crate::worktree_store::{WorktreeStore, WorktreeStoreEvent};
use crate::{Project, ProjectPath};
-use anyhow::{anyhow, Context as _};
+use anyhow::Context as _;
use client::ProjectId;
use futures::channel::{mpsc, oneshot};
use futures::StreamExt as _;
@@ -8,24 +9,29 @@ use git::{
repository::{GitRepository, RepoPath},
status::{GitSummary, TrackedSummary},
};
-use gpui::{App, Context, Entity, EventEmitter, SharedString, Subscription, WeakEntity};
+use gpui::{
+ App, AppContext, Context, Entity, EventEmitter, SharedString, Subscription, Task, WeakEntity,
+};
+use language::{Buffer, LanguageRegistry};
use rpc::{proto, AnyProtoClient};
use settings::WorktreeId;
+use std::path::Path;
use std::sync::Arc;
+use text::BufferId;
use util::{maybe, ResultExt};
use worktree::{ProjectEntryId, RepositoryEntry, StatusEntry};
pub struct GitState {
project_id: Option<ProjectId>,
client: Option<AnyProtoClient>,
- repositories: Vec<RepositoryHandle>,
+ repositories: Vec<Entity<Repository>>,
active_index: Option<usize>,
update_sender: mpsc::UnboundedSender<(Message, oneshot::Sender<anyhow::Result<()>>)>,
_subscription: Subscription,
}
-#[derive(Clone)]
-pub struct RepositoryHandle {
+pub struct Repository {
+ commit_message_buffer: Option<Entity<Buffer>>,
git_state: WeakEntity<GitState>,
pub worktree_id: WorktreeId,
pub repository_entry: RepositoryEntry,
@@ -44,36 +50,23 @@ pub enum GitRepo {
},
}
-impl PartialEq<Self> for RepositoryHandle {
- fn eq(&self, other: &Self) -> bool {
- self.worktree_id == other.worktree_id
- && self.repository_entry.work_directory_id()
- == other.repository_entry.work_directory_id()
- }
-}
-
-impl Eq for RepositoryHandle {}
-
-impl PartialEq<RepositoryEntry> for RepositoryHandle {
- fn eq(&self, other: &RepositoryEntry) -> bool {
- self.repository_entry.work_directory_id() == other.work_directory_id()
- }
-}
-
enum Message {
Commit {
git_repo: GitRepo,
+ message: SharedString,
name_and_email: Option<(SharedString, SharedString)>,
},
Stage(GitRepo, Vec<RepoPath>),
Unstage(GitRepo, Vec<RepoPath>),
}
-pub enum Event {
- RepositoriesUpdated,
+pub enum GitEvent {
+ ActiveRepositoryChanged,
+ FileSystemUpdated,
+ GitStateUpdated,
}
-impl EventEmitter<Event> for GitState {}
+impl EventEmitter<GitEvent> for GitState {}
impl GitState {
pub fn new(
@@ -95,7 +88,7 @@ impl GitState {
}
}
- pub fn active_repository(&self) -> Option<RepositoryHandle> {
+ pub fn active_repository(&self) -> Option<Entity<Repository>> {
self.active_index
.map(|index| self.repositories[index].clone())
}
@@ -103,7 +96,7 @@ impl GitState {
fn on_worktree_store_event(
&mut self,
worktree_store: Entity<WorktreeStore>,
- _event: &WorktreeStoreEvent,
+ event: &WorktreeStoreEvent,
cx: &mut Context<'_, Self>,
) {
// TODO inspect the event
@@ -116,7 +109,7 @@ impl GitState {
worktree_store.update(cx, |worktree_store, cx| {
for worktree in worktree_store.worktrees() {
- worktree.update(cx, |worktree, _| {
+ worktree.update(cx, |worktree, cx| {
let snapshot = worktree.snapshot();
for repo in snapshot.repositories().iter() {
let git_repo = worktree
@@ -137,27 +130,34 @@ impl GitState {
let Some(git_repo) = git_repo else {
continue;
};
- let existing = self
- .repositories
- .iter()
- .enumerate()
- .find(|(_, existing_handle)| existing_handle == &repo);
+ let worktree_id = worktree.id();
+ let existing =
+ self.repositories
+ .iter()
+ .enumerate()
+ .find(|(_, existing_handle)| {
+ existing_handle.read(cx).id()
+ == (worktree_id, repo.work_directory_id())
+ });
let handle = if let Some((index, handle)) = existing {
if self.active_index == Some(index) {
new_active_index = Some(new_repositories.len());
}
// Update the statuses but keep everything else.
- let mut existing_handle = handle.clone();
- existing_handle.repository_entry = repo.clone();
+ let existing_handle = handle.clone();
+ existing_handle.update(cx, |existing_handle, _| {
+ existing_handle.repository_entry = repo.clone();
+ });
existing_handle
} else {
- RepositoryHandle {
+ cx.new(|_| Repository {
git_state: this.clone(),
- worktree_id: worktree.id(),
+ worktree_id,
repository_entry: repo.clone(),
git_repo,
update_sender: self.update_sender.clone(),
- }
+ commit_message_buffer: None,
+ })
};
new_repositories.push(handle);
}
@@ -172,10 +172,17 @@ impl GitState {
self.repositories = new_repositories;
self.active_index = new_active_index;
- cx.emit(Event::RepositoriesUpdated);
+ match event {
+ WorktreeStoreEvent::WorktreeUpdatedGitRepositories(_) => {
+ cx.emit(GitEvent::GitStateUpdated);
+ }
+ _ => {
+ cx.emit(GitEvent::FileSystemUpdated);
+ }
+ }
}
- pub fn all_repositories(&self) -> Vec<RepositoryHandle> {
+ pub fn all_repositories(&self) -> Vec<Entity<Repository>> {
self.repositories.clone()
}
@@ -251,10 +258,12 @@ impl GitState {
}
Message::Commit {
git_repo,
+ message,
name_and_email,
} => {
match git_repo {
GitRepo::Local(repo) => repo.commit(
+ message.as_ref(),
name_and_email
.as_ref()
.map(|(name, email)| (name.as_ref(), email.as_ref())),
@@ -271,6 +280,7 @@ impl GitState {
project_id: project_id.0,
worktree_id: worktree_id.to_proto(),
work_directory_id: work_directory_id.to_proto(),
+ message: String::from(message),
name: name.map(String::from),
email: email.map(String::from),
})
@@ -284,7 +294,11 @@ impl GitState {
}
}
-impl RepositoryHandle {
+impl Repository {
+ fn id(&self) -> (WorktreeId, ProjectEntryId) {
+ (self.worktree_id, self.repository_entry.work_directory_id())
+ }
+
pub fn display_name(&self, project: &Project, cx: &App) -> SharedString {
maybe!({
let path = self.repo_path_to_project_path(&"".into())?;
@@ -300,21 +314,21 @@ impl RepositoryHandle {
.unwrap_or("".into())
}
- pub fn activate(&self, cx: &mut App) {
+ pub fn activate(&self, cx: &mut Context<Self>) {
let Some(git_state) = self.git_state.upgrade() else {
return;
};
+ let entity = cx.entity();
git_state.update(cx, |git_state, cx| {
- let Some((index, _)) = git_state
+ let Some(index) = git_state
.repositories
.iter()
- .enumerate()
- .find(|(_, handle)| handle == &self)
+ .position(|handle| *handle == entity)
else {
return;
};
git_state.active_index = Some(index);
- cx.emit(Event::RepositoriesUpdated);
+ cx.emit(GitEvent::ActiveRepositoryChanged);
});
}
@@ -322,59 +336,147 @@ impl RepositoryHandle {
self.repository_entry.status()
}
+ pub fn has_conflict(&self, path: &RepoPath) -> bool {
+ self.repository_entry
+ .current_merge_conflicts
+ .contains(&path)
+ }
+
pub fn repo_path_to_project_path(&self, path: &RepoPath) -> Option<ProjectPath> {
let path = self.repository_entry.unrelativize(path)?;
Some((self.worktree_id, path).into())
}
pub fn project_path_to_repo_path(&self, path: &ProjectPath) -> Option<RepoPath> {
- if path.worktree_id != self.worktree_id {
+ self.worktree_id_path_to_repo_path(path.worktree_id, &path.path)
+ }
+
+ pub fn worktree_id_path_to_repo_path(
+ &self,
+ worktree_id: WorktreeId,
+ path: &Path,
+ ) -> Option<RepoPath> {
+ if worktree_id != self.worktree_id {
return None;
}
- self.repository_entry.relativize(&path.path).log_err()
+ self.repository_entry.relativize(path).log_err()
}
- pub async fn stage_entries(&self, entries: Vec<RepoPath>) -> anyhow::Result<()> {
- if entries.is_empty() {
- return Ok(());
+ pub fn open_commit_buffer(
+ &mut self,
+ languages: Option<Arc<LanguageRegistry>>,
+ buffer_store: Entity<BufferStore>,
+ cx: &mut Context<Self>,
+ ) -> Task<anyhow::Result<Entity<Buffer>>> {
+ if let Some(buffer) = self.commit_message_buffer.clone() {
+ return Task::ready(Ok(buffer));
+ }
+
+ if let GitRepo::Remote {
+ project_id,
+ client,
+ worktree_id,
+ work_directory_id,
+ } = self.git_repo.clone()
+ {
+ let client = client.clone();
+ cx.spawn(|repository, mut cx| async move {
+ let request = client.request(proto::OpenCommitMessageBuffer {
+ project_id: project_id.0,
+ worktree_id: worktree_id.to_proto(),
+ work_directory_id: work_directory_id.to_proto(),
+ });
+ let response = request.await.context("requesting to open commit buffer")?;
+ let buffer_id = BufferId::new(response.buffer_id)?;
+ let buffer = buffer_store
+ .update(&mut cx, |buffer_store, cx| {
+ buffer_store.wait_for_remote_buffer(buffer_id, cx)
+ })?
+ .await?;
+ if let Some(language_registry) = languages {
+ let git_commit_language =
+ language_registry.language_for_name("Git Commit").await?;
+ buffer.update(&mut cx, |buffer, cx| {
+ buffer.set_language(Some(git_commit_language), cx);
+ })?;
+ }
+ repository.update(&mut cx, |repository, _| {
+ repository.commit_message_buffer = Some(buffer.clone());
+ })?;
+ Ok(buffer)
+ })
+ } else {
+ self.open_local_commit_buffer(languages, buffer_store, cx)
}
+ }
+
+ fn open_local_commit_buffer(
+ &mut self,
+ language_registry: Option<Arc<LanguageRegistry>>,
+ buffer_store: Entity<BufferStore>,
+ cx: &mut Context<Self>,
+ ) -> Task<anyhow::Result<Entity<Buffer>>> {
+ cx.spawn(|repository, mut cx| async move {
+ let buffer = buffer_store
+ .update(&mut cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
+ .await?;
+
+ if let Some(language_registry) = language_registry {
+ let git_commit_language = language_registry.language_for_name("Git Commit").await?;
+ buffer.update(&mut cx, |buffer, cx| {
+ buffer.set_language(Some(git_commit_language), cx);
+ })?;
+ }
+
+ repository.update(&mut cx, |repository, _| {
+ repository.commit_message_buffer = Some(buffer.clone());
+ })?;
+ Ok(buffer)
+ })
+ }
+
+ pub fn stage_entries(&self, entries: Vec<RepoPath>) -> oneshot::Receiver<anyhow::Result<()>> {
let (result_tx, result_rx) = futures::channel::oneshot::channel();
+ if entries.is_empty() {
+ result_tx.send(Ok(())).ok();
+ return result_rx;
+ }
self.update_sender
.unbounded_send((Message::Stage(self.git_repo.clone(), entries), result_tx))
- .map_err(|_| anyhow!("Failed to submit stage operation"))?;
-
- result_rx.await?
+ .ok();
+ result_rx
}
- pub async fn unstage_entries(&self, entries: Vec<RepoPath>) -> anyhow::Result<()> {
+ pub fn unstage_entries(&self, entries: Vec<RepoPath>) -> oneshot::Receiver<anyhow::Result<()>> {
+ let (result_tx, result_rx) = futures::channel::oneshot::channel();
if entries.is_empty() {
- return Ok(());
+ result_tx.send(Ok(())).ok();
+ return result_rx;
}
- let (result_tx, result_rx) = futures::channel::oneshot::channel();
self.update_sender
.unbounded_send((Message::Unstage(self.git_repo.clone(), entries), result_tx))
- .map_err(|_| anyhow!("Failed to submit unstage operation"))?;
- result_rx.await?
+ .ok();
+ result_rx
}
- pub async fn stage_all(&self) -> anyhow::Result<()> {
+ pub fn stage_all(&self) -> oneshot::Receiver<anyhow::Result<()>> {
let to_stage = self
.repository_entry
.status()
.filter(|entry| !entry.status.is_staged().unwrap_or(false))
.map(|entry| entry.repo_path.clone())
.collect();
- self.stage_entries(to_stage).await
+ self.stage_entries(to_stage)
}
- pub async fn unstage_all(&self) -> anyhow::Result<()> {
+ pub fn unstage_all(&self) -> oneshot::Receiver<anyhow::Result<()>> {
let to_unstage = self
.repository_entry
.status()
.filter(|entry| entry.status.is_staged().unwrap_or(true))
.map(|entry| entry.repo_path.clone())
.collect();
- self.unstage_entries(to_unstage).await
+ self.unstage_entries(to_unstage)
}
/// Get a count of all entries in the active repository, including
@@ -395,18 +497,22 @@ impl RepositoryHandle {
return self.have_changes() && (commit_all || self.have_staged_changes());
}
- pub async fn commit(
+ pub fn commit(
&self,
+ message: SharedString,
name_and_email: Option<(SharedString, SharedString)>,
- ) -> anyhow::Result<()> {
+ ) -> oneshot::Receiver<anyhow::Result<()>> {
let (result_tx, result_rx) = futures::channel::oneshot::channel();
- self.update_sender.unbounded_send((
- Message::Commit {
- git_repo: self.git_repo.clone(),
- name_and_email,
- },
- result_tx,
- ))?;
- result_rx.await?
+ self.update_sender
+ .unbounded_send((
+ Message::Commit {
+ git_repo: self.git_repo.clone(),
+ message,
+ name_and_email,
+ },
+ result_tx,
+ ))
+ .ok();
+ result_rx
}
}
@@ -2,12 +2,15 @@ use crate::{
worktree_store::{WorktreeStore, WorktreeStoreEvent},
Project, ProjectEntryId, ProjectItem, ProjectPath,
};
-use anyhow::{Context as _, Result};
+use anyhow::{anyhow, Context as _, Result};
use collections::{hash_map, HashMap, HashSet};
use futures::{channel::oneshot, StreamExt};
use gpui::{
- hash, prelude::*, App, Context, Entity, EventEmitter, Img, Subscription, Task, WeakEntity,
+ hash, prelude::*, App, AsyncApp, Context, Entity, EventEmitter, Img, Subscription, Task,
+ WeakEntity,
};
+pub use image::ImageFormat;
+use image::{ExtendedColorType, GenericImageView, ImageReader};
use language::{DiskState, File};
use rpc::{AnyProtoClient, ErrorExt as _};
use std::ffi::OsStr;
@@ -32,10 +35,12 @@ impl From<NonZeroU64> for ImageId {
}
}
+#[derive(Debug)]
pub enum ImageItemEvent {
ReloadNeeded,
Reloaded,
FileHandleChanged,
+ MetadataUpdated,
}
impl EventEmitter<ImageItemEvent> for ImageItem {}
@@ -46,14 +51,106 @@ pub enum ImageStoreEvent {
impl EventEmitter<ImageStoreEvent> for ImageStore {}
+#[derive(Debug, Clone, Copy)]
+pub struct ImageMetadata {
+ pub width: u32,
+ pub height: u32,
+ pub file_size: u64,
+ pub colors: Option<ImageColorInfo>,
+ pub format: ImageFormat,
+}
+
+#[derive(Debug, Clone, Copy)]
+pub struct ImageColorInfo {
+ pub channels: u8,
+ pub bits_per_channel: u8,
+}
+
+impl ImageColorInfo {
+ pub fn from_color_type(color_type: impl Into<ExtendedColorType>) -> Option<Self> {
+ let (channels, bits_per_channel) = match color_type.into() {
+ ExtendedColorType::L8 => (1, 8),
+ ExtendedColorType::L16 => (1, 16),
+ ExtendedColorType::La8 => (2, 8),
+ ExtendedColorType::La16 => (2, 16),
+ ExtendedColorType::Rgb8 => (3, 8),
+ ExtendedColorType::Rgb16 => (3, 16),
+ ExtendedColorType::Rgba8 => (4, 8),
+ ExtendedColorType::Rgba16 => (4, 16),
+ ExtendedColorType::A8 => (1, 8),
+ ExtendedColorType::Bgr8 => (3, 8),
+ ExtendedColorType::Bgra8 => (4, 8),
+ ExtendedColorType::Cmyk8 => (4, 8),
+ _ => return None,
+ };
+
+ Some(Self {
+ channels,
+ bits_per_channel,
+ })
+ }
+
+ pub const fn bits_per_pixel(&self) -> u8 {
+ self.channels * self.bits_per_channel
+ }
+}
+
pub struct ImageItem {
pub id: ImageId,
pub file: Arc<dyn File>,
pub image: Arc<gpui::Image>,
reload_task: Option<Task<()>>,
+ pub image_metadata: Option<ImageMetadata>,
}
impl ImageItem {
+ pub async fn load_image_metadata(
+ image: Entity<ImageItem>,
+ project: Entity<Project>,
+ cx: &mut AsyncApp,
+ ) -> Result<ImageMetadata> {
+ let (fs, image_path) = cx.update(|cx| {
+ let project_path = image.read(cx).project_path(cx);
+
+ let worktree = project
+ .read(cx)
+ .worktree_for_id(project_path.worktree_id, cx)
+ .ok_or_else(|| anyhow!("worktree not found"))?;
+ let worktree_root = worktree.read(cx).abs_path();
+ let image_path = image.read(cx).path();
+ let image_path = if image_path.is_absolute() {
+ image_path.to_path_buf()
+ } else {
+ worktree_root.join(image_path)
+ };
+
+ let fs = project.read(cx).fs().clone();
+
+ anyhow::Ok((fs, image_path))
+ })??;
+
+ let image_bytes = fs.load_bytes(&image_path).await?;
+ let image_format = image::guess_format(&image_bytes)?;
+
+ let mut image_reader = ImageReader::new(std::io::Cursor::new(image_bytes));
+ image_reader.set_format(image_format);
+ let image = image_reader.decode()?;
+
+ let (width, height) = image.dimensions();
+ let file_metadata = fs
+ .metadata(image_path.as_path())
+ .await?
+ .ok_or_else(|| anyhow!("failed to load image metadata"))?;
+
+ Ok(ImageMetadata {
+ width,
+ height,
+ file_size: file_metadata.len,
+ format: image_format,
+ colors: ImageColorInfo::from_color_type(image.color()),
+ })
+ }
+
pub fn project_path(&self, cx: &App) -> ProjectPath {
ProjectPath {
worktree_id: self.file.worktree_id(cx),
@@ -391,6 +488,7 @@ impl ImageStoreImpl for Entity<LocalImageStore> {
id: cx.entity_id().as_non_zero_u64().into(),
file: file.clone(),
image,
+ image_metadata: None,
reload_task: None,
})?;
@@ -299,28 +299,27 @@ impl LspCommand for PrepareRename {
_: LanguageServerId,
mut cx: AsyncApp,
) -> Result<PrepareRenameResponse> {
- buffer.update(&mut cx, |buffer, _| {
- match message {
- Some(lsp::PrepareRenameResponse::Range(range))
- | Some(lsp::PrepareRenameResponse::RangeWithPlaceholder { range, .. }) => {
- let Range { start, end } = range_from_lsp(range);
- if buffer.clip_point_utf16(start, Bias::Left) == start.0
- && buffer.clip_point_utf16(end, Bias::Left) == end.0
- {
- Ok(PrepareRenameResponse::Success(
- buffer.anchor_after(start)..buffer.anchor_before(end),
- ))
- } else {
- Ok(PrepareRenameResponse::InvalidPosition)
- }
- }
- Some(lsp::PrepareRenameResponse::DefaultBehavior { .. }) => {
- Err(anyhow!("Invalid for language server to send a `defaultBehavior` response to `prepareRename`"))
- }
- None => {
+ buffer.update(&mut cx, |buffer, _| match message {
+ Some(lsp::PrepareRenameResponse::Range(range))
+ | Some(lsp::PrepareRenameResponse::RangeWithPlaceholder { range, .. }) => {
+ let Range { start, end } = range_from_lsp(range);
+ if buffer.clip_point_utf16(start, Bias::Left) == start.0
+ && buffer.clip_point_utf16(end, Bias::Left) == end.0
+ {
+ Ok(PrepareRenameResponse::Success(
+ buffer.anchor_after(start)..buffer.anchor_before(end),
+ ))
+ } else {
Ok(PrepareRenameResponse::InvalidPosition)
}
}
+ Some(lsp::PrepareRenameResponse::DefaultBehavior { .. }) => {
+ let snapshot = buffer.snapshot();
+ let (range, _) = snapshot.surrounding_word(self.position);
+ let range = snapshot.anchor_after(range.start)..snapshot.anchor_before(range.end);
+ Ok(PrepareRenameResponse::Success(range))
+ }
+ None => Ok(PrepareRenameResponse::InvalidPosition),
})?
}
@@ -166,6 +166,19 @@ pub struct LocalLspStore {
}
impl LocalLspStore {
+ /// Returns the running language server for the given ID. Note if the language server is starting, it will not be returned.
+ pub fn running_language_server_for_id(
+ &self,
+ id: LanguageServerId,
+ ) -> Option<&Arc<LanguageServer>> {
+ let language_server_state = self.language_servers.get(&id)?;
+
+ match language_server_state {
+ LanguageServerState::Running { server, .. } => Some(server),
+ LanguageServerState::Starting(_) => None,
+ }
+ }
+
fn start_language_server(
&mut self,
worktree_handle: &Entity<Worktree>,
@@ -1964,7 +1977,12 @@ impl LocalLspStore {
Some(local) => local.abs_path(cx),
None => return,
};
- let file_url = lsp::Url::from_file_path(old_path).unwrap();
+ let file_url = lsp::Url::from_file_path(old_path.as_path()).unwrap_or_else(|_| {
+ panic!(
+ "`{}` is not parseable as an URI",
+ old_path.to_string_lossy()
+ )
+ });
self.unregister_buffer_from_language_servers(buffer, file_url, cx);
}
@@ -7366,10 +7384,6 @@ impl LspStore {
for diagnostic in ¶ms.diagnostics {
let source = diagnostic.source.as_ref();
- let code = diagnostic.code.as_ref().map(|code| match code {
- lsp::NumberOrString::Number(code) => code.to_string(),
- lsp::NumberOrString::String(code) => code.clone(),
- });
let range = range_from_lsp(diagnostic.range);
let is_supporting = diagnostic
.related_information
@@ -7378,7 +7392,7 @@ impl LspStore {
infos.iter().any(|info| {
primary_diagnostic_group_ids.contains_key(&(
source,
- code.clone(),
+ diagnostic.code.clone(),
range_from_lsp(info.location.range),
))
})
@@ -7390,7 +7404,7 @@ impl LspStore {
if is_supporting {
supporting_diagnostics.insert(
- (source, code.clone(), range),
+ (source, diagnostic.code.clone(), range),
(diagnostic.severity, is_unnecessary),
);
} else {
@@ -7400,13 +7414,13 @@ impl LspStore {
sources_by_group_id.insert(group_id, source);
primary_diagnostic_group_ids
- .insert((source, code.clone(), range.clone()), group_id);
+ .insert((source, diagnostic.code.clone(), range.clone()), group_id);
diagnostics.push(DiagnosticEntry {
range,
diagnostic: Diagnostic {
source: diagnostic.source.clone(),
- code: code.clone(),
+ code: diagnostic.code.clone(),
severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
message: diagnostic.message.trim().to_string(),
group_id,
@@ -7424,7 +7438,7 @@ impl LspStore {
range,
diagnostic: Diagnostic {
source: diagnostic.source.clone(),
- code: code.clone(),
+ code: diagnostic.code.clone(),
severity: DiagnosticSeverity::INFORMATION,
message: info.message.trim().to_string(),
group_id,
@@ -21,14 +21,15 @@ mod project_tests;
mod direnv;
mod environment;
+use diff::BufferDiff;
pub use environment::EnvironmentErrorMessage;
-use git::RepositoryHandle;
+use git::Repository;
pub mod search_history;
mod yarn;
use crate::git::GitState;
use anyhow::{anyhow, Context as _, Result};
-use buffer_store::{BufferChangeSet, BufferStore, BufferStoreEvent};
+use buffer_store::{BufferStore, BufferStoreEvent};
use client::{
proto, Client, Collaborator, PendingEntitySubscription, ProjectId, TypedEnvelope, UserStore,
};
@@ -48,7 +49,6 @@ use ::git::{
blame::Blame,
repository::{Branch, GitRepository, RepoPath},
status::FileStatus,
- COMMIT_MESSAGE,
};
use gpui::{
AnyEntity, App, AppContext as _, AsyncApp, BorrowAppContext, Context, Entity, EventEmitter,
@@ -1956,17 +1956,31 @@ impl Project {
})
}
- pub fn open_unstaged_changes(
+ pub fn open_unstaged_diff(
&mut self,
buffer: Entity<Buffer>,
cx: &mut Context<Self>,
- ) -> Task<Result<Entity<BufferChangeSet>>> {
+ ) -> Task<Result<Entity<BufferDiff>>> {
if self.is_disconnected(cx) {
return Task::ready(Err(anyhow!(ErrorCode::Disconnected)));
}
self.buffer_store.update(cx, |buffer_store, cx| {
- buffer_store.open_unstaged_changes(buffer, cx)
+ buffer_store.open_unstaged_diff(buffer, cx)
+ })
+ }
+
+ pub fn open_uncommitted_diff(
+ &mut self,
+ buffer: Entity<Buffer>,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Entity<BufferDiff>>> {
+ if self.is_disconnected(cx) {
+ return Task::ready(Err(anyhow!(ErrorCode::Disconnected)));
+ }
+
+ self.buffer_store.update(cx, |buffer_store, cx| {
+ buffer_store.open_uncommitted_diff(buffer, cx)
})
}
@@ -1984,12 +1998,15 @@ impl Project {
project_id,
id: id.into(),
});
- cx.spawn(move |this, mut cx| async move {
+ cx.spawn(move |project, mut cx| async move {
let buffer_id = BufferId::new(request.await?.buffer_id)?;
- this.update(&mut cx, |this, cx| {
- this.wait_for_remote_buffer(buffer_id, cx)
- })?
- .await
+ project
+ .update(&mut cx, |project, cx| {
+ project.buffer_store.update(cx, |buffer_store, cx| {
+ buffer_store.wait_for_remote_buffer(buffer_id, cx)
+ })
+ })?
+ .await
})
} else {
Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
@@ -2058,8 +2075,25 @@ impl Project {
return Task::ready(Err(anyhow!(ErrorCode::Disconnected)));
}
- self.image_store.update(cx, |image_store, cx| {
+ let open_image_task = self.image_store.update(cx, |image_store, cx| {
image_store.open_image(path.into(), cx)
+ });
+
+ let weak_project = cx.entity().downgrade();
+ cx.spawn(move |_, mut cx| async move {
+ let image_item = open_image_task.await?;
+ let project = weak_project
+ .upgrade()
+ .ok_or_else(|| anyhow!("Project dropped"))?;
+
+ let metadata =
+ ImageItem::load_image_metadata(image_item.clone(), project, &mut cx).await?;
+ image_item.update(&mut cx, |image_item, cx| {
+ image_item.image_metadata = Some(metadata);
+ cx.emit(ImageItemEvent::MetadataUpdated);
+ })?;
+
+ Ok(image_item)
})
}
@@ -2832,16 +2866,21 @@ impl Project {
let proto_client = ssh_client.read(cx).proto_client();
- cx.spawn(|this, mut cx| async move {
+ cx.spawn(|project, mut cx| async move {
let buffer = proto_client
.request(proto::OpenServerSettings {
project_id: SSH_PROJECT_ID,
})
.await?;
- let buffer = this
- .update(&mut cx, |this, cx| {
- anyhow::Ok(this.wait_for_remote_buffer(BufferId::new(buffer.buffer_id)?, cx))
+ let buffer = project
+ .update(&mut cx, |project, cx| {
+ project.buffer_store.update(cx, |buffer_store, cx| {
+ anyhow::Ok(
+ buffer_store
+ .wait_for_remote_buffer(BufferId::new(buffer.buffer_id)?, cx),
+ )
+ })
})??
.await;
@@ -3172,13 +3211,15 @@ impl Project {
});
let guard = self.retain_remotely_created_models(cx);
- cx.spawn(move |this, mut cx| async move {
+ cx.spawn(move |project, mut cx| async move {
let response = request.await?;
for buffer_id in response.buffer_ids {
let buffer_id = BufferId::new(buffer_id)?;
- let buffer = this
- .update(&mut cx, |this, cx| {
- this.wait_for_remote_buffer(buffer_id, cx)
+ let buffer = project
+ .update(&mut cx, |project, cx| {
+ project.buffer_store.update(cx, |buffer_store, cx| {
+ buffer_store.wait_for_remote_buffer(buffer_id, cx)
+ })
})?
.await?;
let _ = tx.send(buffer).await;
@@ -3984,7 +4025,11 @@ impl Project {
.map(RepoPath::new)
.collect();
- repository_handle.stage_entries(entries).await?;
+ repository_handle
+ .update(&mut cx, |repository_handle, _| {
+ repository_handle.stage_entries(entries)
+ })?
+ .await??;
Ok(proto::Ack {})
}
@@ -4006,7 +4051,11 @@ impl Project {
.map(RepoPath::new)
.collect();
- repository_handle.unstage_entries(entries).await?;
+ repository_handle
+ .update(&mut cx, |repository_handle, _| {
+ repository_handle.unstage_entries(entries)
+ })?
+ .await??;
Ok(proto::Ack {})
}
@@ -4020,9 +4069,14 @@ impl Project {
let repository_handle =
Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let message = SharedString::from(envelope.payload.message);
let name = envelope.payload.name.map(SharedString::from);
let email = envelope.payload.email.map(SharedString::from);
- repository_handle.commit(name.zip(email)).await?;
+ repository_handle
+ .update(&mut cx, |repository_handle, _| {
+ repository_handle.commit(message, name.zip(email))
+ })?
+ .await??;
Ok(proto::Ack {})
}
@@ -4035,55 +4089,12 @@ impl Project {
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
let repository_handle =
Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
- let git_repository = match &repository_handle.git_repo {
- git::GitRepo::Local(git_repository) => git_repository.clone(),
- git::GitRepo::Remote { .. } => {
- anyhow::bail!("Cannot handle open commit message buffer for remote git repo")
- }
- };
- let commit_message_file = git_repository.dot_git_dir().join(*COMMIT_MESSAGE);
- let fs = this.update(&mut cx, |project, _| project.fs().clone())?;
- fs.create_file(
- &commit_message_file,
- CreateOptions {
- overwrite: false,
- ignore_if_exists: true,
- },
- )
- .await
- .with_context(|| format!("creating commit message file {commit_message_file:?}"))?;
-
- let (worktree, relative_path) = this
- .update(&mut cx, |headless_project, cx| {
- headless_project
- .worktree_store
- .update(cx, |worktree_store, cx| {
- worktree_store.find_or_create_worktree(&commit_message_file, false, cx)
- })
- })?
- .await
- .with_context(|| {
- format!("deriving worktree for commit message file {commit_message_file:?}")
- })?;
-
- let buffer = this
- .update(&mut cx, |headless_project, cx| {
- headless_project
- .buffer_store
- .update(cx, |buffer_store, cx| {
- buffer_store.open_buffer(
- ProjectPath {
- worktree_id: worktree.read(cx).id(),
- path: Arc::from(relative_path),
- },
- cx,
- )
- })
- })
- .with_context(|| {
- format!("opening buffer for commit message file {commit_message_file:?}")
+ let buffer = repository_handle
+ .update(&mut cx, |repository_handle, cx| {
+ repository_handle.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
})?
.await?;
+
let peer_id = envelope.original_sender_id()?;
Project::respond_to_open_buffer_request(this, buffer, peer_id, &mut cx)
}
@@ -4093,7 +4104,7 @@ impl Project {
worktree_id: WorktreeId,
work_directory_id: ProjectEntryId,
cx: &mut AsyncApp,
- ) -> Result<RepositoryHandle> {
+ ) -> Result<Entity<Repository>> {
this.update(cx, |project, cx| {
let repository_handle = project
.git_state()
@@ -4101,6 +4112,7 @@ impl Project {
.all_repositories()
.into_iter()
.find(|repository_handle| {
+ let repository_handle = repository_handle.read(cx);
repository_handle.worktree_id == worktree_id
&& repository_handle.repository_entry.work_directory_id()
== work_directory_id
@@ -4146,16 +4158,6 @@ impl Project {
buffer.read(cx).remote_id()
}
- pub fn wait_for_remote_buffer(
- &mut self,
- id: BufferId,
- cx: &mut Context<Self>,
- ) -> Task<Result<Entity<Buffer>>> {
- self.buffer_store.update(cx, |buffer_store, cx| {
- buffer_store.wait_for_remote_buffer(id, cx)
- })
- }
-
fn synchronize_remote_buffers(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
let project_id = match self.client_state {
ProjectClientState::Remote {
@@ -4315,11 +4317,11 @@ impl Project {
&self.git_state
}
- pub fn active_repository(&self, cx: &App) -> Option<RepositoryHandle> {
+ pub fn active_repository(&self, cx: &App) -> Option<Entity<Repository>> {
self.git_state.read(cx).active_repository()
}
- pub fn all_repositories(&self, cx: &App) -> Vec<RepositoryHandle> {
+ pub fn all_repositories(&self, cx: &App) -> Vec<Entity<Repository>> {
self.git_state.read(cx).all_repositories()
}
}
@@ -1,5 +1,5 @@
use crate::{Event, *};
-use ::git::diff::assert_hunks;
+use diff::assert_hunks;
use fs::FakeFs;
use futures::{future, StreamExt};
use gpui::{App, SemanticVersion, UpdateGlobal};
@@ -25,10 +25,7 @@ use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
use task::{ResolvedTask, TaskContext};
use unindent::Unindent as _;
use util::{
- assert_set_eq,
- paths::{replace_path_separator, PathMatcher},
- test::TempTree,
- TryFutureExt as _,
+ assert_set_eq, path, paths::PathMatcher, separator, test::TempTree, uri, TryFutureExt as _,
};
#[gpui::test]
@@ -37,7 +34,10 @@ async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
let (tx, mut rx) = futures::channel::mpsc::unbounded();
let _thread = std::thread::spawn(move || {
+ #[cfg(not(target_os = "windows"))]
std::fs::metadata("/tmp").unwrap();
+ #[cfg(target_os = "windows")]
+ std::fs::metadata("C:/Windows").unwrap();
std::thread::sleep(Duration::from_millis(1000));
tx.unbounded_send(1).unwrap();
});
@@ -199,7 +199,7 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/the-root",
+ path!("/dir"),
json!({
".zed": {
"settings.json": r#"{ "tab_size": 8 }"#,
@@ -227,7 +227,7 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
)
.await;
- let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
let task_context = TaskContext::default();
@@ -280,8 +280,12 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
(
TaskSourceKind::Worktree {
id: worktree_id,
- directory_in_worktree: PathBuf::from("b/.zed"),
- id_base: "local worktree tasks from directory \"b/.zed\"".into(),
+ directory_in_worktree: PathBuf::from(separator!("b/.zed")),
+ id_base: if cfg!(windows) {
+ "local worktree tasks from directory \"b\\\\.zed\"".into()
+ } else {
+ "local worktree tasks from directory \"b/.zed\"".into()
+ },
},
"cargo check".to_string(),
vec!["check".to_string()],
@@ -359,8 +363,12 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
(
TaskSourceKind::Worktree {
id: worktree_id,
- directory_in_worktree: PathBuf::from("b/.zed"),
- id_base: "local worktree tasks from directory \"b/.zed\"".into(),
+ directory_in_worktree: PathBuf::from(separator!("b/.zed")),
+ id_base: if cfg!(windows) {
+ "local worktree tasks from directory \"b\\\\.zed\"".into()
+ } else {
+ "local worktree tasks from directory \"b/.zed\"".into()
+ },
},
"cargo check".to_string(),
vec!["check".to_string()],
@@ -392,7 +400,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/the-root",
+ path!("/dir"),
json!({
"test.rs": "const A: i32 = 1;",
"test2.rs": "",
@@ -402,7 +410,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
)
.await;
- let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
let mut fake_rust_servers = language_registry.register_fake_lsp(
@@ -449,7 +457,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
// Open a buffer without an associated language server.
let (toml_buffer, _handle) = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/the-root/Cargo.toml", cx)
+ project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
})
.await
.unwrap();
@@ -457,7 +465,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
// Open a buffer with an associated language server before the language for it has been loaded.
let (rust_buffer, _handle2) = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/the-root/test.rs", cx)
+ project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
})
.await
.unwrap();
@@ -482,7 +490,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::TextDocumentItem {
- uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
+ uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
version: 0,
text: "const A: i32 = 1;".to_string(),
language_id: "rust".to_string(),
@@ -512,7 +520,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::VersionedTextDocumentIdentifier::new(
- lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
+ lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
1
)
);
@@ -520,7 +528,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
// Open a third buffer with a different associated language server.
let (json_buffer, _json_handle) = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/the-root/package.json", cx)
+ project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
})
.await
.unwrap();
@@ -533,7 +541,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::TextDocumentItem {
- uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
+ uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
version: 0,
text: "{\"a\": 1}".to_string(),
language_id: "json".to_string(),
@@ -557,7 +565,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
// it is also configured based on the existing language server's capabilities.
let (rust_buffer2, _handle4) = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/the-root/test2.rs", cx)
+ project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
})
.await
.unwrap();
@@ -583,7 +591,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::VersionedTextDocumentIdentifier::new(
- lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
+ lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
1
)
);
@@ -598,20 +606,24 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.receive_notification::<lsp::notification::DidSaveTextDocument>()
.await
.text_document,
- lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
+ lsp::TextDocumentIdentifier::new(
+ lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
+ )
);
assert_eq!(
fake_json_server
.receive_notification::<lsp::notification::DidSaveTextDocument>()
.await
.text_document,
- lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
+ lsp::TextDocumentIdentifier::new(
+ lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
+ )
);
// Renames are reported only to servers matching the buffer's language.
fs.rename(
- Path::new("/the-root/test2.rs"),
- Path::new("/the-root/test3.rs"),
+ Path::new(path!("/dir/test2.rs")),
+ Path::new(path!("/dir/test3.rs")),
Default::default(),
)
.await
@@ -621,7 +633,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.receive_notification::<lsp::notification::DidCloseTextDocument>()
.await
.text_document,
- lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
+ lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
);
assert_eq!(
fake_rust_server
@@ -629,7 +641,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::TextDocumentItem {
- uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
+ uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
version: 0,
text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
language_id: "rust".to_string(),
@@ -660,8 +672,8 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
// When the rename changes the extension of the file, the buffer gets closed on the old
// language server and gets opened on the new one.
fs.rename(
- Path::new("/the-root/test3.rs"),
- Path::new("/the-root/test3.json"),
+ Path::new(path!("/dir/test3.rs")),
+ Path::new(path!("/dir/test3.json")),
Default::default(),
)
.await
@@ -671,7 +683,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.receive_notification::<lsp::notification::DidCloseTextDocument>()
.await
.text_document,
- lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
+ lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
);
assert_eq!(
fake_json_server
@@ -679,7 +691,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::TextDocumentItem {
- uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
+ uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
version: 0,
text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
language_id: "json".to_string(),
@@ -705,7 +717,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::VersionedTextDocumentIdentifier::new(
- lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
+ lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
1
)
);
@@ -734,7 +746,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::TextDocumentItem {
- uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
+ uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
version: 0,
text: rust_buffer.update(cx, |buffer, _| buffer.text()),
language_id: "rust".to_string(),
@@ -755,13 +767,13 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
],
[
lsp::TextDocumentItem {
- uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
+ uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
version: 0,
text: json_buffer.update(cx, |buffer, _| buffer.text()),
language_id: "json".to_string(),
},
lsp::TextDocumentItem {
- uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
+ uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
version: 0,
text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
language_id: "json".to_string(),
@@ -773,7 +785,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
cx.update(|_| drop(_json_handle));
let close_message = lsp::DidCloseTextDocumentParams {
text_document: lsp::TextDocumentIdentifier::new(
- lsp::Url::from_file_path("/the-root/package.json").unwrap(),
+ lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
),
};
assert_eq!(
@@ -786,19 +798,11 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
- fn add_root_for_windows(path: &str) -> String {
- if cfg!(windows) {
- format!("C:{}", path)
- } else {
- path.to_string()
- }
- }
-
init_test(cx);
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- add_root_for_windows("/the-root"),
+ path!("/the-root"),
json!({
".gitignore": "target\n",
"src": {
@@ -826,7 +830,7 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
)
.await;
- let project = Project::test(fs.clone(), [add_root_for_windows("/the-root").as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
let mut fake_servers = language_registry.register_fake_lsp(
@@ -842,7 +846,7 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
// Start the language server by opening a buffer with a compatible file extension.
let _ = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp(add_root_for_windows("/the-root/src/a.rs"), cx)
+ project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
})
.await
.unwrap();
@@ -882,21 +886,21 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
lsp::DidChangeWatchedFilesRegistrationOptions {
watchers: vec![
lsp::FileSystemWatcher {
- glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
- "/the-root/Cargo.toml",
- )),
+ glob_pattern: lsp::GlobPattern::String(
+ path!("/the-root/Cargo.toml").to_string(),
+ ),
kind: None,
},
lsp::FileSystemWatcher {
- glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
- "/the-root/src/*.{rs,c}",
- )),
+ glob_pattern: lsp::GlobPattern::String(
+ path!("/the-root/src/*.{rs,c}").to_string(),
+ ),
kind: None,
},
lsp::FileSystemWatcher {
- glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
- "/the-root/target/y/**/*.rs",
- )),
+ glob_pattern: lsp::GlobPattern::String(
+ path!("/the-root/target/y/**/*.rs").to_string(),
+ ),
kind: None,
},
],
@@ -949,32 +953,23 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
// Perform some file system mutations, two of which match the watched patterns,
// and one of which does not.
+ fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
+ .await
+ .unwrap();
+ fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
+ .await
+ .unwrap();
+ fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
+ .await
+ .unwrap();
fs.create_file(
- add_root_for_windows("/the-root/src/c.rs").as_ref(),
- Default::default(),
- )
- .await
- .unwrap();
- fs.create_file(
- add_root_for_windows("/the-root/src/d.txt").as_ref(),
- Default::default(),
- )
- .await
- .unwrap();
- fs.remove_file(
- add_root_for_windows("/the-root/src/b.rs").as_ref(),
- Default::default(),
- )
- .await
- .unwrap();
- fs.create_file(
- add_root_for_windows("/the-root/target/x/out/x2.rs").as_ref(),
+ path!("/the-root/target/x/out/x2.rs").as_ref(),
Default::default(),
)
.await
.unwrap();
fs.create_file(
- add_root_for_windows("/the-root/target/y/out/y2.rs").as_ref(),
+ path!("/the-root/target/y/out/y2.rs").as_ref(),
Default::default(),
)
.await
@@ -986,16 +981,15 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
&*file_changes.lock(),
&[
lsp::FileEvent {
- uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/src/b.rs")).unwrap(),
+ uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
typ: lsp::FileChangeType::DELETED,
},
lsp::FileEvent {
- uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/src/c.rs")).unwrap(),
+ uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
typ: lsp::FileChangeType::CREATED,
},
lsp::FileEvent {
- uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/target/y/out/y2.rs"))
- .unwrap(),
+ uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
typ: lsp::FileChangeType::CREATED,
},
]
@@ -1008,7 +1002,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"a.rs": "let a = 1;",
"b.rs": "let b = 2;"
@@ -1016,15 +1010,24 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
)
.await;
- let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
+ let project = Project::test(
+ fs,
+ [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
+ cx,
+ )
+ .await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let buffer_a = project
- .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/dir/a.rs"), cx)
+ })
.await
.unwrap();
let buffer_b = project
- .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/dir/b.rs"), cx)
+ })
.await
.unwrap();
@@ -1033,7 +1036,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
.update_diagnostics(
LanguageServerId(0),
lsp::PublishDiagnosticsParams {
- uri: Url::from_file_path("/dir/a.rs").unwrap(),
+ uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
@@ -1050,7 +1053,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
.update_diagnostics(
LanguageServerId(0),
lsp::PublishDiagnosticsParams {
- uri: Url::from_file_path("/dir/b.rs").unwrap(),
+ uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
@@ -1101,7 +1104,7 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/root",
+ path!("/root"),
json!({
"dir": {
".git": {
@@ -1116,11 +1119,11 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
)
.await;
- let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let (worktree, _) = project
.update(cx, |project, cx| {
- project.find_or_create_worktree("/root/dir", true, cx)
+ project.find_or_create_worktree(path!("/root/dir"), true, cx)
})
.await
.unwrap();
@@ -1128,7 +1131,7 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
let (worktree, _) = project
.update(cx, |project, cx| {
- project.find_or_create_worktree("/root/other.rs", false, cx)
+ project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
})
.await
.unwrap();
@@ -1140,7 +1143,7 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
.update_diagnostics(
server_id,
lsp::PublishDiagnosticsParams {
- uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
+ uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
@@ -1157,7 +1160,7 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
.update_diagnostics(
server_id,
lsp::PublishDiagnosticsParams {
- uri: Url::from_file_path("/root/other.rs").unwrap(),
+ uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
@@ -1244,7 +1247,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"a.rs": "fn a() { A }",
"b.rs": "const y: i32 = 1",
@@ -1252,7 +1255,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
)
.await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
@@ -1270,7 +1273,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
// Cause worktree to start the fake language server
let _ = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/dir/b.rs", cx)
+ project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
})
.await
.unwrap();
@@ -1299,7 +1302,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
);
fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
- uri: Url::from_file_path("/dir/a.rs").unwrap(),
+ uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
@@ -1325,7 +1328,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
);
let buffer = project
- .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
+ .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
.await
.unwrap();
@@ -1351,7 +1354,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
// Ensure publishing empty diagnostics twice only results in one update event.
fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
- uri: Url::from_file_path("/dir/a.rs").unwrap(),
+ uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
version: None,
diagnostics: Default::default(),
});
@@ -1364,7 +1367,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
);
fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
- uri: Url::from_file_path("/dir/a.rs").unwrap(),
+ uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
version: None,
diagnostics: Default::default(),
});
@@ -1379,9 +1382,9 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC
let progress_token = "the-progress-token";
let fs = FakeFs::new(cx.executor());
- fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
+ fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
@@ -1399,7 +1402,7 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC
let (buffer, _handle) = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/dir/a.rs", cx)
+ project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
})
.await
.unwrap();
@@ -1465,9 +1468,9 @@ async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAp
init_test(cx);
let fs = FakeFs::new(cx.executor());
- fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
+ fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
@@ -1475,7 +1478,7 @@ async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAp
let (buffer, _) = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/dir/a.rs", cx)
+ project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
})
.await
.unwrap();
@@ -1483,7 +1486,7 @@ async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAp
// Publish diagnostics
let fake_server = fake_servers.next().await.unwrap();
fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
- uri: Url::from_file_path("/dir/a.rs").unwrap(),
+ uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
@@ -1546,9 +1549,9 @@ async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::T
init_test(cx);
let fs = FakeFs::new(cx.executor());
- fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
+ fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
@@ -1556,7 +1559,7 @@ async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::T
let (buffer, _handle) = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/dir/a.rs", cx)
+ project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
})
.await
.unwrap();
@@ -1564,7 +1567,7 @@ async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::T
// Before restarting the server, report diagnostics with an unknown buffer version.
let fake_server = fake_servers.next().await.unwrap();
fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
+ uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
version: Some(10000),
diagnostics: Vec::new(),
});
@@ -1588,9 +1591,9 @@ async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
let progress_token = "the-progress-token";
let fs = FakeFs::new(cx.executor());
- fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
+ fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
@@ -1606,7 +1609,7 @@ async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
let (buffer, _handle) = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/dir/a.rs", cx)
+ project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
})
.await
.unwrap();
@@ -1651,10 +1654,10 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
- fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
+ fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
.await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
let mut fake_rust_servers = language_registry.register_fake_lsp(
@@ -1676,13 +1679,13 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
let _rs_buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/dir/a.rs", cx)
+ project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
})
.await
.unwrap();
let _js_buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/dir/b.js", cx)
+ project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
})
.await
.unwrap();
@@ -1695,7 +1698,7 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
.text_document
.uri
.as_str(),
- "file:///dir/a.rs"
+ uri!("file:///dir/a.rs")
);
let mut fake_js_server = fake_js_servers.next().await.unwrap();
@@ -1706,7 +1709,7 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
.text_document
.uri
.as_str(),
- "file:///dir/b.js"
+ uri!("file:///dir/b.js")
);
// Disable Rust language server, ensuring only that server gets stopped.
@@ -1757,7 +1760,7 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
.text_document
.uri
.as_str(),
- "file:///dir/a.rs"
+ uri!("file:///dir/a.rs")
);
fake_js_server
.receive_notification::<lsp::notification::Exit>()
@@ -1776,9 +1779,9 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
.unindent();
let fs = FakeFs::new(cx.executor());
- fs.insert_tree("/dir", json!({ "a.rs": text })).await;
+ fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
@@ -1792,7 +1795,9 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
);
let buffer = project
- .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/dir/a.rs"), cx)
+ })
.await
.unwrap();
@@ -1814,7 +1819,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
// Report some diagnostics for the initial version of the buffer
fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
+ uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
version: Some(open_notification.text_document.version),
diagnostics: vec![
lsp::Diagnostic {
@@ -1900,7 +1905,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
// Ensure overlapping diagnostics are highlighted correctly.
fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
+ uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
version: Some(open_notification.text_document.version),
diagnostics: vec![
lsp::Diagnostic {
@@ -1992,7 +1997,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
// Handle out-of-order diagnostics
fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
+ uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
version: Some(change_notification_2.text_document.version),
diagnostics: vec![
lsp::Diagnostic {
@@ -2198,14 +2203,14 @@ async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"a.rs": text.clone(),
}),
)
.await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
@@ -2214,7 +2219,7 @@ async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
let (buffer, _handle) = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/dir/a.rs", cx)
+ project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
})
.await
.unwrap();
@@ -2351,17 +2356,19 @@ async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAp
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"a.rs": text.clone(),
}),
)
.await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let buffer = project
- .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/dir/a.rs"), cx)
+ })
.await
.unwrap();
@@ -2460,17 +2467,19 @@ async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"a.rs": text.clone(),
}),
)
.await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let buffer = project
- .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/dir/a.rs"), cx)
+ })
.await
.unwrap();
@@ -2571,7 +2580,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"a.rs": "const fn a() { A }",
"b.rs": "const y: i32 = crate::a()",
@@ -2579,7 +2588,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
)
.await;
- let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
@@ -2587,7 +2596,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
let (buffer, _handle) = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/dir/b.rs", cx)
+ project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
})
.await
.unwrap();
@@ -2597,13 +2606,13 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
let params = params.text_document_position_params;
assert_eq!(
params.text_document.uri.to_file_path().unwrap(),
- Path::new("/dir/b.rs"),
+ Path::new(path!("/dir/b.rs")),
);
assert_eq!(params.position, lsp::Position::new(0, 22));
Ok(Some(lsp::GotoDefinitionResponse::Scalar(
lsp::Location::new(
- lsp::Url::from_file_path("/dir/a.rs").unwrap(),
+ lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
),
)))
@@ -2629,18 +2638,24 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
.as_local()
.unwrap()
.abs_path(cx),
- Path::new("/dir/a.rs"),
+ Path::new(path!("/dir/a.rs")),
);
assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
assert_eq!(
list_worktrees(&project, cx),
- [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
+ [
+ (path!("/dir/a.rs").as_ref(), false),
+ (path!("/dir/b.rs").as_ref(), true)
+ ],
);
drop(definition);
});
cx.update(|cx| {
- assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
+ assert_eq!(
+ list_worktrees(&project, cx),
+ [(path!("/dir/b.rs").as_ref(), true)]
+ );
});
fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
@@ -2664,14 +2679,14 @@ async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"a.ts": "",
}),
)
.await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(typescript_lang());
@@ -2690,7 +2705,9 @@ async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
);
let (buffer, _handle) = project
- .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
+ .update(cx, |p, cx| {
+ p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
+ })
.await
.unwrap();
@@ -2756,14 +2773,14 @@ async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"a.ts": "",
}),
)
.await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(typescript_lang());
@@ -2782,7 +2799,9 @@ async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
);
let (buffer, _handle) = project
- .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
+ .update(cx, |p, cx| {
+ p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
+ })
.await
.unwrap();
@@ -2817,14 +2836,14 @@ async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"a.ts": "a",
}),
)
.await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(typescript_lang());
@@ -18,7 +18,7 @@ use task::{
ResolvedTask, TaskContext, TaskId, TaskTemplate, TaskTemplates, TaskVariables, VariableName,
};
use text::{Point, ToPoint};
-use util::{post_inc, NumericPrefixWithSuffix, ResultExt as _};
+use util::{paths::PathExt as _, post_inc, NumericPrefixWithSuffix, ResultExt as _};
use worktree::WorktreeId;
use crate::worktree_store::WorktreeStore;
@@ -470,7 +470,7 @@ impl ContextProvider for BasicContextProvider {
let current_file = buffer
.file()
.and_then(|file| file.as_local())
- .map(|file| file.abs_path(cx).to_string_lossy().to_string());
+ .map(|file| file.abs_path(cx).to_sanitized_string());
let Point { row, column } = location.range.start.to_point(&buffer_snapshot);
let row = row + 1;
let column = column + 1;
@@ -502,14 +502,14 @@ impl ContextProvider for BasicContextProvider {
if let Some(Some(worktree_path)) = worktree_root_dir {
task_variables.insert(
VariableName::WorktreeRoot,
- worktree_path.to_string_lossy().to_string(),
+ worktree_path.to_sanitized_string(),
);
if let Some(full_path) = current_file.as_ref() {
let relative_path = pathdiff::diff_paths(full_path, worktree_path);
if let Some(relative_path) = relative_path {
task_variables.insert(
VariableName::RelativeFile,
- relative_path.to_string_lossy().into_owned(),
+ relative_path.to_sanitized_string(),
);
}
}
@@ -162,12 +162,14 @@ struct EntryDetails {
}
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema)]
+#[serde(deny_unknown_fields)]
struct Delete {
#[serde(default)]
pub skip_prompt: bool,
}
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema)]
+#[serde(deny_unknown_fields)]
struct Trash {
#[serde(default)]
pub skip_prompt: bool,
@@ -1106,8 +1108,13 @@ impl ProjectPanel {
let worktree_id = edit_state.worktree_id;
let is_new_entry = edit_state.is_new_entry();
let filename = self.filename_editor.read(cx).text(cx);
- edit_state.is_dir = edit_state.is_dir
- || (edit_state.is_new_entry() && filename.ends_with(std::path::MAIN_SEPARATOR));
+ #[cfg(not(target_os = "windows"))]
+ let filename_indicates_dir = filename.ends_with("/");
+ // On Windows, path separator could be either `/` or `\`.
+ #[cfg(target_os = "windows")]
+ let filename_indicates_dir = filename.ends_with("/") || filename.ends_with("\\");
+ edit_state.is_dir =
+ edit_state.is_dir || (edit_state.is_new_entry() && filename_indicates_dir);
let is_dir = edit_state.is_dir;
let worktree = self.project.read(cx).worktree_for_id(worktree_id, cx)?;
let entry = worktree.read(cx).entry_for_id(edit_state.entry_id)?.clone();
@@ -1861,7 +1868,7 @@ impl ProjectPanel {
) {
let selection = self.find_entry(
self.selection.as_ref(),
- true,
+ false,
|entry, worktree_id| {
(self.selection.is_none()
|| self.selection.is_some_and(|selection| {
@@ -4793,6 +4800,7 @@ mod tests {
use serde_json::json;
use settings::SettingsStore;
use std::path::{Path, PathBuf};
+ use util::{path, separator};
use workspace::{
item::{Item, ProjectItem},
register_project_item, AppState,
@@ -4894,7 +4902,7 @@ mod tests {
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
- "/src",
+ path!("/src"),
json!({
"test": {
"first.rs": "// First Rust file",
@@ -4905,7 +4913,7 @@ mod tests {
)
.await;
- let project = Project::test(fs.clone(), ["/src".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/src").as_ref()], cx).await;
let workspace =
cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace, cx);
@@ -5066,7 +5074,7 @@ mod tests {
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
- "/root1",
+ path!("/root1"),
json!({
"dir_1": {
"nested_dir_1": {
@@ -5088,7 +5096,7 @@ mod tests {
)
.await;
fs.insert_tree(
- "/root2",
+ path!("/root2"),
json!({
"dir_2": {
"file_1.java": "// File contents",
@@ -5097,7 +5105,12 @@ mod tests {
)
.await;
- let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
+ let project = Project::test(
+ fs.clone(),
+ [path!("/root1").as_ref(), path!("/root2").as_ref()],
+ cx,
+ )
+ .await;
let workspace =
cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace, cx);
@@ -5115,10 +5128,10 @@ mod tests {
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&[
- "v root1",
- " > dir_1/nested_dir_1/nested_dir_2/nested_dir_3",
- "v root2",
- " > dir_2",
+ separator!("v root1"),
+ separator!(" > dir_1/nested_dir_1/nested_dir_2/nested_dir_3"),
+ separator!("v root2"),
+ separator!(" > dir_2"),
]
);
@@ -5130,14 +5143,14 @@ mod tests {
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&[
- "v root1",
- " v dir_1/nested_dir_1/nested_dir_2/nested_dir_3 <== selected",
- " > nested_dir_4/nested_dir_5",
- " file_a.java",
- " file_b.java",
- " file_c.java",
- "v root2",
- " > dir_2",
+ separator!("v root1"),
+ separator!(" v dir_1/nested_dir_1/nested_dir_2/nested_dir_3 <== selected"),
+ separator!(" > nested_dir_4/nested_dir_5"),
+ separator!(" file_a.java"),
+ separator!(" file_b.java"),
+ separator!(" file_c.java"),
+ separator!("v root2"),
+ separator!(" > dir_2"),
]
);
@@ -5149,31 +5162,31 @@ mod tests {
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&[
- "v root1",
- " v dir_1/nested_dir_1/nested_dir_2/nested_dir_3",
- " v nested_dir_4/nested_dir_5 <== selected",
- " file_d.java",
- " file_a.java",
- " file_b.java",
- " file_c.java",
- "v root2",
- " > dir_2",
+ separator!("v root1"),
+ separator!(" v dir_1/nested_dir_1/nested_dir_2/nested_dir_3"),
+ separator!(" v nested_dir_4/nested_dir_5 <== selected"),
+ separator!(" file_d.java"),
+ separator!(" file_a.java"),
+ separator!(" file_b.java"),
+ separator!(" file_c.java"),
+ separator!("v root2"),
+ separator!(" > dir_2"),
]
);
toggle_expand_dir(&panel, "root2/dir_2", cx);
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&[
- "v root1",
- " v dir_1/nested_dir_1/nested_dir_2/nested_dir_3",
- " v nested_dir_4/nested_dir_5",
- " file_d.java",
- " file_a.java",
- " file_b.java",
- " file_c.java",
- "v root2",
- " v dir_2 <== selected",
- " file_1.java",
+ separator!("v root1"),
+ separator!(" v dir_1/nested_dir_1/nested_dir_2/nested_dir_3"),
+ separator!(" v nested_dir_4/nested_dir_5"),
+ separator!(" file_d.java"),
+ separator!(" file_a.java"),
+ separator!(" file_b.java"),
+ separator!(" file_c.java"),
+ separator!("v root2"),
+ separator!(" v dir_2 <== selected"),
+ separator!(" file_1.java"),
]
);
}
@@ -5682,7 +5695,7 @@ mod tests {
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
- "/root1",
+ path!("/root1"),
json!({
".dockerignore": "",
".git": {
@@ -5692,7 +5705,7 @@ mod tests {
)
.await;
- let project = Project::test(fs.clone(), ["/root1".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/root1").as_ref()], cx).await;
let workspace =
cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace, cx);
@@ -5727,9 +5740,10 @@ mod tests {
);
let confirm = panel.update_in(cx, |panel, window, cx| {
+ // If we want to create a subdirectory, there should be no prefix slash.
panel
.filename_editor
- .update(cx, |editor, cx| editor.set_text("/new_dir/", window, cx));
+ .update(cx, |editor, cx| editor.set_text("new_dir/", window, cx));
panel.confirm_edit(window, cx).unwrap()
});
@@ -5738,14 +5752,14 @@ mod tests {
&[
"v root1",
" > .git",
- " [PROCESSING: '/new_dir/'] <== selected",
+ " [PROCESSING: 'new_dir/'] <== selected",
" .dockerignore",
]
);
confirm.await.unwrap();
assert_eq!(
- visible_entries_as_strings(&panel, 0..13, cx),
+ visible_entries_as_strings(&panel, 0..10, cx),
&[
"v root1",
" > .git",
@@ -5753,6 +5767,54 @@ mod tests {
" .dockerignore",
]
);
+
+ // Test filename with whitespace
+ select_path(&panel, "root1", cx);
+ panel.update_in(cx, |panel, window, cx| panel.new_file(&NewFile, window, cx));
+ let confirm = panel.update_in(cx, |panel, window, cx| {
+ // If we want to create a subdirectory, there should be no prefix slash.
+ panel
+ .filename_editor
+ .update(cx, |editor, cx| editor.set_text("new dir 2/", window, cx));
+ panel.confirm_edit(window, cx).unwrap()
+ });
+ confirm.await.unwrap();
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..10, cx),
+ &[
+ "v root1",
+ " > .git",
+ " v new dir 2 <== selected",
+ " v new_dir",
+ " .dockerignore",
+ ]
+ );
+
+ // Test filename ends with "\"
+ #[cfg(target_os = "windows")]
+ {
+ select_path(&panel, "root1", cx);
+ panel.update_in(cx, |panel, window, cx| panel.new_file(&NewFile, window, cx));
+ let confirm = panel.update_in(cx, |panel, window, cx| {
+ // If we want to create a subdirectory, there should be no prefix slash.
+ panel
+ .filename_editor
+ .update(cx, |editor, cx| editor.set_text("new_dir_3\\", window, cx));
+ panel.confirm_edit(window, cx).unwrap()
+ });
+ confirm.await.unwrap();
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..10, cx),
+ &[
+ "v root1",
+ " > .git",
+ " v new dir 2",
+ " v new_dir",
+ " v new_dir_3 <== selected",
+ " .dockerignore",
+ ]
+ );
+ }
}
#[gpui::test]
@@ -6409,7 +6471,7 @@ mod tests {
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
- "/src",
+ path!("/src"),
json!({
"test": {
"first.rs": "// First Rust file",
@@ -6420,7 +6482,7 @@ mod tests {
)
.await;
- let project = Project::test(fs.clone(), ["/src".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/src").as_ref()], cx).await;
let workspace =
cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace, cx);
@@ -6666,6 +6728,286 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_select_git_entry(cx: &mut gpui::TestAppContext) {
+ use git::status::{FileStatus, StatusCode, TrackedStatus};
+ use std::path::Path;
+
+ init_test_with_editor(cx);
+
+ let fs = FakeFs::new(cx.executor().clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "tree1": {
+ ".git": {},
+ "dir1": {
+ "modified1.txt": "",
+ "unmodified1.txt": "",
+ "modified2.txt": "",
+ },
+ "dir2": {
+ "modified3.txt": "",
+ "unmodified2.txt": "",
+ },
+ "modified4.txt": "",
+ "unmodified3.txt": "",
+ },
+ "tree2": {
+ ".git": {},
+ "dir3": {
+ "modified5.txt": "",
+ "unmodified4.txt": "",
+ },
+ "modified6.txt": "",
+ "unmodified5.txt": "",
+ }
+ }),
+ )
+ .await;
+
+ // Mark files as git modified
+ let tree1_modified_files = [
+ "dir1/modified1.txt",
+ "dir1/modified2.txt",
+ "modified4.txt",
+ "dir2/modified3.txt",
+ ];
+
+ let tree2_modified_files = ["dir3/modified5.txt", "modified6.txt"];
+
+ let root1_dot_git = Path::new("/root/tree1/.git");
+ let root2_dot_git = Path::new("/root/tree2/.git");
+ let set_value = FileStatus::Tracked(TrackedStatus {
+ index_status: StatusCode::Modified,
+ worktree_status: StatusCode::Modified,
+ });
+
+ fs.with_git_state(&root1_dot_git, true, |git_repo_state| {
+ for file_path in tree1_modified_files {
+ git_repo_state.statuses.insert(file_path.into(), set_value);
+ }
+ });
+
+ fs.with_git_state(&root2_dot_git, true, |git_repo_state| {
+ for file_path in tree2_modified_files {
+ git_repo_state.statuses.insert(file_path.into(), set_value);
+ }
+ });
+
+ let project = Project::test(
+ fs.clone(),
+ ["/root/tree1".as_ref(), "/root/tree2".as_ref()],
+ cx,
+ )
+ .await;
+ let workspace =
+ cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
+ let cx = &mut VisualTestContext::from_window(*workspace, cx);
+ let panel = workspace.update(cx, ProjectPanel::new).unwrap();
+
+ // Check initial state
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..15, cx),
+ &[
+ "v tree1",
+ " > .git",
+ " > dir1",
+ " > dir2",
+ " modified4.txt",
+ " unmodified3.txt",
+ "v tree2",
+ " > .git",
+ " > dir3",
+ " modified6.txt",
+ " unmodified5.txt"
+ ],
+ );
+
+ // Test selecting next modified entry
+ panel.update_in(cx, |panel, window, cx| {
+ panel.select_next_git_entry(&SelectNextGitEntry, window, cx);
+ });
+
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..6, cx),
+ &[
+ "v tree1",
+ " > .git",
+ " v dir1",
+ " modified1.txt <== selected",
+ " modified2.txt",
+ " unmodified1.txt",
+ ],
+ );
+
+ panel.update_in(cx, |panel, window, cx| {
+ panel.select_next_git_entry(&SelectNextGitEntry, window, cx);
+ });
+
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..6, cx),
+ &[
+ "v tree1",
+ " > .git",
+ " v dir1",
+ " modified1.txt",
+ " modified2.txt <== selected",
+ " unmodified1.txt",
+ ],
+ );
+
+ panel.update_in(cx, |panel, window, cx| {
+ panel.select_next_git_entry(&SelectNextGitEntry, window, cx);
+ });
+
+ assert_eq!(
+ visible_entries_as_strings(&panel, 6..9, cx),
+ &[
+ " v dir2",
+ " modified3.txt <== selected",
+ " unmodified2.txt",
+ ],
+ );
+
+ panel.update_in(cx, |panel, window, cx| {
+ panel.select_next_git_entry(&SelectNextGitEntry, window, cx);
+ });
+
+ assert_eq!(
+ visible_entries_as_strings(&panel, 9..11, cx),
+ &[" modified4.txt <== selected", " unmodified3.txt",],
+ );
+
+ panel.update_in(cx, |panel, window, cx| {
+ panel.select_next_git_entry(&SelectNextGitEntry, window, cx);
+ });
+
+ assert_eq!(
+ visible_entries_as_strings(&panel, 13..16, cx),
+ &[
+ " v dir3",
+ " modified5.txt <== selected",
+ " unmodified4.txt",
+ ],
+ );
+
+ panel.update_in(cx, |panel, window, cx| {
+ panel.select_next_git_entry(&SelectNextGitEntry, window, cx);
+ });
+
+ assert_eq!(
+ visible_entries_as_strings(&panel, 16..18, cx),
+ &[" modified6.txt <== selected", " unmodified5.txt",],
+ );
+
+ // Wraps around to first modified file
+ panel.update_in(cx, |panel, window, cx| {
+ panel.select_next_git_entry(&SelectNextGitEntry, window, cx);
+ });
+
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..18, cx),
+ &[
+ "v tree1",
+ " > .git",
+ " v dir1",
+ " modified1.txt <== selected",
+ " modified2.txt",
+ " unmodified1.txt",
+ " v dir2",
+ " modified3.txt",
+ " unmodified2.txt",
+ " modified4.txt",
+ " unmodified3.txt",
+ "v tree2",
+ " > .git",
+ " v dir3",
+ " modified5.txt",
+ " unmodified4.txt",
+ " modified6.txt",
+ " unmodified5.txt",
+ ],
+ );
+
+ // Wraps around again to last modified file
+ panel.update_in(cx, |panel, window, cx| {
+ panel.select_prev_git_entry(&SelectPrevGitEntry, window, cx);
+ });
+
+ assert_eq!(
+ visible_entries_as_strings(&panel, 16..18, cx),
+ &[" modified6.txt <== selected", " unmodified5.txt",],
+ );
+
+ panel.update_in(cx, |panel, window, cx| {
+ panel.select_prev_git_entry(&SelectPrevGitEntry, window, cx);
+ });
+
+ assert_eq!(
+ visible_entries_as_strings(&panel, 13..16, cx),
+ &[
+ " v dir3",
+ " modified5.txt <== selected",
+ " unmodified4.txt",
+ ],
+ );
+
+ panel.update_in(cx, |panel, window, cx| {
+ panel.select_prev_git_entry(&SelectPrevGitEntry, window, cx);
+ });
+
+ assert_eq!(
+ visible_entries_as_strings(&panel, 9..11, cx),
+ &[" modified4.txt <== selected", " unmodified3.txt",],
+ );
+
+ panel.update_in(cx, |panel, window, cx| {
+ panel.select_prev_git_entry(&SelectPrevGitEntry, window, cx);
+ });
+
+ assert_eq!(
+ visible_entries_as_strings(&panel, 6..9, cx),
+ &[
+ " v dir2",
+ " modified3.txt <== selected",
+ " unmodified2.txt",
+ ],
+ );
+
+ panel.update_in(cx, |panel, window, cx| {
+ panel.select_prev_git_entry(&SelectPrevGitEntry, window, cx);
+ });
+
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..6, cx),
+ &[
+ "v tree1",
+ " > .git",
+ " v dir1",
+ " modified1.txt",
+ " modified2.txt <== selected",
+ " unmodified1.txt",
+ ],
+ );
+
+ panel.update_in(cx, |panel, window, cx| {
+ panel.select_prev_git_entry(&SelectPrevGitEntry, window, cx);
+ });
+
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..6, cx),
+ &[
+ "v tree1",
+ " > .git",
+ " v dir1",
+ " modified1.txt <== selected",
+ " modified2.txt",
+ " unmodified1.txt",
+ ],
+ );
+ }
+
#[gpui::test]
async fn test_select_directory(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
@@ -8545,7 +8887,7 @@ mod tests {
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
- "/root",
+ path!("/root"),
json!({
".gitignore": "**/ignored_dir\n**/ignored_nested",
"dir1": {
@@ -8573,7 +8915,7 @@ mod tests {
)
.await;
- let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
let workspace =
cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace, cx);
@@ -8602,12 +8944,12 @@ mod tests {
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
- "v root",
- " v dir1 <== selected",
- " > empty1/empty2/empty3",
- " > ignored_dir",
- " > subdir1",
- " .gitignore",
+ separator!("v root"),
+ separator!(" v dir1 <== selected"),
+ separator!(" > empty1/empty2/empty3"),
+ separator!(" > ignored_dir"),
+ separator!(" > subdir1"),
+ separator!(" .gitignore"),
],
"Should show first level with auto-folded dirs and ignored dir visible"
);
@@ -8624,18 +8966,18 @@ mod tests {
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
- "v root",
- " v dir1 <== selected",
- " v empty1",
- " v empty2",
- " v empty3",
- " file.txt",
- " > ignored_dir",
- " v subdir1",
- " > ignored_nested",
- " file1.txt",
- " file2.txt",
- " .gitignore",
+ separator!("v root"),
+ separator!(" v dir1 <== selected"),
+ separator!(" v empty1"),
+ separator!(" v empty2"),
+ separator!(" v empty3"),
+ separator!(" file.txt"),
+ separator!(" > ignored_dir"),
+ separator!(" v subdir1"),
+ separator!(" > ignored_nested"),
+ separator!(" file1.txt"),
+ separator!(" file2.txt"),
+ separator!(" .gitignore"),
],
"After expand_all with auto-fold: should not expand ignored_dir, should expand folded dirs, and should not expand ignored_nested"
);
@@ -8660,12 +9002,12 @@ mod tests {
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
- "v root",
- " v dir1 <== selected",
- " > empty1",
- " > ignored_dir",
- " > subdir1",
- " .gitignore",
+ separator!("v root"),
+ separator!(" v dir1 <== selected"),
+ separator!(" > empty1"),
+ separator!(" > ignored_dir"),
+ separator!(" > subdir1"),
+ separator!(" .gitignore"),
],
"With auto-fold disabled: should show all directories separately"
);
@@ -8682,18 +9024,18 @@ mod tests {
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
- "v root",
- " v dir1 <== selected",
- " v empty1",
- " v empty2",
- " v empty3",
- " file.txt",
- " > ignored_dir",
- " v subdir1",
- " > ignored_nested",
- " file1.txt",
- " file2.txt",
- " .gitignore",
+ separator!("v root"),
+ separator!(" v dir1 <== selected"),
+ separator!(" v empty1"),
+ separator!(" v empty2"),
+ separator!(" v empty3"),
+ separator!(" file.txt"),
+ separator!(" > ignored_dir"),
+ separator!(" v subdir1"),
+ separator!(" > ignored_nested"),
+ separator!(" file1.txt"),
+ separator!(" file2.txt"),
+ separator!(" .gitignore"),
],
"After expand_all without auto-fold: should expand all dirs normally, \
expand ignored_dir itself but not its subdirs, and not expand ignored_nested"
@@ -8712,20 +9054,20 @@ mod tests {
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
- "v root",
- " v dir1 <== selected",
- " v empty1",
- " v empty2",
- " v empty3",
- " file.txt",
- " v ignored_dir",
- " v subdir",
- " deep_file.txt",
- " v subdir1",
- " > ignored_nested",
- " file1.txt",
- " file2.txt",
- " .gitignore",
+ separator!("v root"),
+ separator!(" v dir1 <== selected"),
+ separator!(" v empty1"),
+ separator!(" v empty2"),
+ separator!(" v empty3"),
+ separator!(" file.txt"),
+ separator!(" v ignored_dir"),
+ separator!(" v subdir"),
+ separator!(" deep_file.txt"),
+ separator!(" v subdir1"),
+ separator!(" > ignored_nested"),
+ separator!(" file1.txt"),
+ separator!(" file2.txt"),
+ separator!(" .gitignore"),
],
"After expand_all on ignored_dir: should expand all contents of the ignored directory"
);
@@ -8737,7 +9079,7 @@ mod tests {
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
- "/root",
+ path!("/root"),
json!({
"dir1": {
"subdir1": {
@@ -8759,7 +9101,7 @@ mod tests {
)
.await;
- let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
let workspace =
cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace, cx);
@@ -8776,15 +9118,15 @@ mod tests {
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
- "v root",
- " v dir1",
- " v subdir1",
- " v nested1",
- " file1.txt",
- " file2.txt",
- " v subdir2 <== selected",
- " file4.txt",
- " > dir2",
+ separator!("v root"),
+ separator!(" v dir1"),
+ separator!(" v subdir1"),
+ separator!(" v nested1"),
+ separator!(" file1.txt"),
+ separator!(" file2.txt"),
+ separator!(" v subdir2 <== selected"),
+ separator!(" file4.txt"),
+ separator!(" > dir2"),
],
"Initial state with everything expanded"
);
@@ -8826,13 +9168,13 @@ mod tests {
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
- "v root",
- " v dir1",
- " v subdir1/nested1 <== selected",
- " file1.txt",
- " file2.txt",
- " > subdir2",
- " > dir2/single_file",
+ separator!("v root"),
+ separator!(" v dir1"),
+ separator!(" v subdir1/nested1 <== selected"),
+ separator!(" file1.txt"),
+ separator!(" file2.txt"),
+ separator!(" > subdir2"),
+ separator!(" > dir2/single_file"),
],
"Initial state with some dirs expanded"
);
@@ -8849,11 +9191,11 @@ mod tests {
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
- "v root",
- " v dir1 <== selected",
- " > subdir1/nested1",
- " > subdir2",
- " > dir2/single_file",
+ separator!("v root"),
+ separator!(" v dir1 <== selected"),
+ separator!(" > subdir1/nested1"),
+ separator!(" > subdir2"),
+ separator!(" > dir2/single_file"),
],
"Subdirs should be collapsed and folded with auto-fold enabled"
);
@@ -8881,14 +9223,14 @@ mod tests {
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
- "v root",
- " v dir1",
- " v subdir1",
- " v nested1 <== selected",
- " file1.txt",
- " file2.txt",
- " > subdir2",
- " > dir2",
+ separator!("v root"),
+ separator!(" v dir1"),
+ separator!(" v subdir1"),
+ separator!(" v nested1 <== selected"),
+ separator!(" file1.txt"),
+ separator!(" file2.txt"),
+ separator!(" > subdir2"),
+ separator!(" > dir2"),
],
"Initial state with some dirs expanded and auto-fold disabled"
);
@@ -8905,11 +9247,11 @@ mod tests {
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
- "v root",
- " v dir1 <== selected",
- " > subdir1",
- " > subdir2",
- " > dir2",
+ separator!("v root"),
+ separator!(" v dir1 <== selected"),
+ separator!(" > subdir1"),
+ separator!(" > subdir2"),
+ separator!(" > dir2"),
],
"Subdirs should be collapsed but not folded with auto-fold disabled"
);
@@ -272,15 +272,17 @@ mod tests {
use serde_json::json;
use settings::SettingsStore;
use std::{path::Path, sync::Arc};
+ use util::path;
#[gpui::test]
async fn test_project_symbols(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
- fs.insert_tree("/dir", json!({ "test.rs": "" })).await;
+ fs.insert_tree(path!("/dir"), json!({ "test.rs": "" }))
+ .await;
- let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(Arc::new(Language::new(
@@ -299,7 +301,7 @@ mod tests {
let _buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer_with_lsp("/dir/test.rs", cx)
+ project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
})
.await
.unwrap();
@@ -307,9 +309,9 @@ mod tests {
// Set up fake language server to return fuzzy matches against
// a fixed set of symbol names.
let fake_symbols = [
- symbol("one", "/external"),
- symbol("ton", "/dir/test.rs"),
- symbol("uno", "/dir/test.rs"),
+ symbol("one", path!("/external")),
+ symbol("ton", path!("/dir/test.rs")),
+ symbol("uno", path!("/dir/test.rs")),
];
let fake_server = fake_servers.next().await.unwrap();
fake_server.handle_request::<lsp::WorkspaceSymbolRequest, _, _>(
@@ -129,7 +129,7 @@ message Envelope {
GetPrivateUserInfo get_private_user_info = 102;
GetPrivateUserInfoResponse get_private_user_info_response = 103;
UpdateUserPlan update_user_plan = 234;
- UpdateDiffBase update_diff_base = 104;
+ UpdateDiffBases update_diff_bases = 104;
AcceptTermsOfService accept_terms_of_service = 239;
AcceptTermsOfServiceResponse accept_terms_of_service_response = 240;
@@ -304,15 +304,18 @@ message Envelope {
SyncExtensionsResponse sync_extensions_response = 286;
InstallExtension install_extension = 287;
- GetStagedText get_staged_text = 288;
- GetStagedTextResponse get_staged_text_response = 289;
+ OpenUnstagedDiff open_unstaged_diff = 288;
+ OpenUnstagedDiffResponse open_unstaged_diff_response = 289;
RegisterBufferWithLanguageServers register_buffer_with_language_servers = 290;
Stage stage = 293;
Unstage unstage = 294;
Commit commit = 295;
- OpenCommitMessageBuffer open_commit_message_buffer = 296; // current max
+ OpenCommitMessageBuffer open_commit_message_buffer = 296;
+
+ OpenUncommittedDiff open_uncommitted_diff = 297;
+ OpenUncommittedDiffResponse open_uncommitted_diff_response = 298; // current max
}
reserved 87 to 88;
@@ -365,6 +368,7 @@ enum ErrorCode {
DevServerProjectPathDoesNotExist = 16;
RemoteUpgradeRequired = 17;
RateLimitExceeded = 18;
+ CommitFailed = 19;
reserved 6;
reserved 14 to 15;
}
@@ -1797,6 +1801,7 @@ message RepositoryEntry {
optional string branch = 2;
repeated StatusEntry updated_statuses = 3;
repeated string removed_statuses = 4;
+ repeated string current_merge_conflicts = 5;
}
message StatusEntry {
@@ -2035,19 +2040,51 @@ message WorktreeMetadata {
string abs_path = 4;
}
-message UpdateDiffBase {
+message UpdateDiffBases {
uint64 project_id = 1;
uint64 buffer_id = 2;
+
+ enum Mode {
+ // No collaborator is using the unstaged diff.
+ HEAD_ONLY = 0;
+ // No collaborator is using the diff from HEAD.
+ INDEX_ONLY = 1;
+ // Both the unstaged and uncommitted diffs are demanded,
+ // and the contents of the index and HEAD are the same for this path.
+ INDEX_MATCHES_HEAD = 2;
+ // Both the unstaged and uncommitted diffs are demanded,
+ // and the contents of the index and HEAD differ for this path,
+ // where None means the path doesn't exist in that state of the repo.
+ INDEX_AND_HEAD = 3;
+ }
+
optional string staged_text = 3;
+ optional string committed_text = 4;
+ Mode mode = 5;
+}
+
+message OpenUnstagedDiff {
+ uint64 project_id = 1;
+ uint64 buffer_id = 2;
+}
+
+message OpenUnstagedDiffResponse {
+ optional string staged_text = 1;
}
-message GetStagedText {
+message OpenUncommittedDiff {
uint64 project_id = 1;
uint64 buffer_id = 2;
}
-message GetStagedTextResponse {
+message OpenUncommittedDiffResponse {
+ enum Mode {
+ INDEX_MATCHES_HEAD = 0;
+ INDEX_AND_HEAD = 1;
+ }
optional string staged_text = 1;
+ optional string committed_text = 2;
+ Mode mode = 3;
}
message GetNotifications {
@@ -2658,6 +2695,7 @@ message Commit {
uint64 work_directory_id = 3;
optional string name = 4;
optional string email = 5;
+ string message = 6;
}
message OpenCommitMessageBuffer {
@@ -219,8 +219,10 @@ messages!(
(GetImplementationResponse, Background),
(GetLlmToken, Background),
(GetLlmTokenResponse, Background),
- (GetStagedText, Foreground),
- (GetStagedTextResponse, Foreground),
+ (OpenUnstagedDiff, Foreground),
+ (OpenUnstagedDiffResponse, Foreground),
+ (OpenUncommittedDiff, Foreground),
+ (OpenUncommittedDiffResponse, Foreground),
(GetUsers, Foreground),
(Hello, Foreground),
(IncomingCall, Foreground),
@@ -309,7 +311,7 @@ messages!(
(UpdateUserChannels, Foreground),
(UpdateContacts, Foreground),
(UpdateDiagnosticSummary, Foreground),
- (UpdateDiffBase, Foreground),
+ (UpdateDiffBases, Foreground),
(UpdateFollowers, Foreground),
(UpdateInviteInfo, Foreground),
(UpdateLanguageServer, Foreground),
@@ -422,7 +424,8 @@ request_messages!(
(GetProjectSymbols, GetProjectSymbolsResponse),
(GetReferences, GetReferencesResponse),
(GetSignatureHelp, GetSignatureHelpResponse),
- (GetStagedText, GetStagedTextResponse),
+ (OpenUnstagedDiff, OpenUnstagedDiffResponse),
+ (OpenUncommittedDiff, OpenUncommittedDiffResponse),
(GetSupermavenApiKey, GetSupermavenApiKeyResponse),
(GetTypeDefinition, GetTypeDefinitionResponse),
(LinkedEditingRange, LinkedEditingRangeResponse),
@@ -543,7 +546,8 @@ entity_messages!(
GetProjectSymbols,
GetReferences,
GetSignatureHelp,
- GetStagedText,
+ OpenUnstagedDiff,
+ OpenUncommittedDiff,
GetTypeDefinition,
InlayHints,
JoinProject,
@@ -575,7 +579,7 @@ entity_messages!(
UpdateBuffer,
UpdateBufferFile,
UpdateDiagnosticSummary,
- UpdateDiffBase,
+ UpdateDiffBases,
UpdateLanguageServer,
UpdateProject,
UpdateProjectCollaborator,
@@ -595,6 +595,7 @@ mod tests {
use project::{project_settings::ProjectSettings, Project};
use serde_json::json;
use settings::SettingsStore;
+ use util::path;
use workspace::{open_paths, AppState};
use super::*;
@@ -615,7 +616,7 @@ mod tests {
.fs
.as_fake()
.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"main.ts": "a"
}),
@@ -623,7 +624,7 @@ mod tests {
.await;
cx.update(|cx| {
open_paths(
- &[PathBuf::from("/dir/main.ts")],
+ &[PathBuf::from(path!("/dir/main.ts"))],
app_state,
workspace::OpenOptions::default(),
cx,
@@ -14,6 +14,6 @@ proc-macro = true
doctest = false
[dependencies]
-syn = "1.0.72"
-quote = "1.0.9"
-proc-macro2 = "1.0.66"
+proc-macro2.workspace = true
+quote.workspace = true
+syn.workspace = true
@@ -77,7 +77,7 @@ node_runtime = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
remote = { workspace = true, features = ["test-support"] }
lsp = { workspace = true, features=["test-support"] }
-
+unindent.workspace = true
serde_json.workspace = true
[build-dependencies]
@@ -14,22 +14,17 @@ fn main() {
std::env::var("TARGET").unwrap()
);
- // If we're building this for nightly, we want to set the ZED_COMMIT_SHA
- if let Some(release_channel) = std::env::var("ZED_RELEASE_CHANNEL").ok() {
- if release_channel.as_str() == "nightly" {
- // Populate git sha environment variable if git is available
- println!("cargo:rerun-if-changed=../../.git/logs/HEAD");
- if let Some(output) = Command::new("git")
- .args(["rev-parse", "HEAD"])
- .output()
- .ok()
- .filter(|output| output.status.success())
- {
- let git_sha = String::from_utf8_lossy(&output.stdout);
- let git_sha = git_sha.trim();
+ // Populate git sha environment variable if git is available
+ println!("cargo:rerun-if-changed=../../.git/logs/HEAD");
+ if let Some(output) = Command::new("git")
+ .args(["rev-parse", "HEAD"])
+ .output()
+ .ok()
+ .filter(|output| output.status.success())
+ {
+ let git_sha = String::from_utf8_lossy(&output.stdout);
+ let git_sha = git_sha.trim();
- println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}");
- }
- }
+ println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}");
}
}
@@ -1,15 +1,15 @@
use anyhow::{anyhow, Context as _, Result};
use extension::ExtensionHostProxy;
use extension_host::headless_host::HeadlessExtensionStore;
-use fs::{CreateOptions, Fs};
-use git::{repository::RepoPath, COMMIT_MESSAGE};
+use fs::Fs;
+use git::repository::RepoPath;
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, PromptLevel, SharedString};
use http_client::HttpClient;
use language::{proto::serialize_operation, Buffer, BufferEvent, LanguageRegistry};
use node_runtime::NodeRuntime;
use project::{
buffer_store::{BufferStore, BufferStoreEvent},
- git::{GitRepo, GitState, RepositoryHandle},
+ git::{GitState, Repository},
project_settings::SettingsObserver,
search::SearchQuery,
task_store::TaskStore,
@@ -635,7 +635,11 @@ impl HeadlessProject {
.map(RepoPath::new)
.collect();
- repository_handle.stage_entries(entries).await?;
+ repository_handle
+ .update(&mut cx, |repository_handle, _| {
+ repository_handle.stage_entries(entries)
+ })?
+ .await??;
Ok(proto::Ack {})
}
@@ -657,7 +661,11 @@ impl HeadlessProject {
.map(RepoPath::new)
.collect();
- repository_handle.unstage_entries(entries).await?;
+ repository_handle
+ .update(&mut cx, |repository_handle, _| {
+ repository_handle.unstage_entries(entries)
+ })?
+ .await??;
Ok(proto::Ack {})
}
@@ -672,10 +680,15 @@ impl HeadlessProject {
let repository_handle =
Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let message = SharedString::from(envelope.payload.message);
let name = envelope.payload.name.map(SharedString::from);
let email = envelope.payload.email.map(SharedString::from);
- repository_handle.commit(name.zip(email)).await?;
+ repository_handle
+ .update(&mut cx, |repository_handle, _| {
+ repository_handle.commit(message, name.zip(email))
+ })?
+ .await??;
Ok(proto::Ack {})
}
@@ -686,55 +699,11 @@ impl HeadlessProject {
) -> Result<proto::OpenBufferResponse> {
let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
+ let repository =
Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
- let git_repository = match &repository_handle.git_repo {
- GitRepo::Local(git_repository) => git_repository.clone(),
- GitRepo::Remote { .. } => {
- anyhow::bail!("Cannot handle open commit message buffer for remote git repo")
- }
- };
- let commit_message_file = git_repository.dot_git_dir().join(*COMMIT_MESSAGE);
- let fs = this.update(&mut cx, |headless_project, _| headless_project.fs.clone())?;
- fs.create_file(
- &commit_message_file,
- CreateOptions {
- overwrite: false,
- ignore_if_exists: true,
- },
- )
- .await
- .with_context(|| format!("creating commit message file {commit_message_file:?}"))?;
-
- let (worktree, relative_path) = this
- .update(&mut cx, |headless_project, cx| {
- headless_project
- .worktree_store
- .update(cx, |worktree_store, cx| {
- worktree_store.find_or_create_worktree(&commit_message_file, false, cx)
- })
- })?
- .await
- .with_context(|| {
- format!("deriving worktree for commit message file {commit_message_file:?}")
- })?;
-
- let buffer = this
- .update(&mut cx, |headless_project, cx| {
- headless_project
- .buffer_store
- .update(cx, |buffer_store, cx| {
- buffer_store.open_buffer(
- ProjectPath {
- worktree_id: worktree.read(cx).id(),
- path: Arc::from(relative_path),
- },
- cx,
- )
- })
- })
- .with_context(|| {
- format!("opening buffer for commit message file {commit_message_file:?}")
+ let buffer = repository
+ .update(&mut cx, |repository, cx| {
+ repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
})?
.await?;
@@ -759,7 +728,7 @@ impl HeadlessProject {
worktree_id: WorktreeId,
work_directory_id: ProjectEntryId,
cx: &mut AsyncApp,
- ) -> Result<RepositoryHandle> {
+ ) -> Result<Entity<Repository>> {
this.update(cx, |project, cx| {
let repository_handle = project
.git_state
@@ -767,8 +736,11 @@ impl HeadlessProject {
.all_repositories()
.into_iter()
.find(|repository_handle| {
- repository_handle.worktree_id == worktree_id
- && repository_handle.repository_entry.work_directory_id()
+ repository_handle.read(cx).worktree_id == worktree_id
+ && repository_handle
+ .read(cx)
+ .repository_entry
+ .work_directory_id()
== work_directory_id
})
.context("missing repository handle")?;
@@ -40,6 +40,7 @@ fn main() {
#[cfg(not(windows))]
fn main() {
+ use release_channel::{ReleaseChannel, RELEASE_CHANNEL};
use remote::proxy::ProxyLaunchError;
use remote_server::unix::{execute_proxy, execute_run};
@@ -72,12 +73,18 @@ fn main() {
}
},
Some(Commands::Version) => {
- if let Some(build_sha) = option_env!("ZED_COMMIT_SHA") {
- println!("{}", build_sha);
- } else {
- println!("{}", env!("ZED_PKG_VERSION"));
- }
-
+ let release_channel = *RELEASE_CHANNEL;
+ match release_channel {
+ ReleaseChannel::Stable | ReleaseChannel::Preview => {
+ println!("{}", env!("ZED_PKG_VERSION"))
+ }
+ ReleaseChannel::Nightly | ReleaseChannel::Dev => {
+ println!(
+ "{}",
+ option_env!("ZED_COMMIT_SHA").unwrap_or(release_channel.dev_name())
+ )
+ }
+ };
std::process::exit(0);
}
None => {
@@ -1,3 +1,6 @@
+/// todo(windows)
+/// The tests in this file assume that server_cx is running on Windows too.
+/// We need to find a way to test Windows-Non-Windows interactions.
use crate::headless_project::HeadlessProject;
use client::{Client, UserStore};
use clock::FakeSystemClock;
@@ -24,12 +27,14 @@ use std::{
path::{Path, PathBuf},
sync::Arc,
};
+use unindent::Unindent as _;
+use util::{path, separator};
#[gpui::test]
async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
let fs = FakeFs::new(server_cx.executor());
fs.insert_tree(
- "/code",
+ path!("/code"),
json!({
"project1": {
".git": {},
@@ -45,14 +50,14 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test
)
.await;
fs.set_index_for_repo(
- Path::new("/code/project1/.git"),
- &[(Path::new("src/lib.rs"), "fn one() -> usize { 0 }".into())],
+ Path::new(path!("/code/project1/.git")),
+ &[("src/lib.rs".into(), "fn one() -> usize { 0 }".into())],
);
let (project, _headless) = init_test(&fs, cx, server_cx).await;
let (worktree, _) = project
.update(cx, |project, cx| {
- project.find_or_create_worktree("/code/project1", true, cx)
+ project.find_or_create_worktree(path!("/code/project1"), true, cx)
})
.await
.unwrap();
@@ -79,18 +84,15 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test
})
.await
.unwrap();
- let change_set = project
+ let diff = project
.update(cx, |project, cx| {
- project.open_unstaged_changes(buffer.clone(), cx)
+ project.open_unstaged_diff(buffer.clone(), cx)
})
.await
.unwrap();
- change_set.update(cx, |change_set, _| {
- assert_eq!(
- change_set.base_text_string().unwrap(),
- "fn one() -> usize { 0 }"
- );
+ diff.update(cx, |diff, _| {
+ assert_eq!(diff.base_text_string().unwrap(), "fn one() -> usize { 0 }");
});
buffer.update(cx, |buffer, cx| {
@@ -113,7 +115,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test
// A new file is created in the remote filesystem. The user
// sees the new file.
fs.save(
- "/code/project1/src/main.rs".as_ref(),
+ path!("/code/project1/src/main.rs").as_ref(),
&"fn main() {}".into(),
Default::default(),
)
@@ -134,8 +136,8 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test
// A file that is currently open in a buffer is renamed.
fs.rename(
- "/code/project1/src/lib.rs".as_ref(),
- "/code/project1/src/lib2.rs".as_ref(),
+ path!("/code/project1/src/lib.rs").as_ref(),
+ path!("/code/project1/src/lib2.rs").as_ref(),
Default::default(),
)
.await
@@ -146,13 +148,13 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test
});
fs.set_index_for_repo(
- Path::new("/code/project1/.git"),
- &[(Path::new("src/lib2.rs"), "fn one() -> usize { 100 }".into())],
+ Path::new(path!("/code/project1/.git")),
+ &[("src/lib2.rs".into(), "fn one() -> usize { 100 }".into())],
);
cx.executor().run_until_parked();
- change_set.update(cx, |change_set, _| {
+ diff.update(cx, |diff, _| {
assert_eq!(
- change_set.base_text_string().unwrap(),
+ diff.base_text_string().unwrap(),
"fn one() -> usize { 100 }"
);
});
@@ -162,7 +164,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test
async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
let fs = FakeFs::new(server_cx.executor());
fs.insert_tree(
- "/code",
+ path!("/code"),
json!({
"project1": {
".git": {},
@@ -179,7 +181,7 @@ async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut Tes
project
.update(cx, |project, cx| {
- project.find_or_create_worktree("/code/project1", true, cx)
+ project.find_or_create_worktree(path!("/code/project1"), true, cx)
})
.await
.unwrap();
@@ -210,7 +212,7 @@ async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut Tes
buffer.update(&mut cx, |buffer, cx| {
assert_eq!(
buffer.file().unwrap().full_path(cx).to_string_lossy(),
- "project1/README.md"
+ separator!("project1/README.md")
)
});
@@ -368,7 +370,7 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo
async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
let fs = FakeFs::new(server_cx.executor());
fs.insert_tree(
- "/code",
+ path!("/code"),
json!({
"project1": {
".git": {},
@@ -384,7 +386,7 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext
let (project, headless) = init_test(&fs, cx, server_cx).await;
fs.insert_tree(
- "/code/project1/.zed",
+ path!("/code/project1/.zed"),
json!({
"settings.json": r#"
{
@@ -431,7 +433,7 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext
let worktree_id = project
.update(cx, |project, cx| {
- project.find_or_create_worktree("/code/project1", true, cx)
+ project.find_or_create_worktree(path!("/code/project1"), true, cx)
})
.await
.unwrap()
@@ -512,7 +514,7 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext
Ok(Some(lsp::WorkspaceEdit {
changes: Some(
[(
- lsp::Url::from_file_path("/code/project1/src/lib.rs").unwrap(),
+ lsp::Url::from_file_path(path!("/code/project1/src/lib.rs")).unwrap(),
vec![lsp::TextEdit::new(
lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(0, 6)),
"two".to_string(),
@@ -545,7 +547,7 @@ async fn test_remote_cancel_language_server_work(
) {
let fs = FakeFs::new(server_cx.executor());
fs.insert_tree(
- "/code",
+ path!("/code"),
json!({
"project1": {
".git": {},
@@ -561,7 +563,7 @@ async fn test_remote_cancel_language_server_work(
let (project, headless) = init_test(&fs, cx, server_cx).await;
fs.insert_tree(
- "/code/project1/.zed",
+ path!("/code/project1/.zed"),
json!({
"settings.json": r#"
{
@@ -608,7 +610,7 @@ async fn test_remote_cancel_language_server_work(
let worktree_id = project
.update(cx, |project, cx| {
- project.find_or_create_worktree("/code/project1", true, cx)
+ project.find_or_create_worktree(path!("/code/project1"), true, cx)
})
.await
.unwrap()
@@ -708,7 +710,7 @@ async fn test_remote_cancel_language_server_work(
async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
let fs = FakeFs::new(server_cx.executor());
fs.insert_tree(
- "/code",
+ path!("/code"),
json!({
"project1": {
".git": {},
@@ -724,7 +726,7 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont
let (project, _headless) = init_test(&fs, cx, server_cx).await;
let (worktree, _) = project
.update(cx, |project, cx| {
- project.find_or_create_worktree("/code/project1", true, cx)
+ project.find_or_create_worktree(path!("/code/project1"), true, cx)
})
.await
.unwrap();
@@ -739,7 +741,7 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont
.unwrap();
fs.save(
- &PathBuf::from("/code/project1/src/lib.rs"),
+ &PathBuf::from(path!("/code/project1/src/lib.rs")),
&("bangles".to_string().into()),
LineEnding::Unix,
)
@@ -754,7 +756,7 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont
});
fs.save(
- &PathBuf::from("/code/project1/src/lib.rs"),
+ &PathBuf::from(path!("/code/project1/src/lib.rs")),
&("bloop".to_string().into()),
LineEnding::Unix,
)
@@ -786,7 +788,7 @@ async fn test_remote_resolve_path_in_buffer(
) {
let fs = FakeFs::new(server_cx.executor());
fs.insert_tree(
- "/code",
+ path!("/code"),
json!({
"project1": {
".git": {},
@@ -802,7 +804,7 @@ async fn test_remote_resolve_path_in_buffer(
let (project, _headless) = init_test(&fs, cx, server_cx).await;
let (worktree, _) = project
.update(cx, |project, cx| {
- project.find_or_create_worktree("/code/project1", true, cx)
+ project.find_or_create_worktree(path!("/code/project1"), true, cx)
})
.await
.unwrap();
@@ -818,14 +820,14 @@ async fn test_remote_resolve_path_in_buffer(
let path = project
.update(cx, |project, cx| {
- project.resolve_path_in_buffer("/code/project1/README.md", &buffer, cx)
+ project.resolve_path_in_buffer(path!("/code/project1/README.md"), &buffer, cx)
})
.await
.unwrap();
assert!(path.is_file());
assert_eq!(
path.abs_path().unwrap().to_string_lossy(),
- "/code/project1/README.md"
+ path!("/code/project1/README.md")
);
let path = project
@@ -1013,7 +1015,7 @@ async fn test_adding_then_removing_then_adding_worktrees(
async fn test_open_server_settings(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
let fs = FakeFs::new(server_cx.executor());
fs.insert_tree(
- "/code",
+ path!("/code"),
json!({
"project1": {
".git": {},
@@ -1035,7 +1037,9 @@ async fn test_open_server_settings(cx: &mut TestAppContext, server_cx: &mut Test
cx.update(|cx| {
assert_eq!(
buffer.read(cx).text(),
- initial_server_settings_content().to_string()
+ initial_server_settings_content()
+ .to_string()
+ .replace("\r\n", "\n")
)
})
}
@@ -1044,7 +1048,7 @@ async fn test_open_server_settings(cx: &mut TestAppContext, server_cx: &mut Test
async fn test_reconnect(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
let fs = FakeFs::new(server_cx.executor());
fs.insert_tree(
- "/code",
+ path!("/code"),
json!({
"project1": {
".git": {},
@@ -1061,7 +1065,7 @@ async fn test_reconnect(cx: &mut TestAppContext, server_cx: &mut TestAppContext)
let (worktree, _) = project
.update(cx, |project, cx| {
- project.find_or_create_worktree("/code/project1", true, cx)
+ project.find_or_create_worktree(path!("/code/project1"), true, cx)
})
.await
.unwrap();
@@ -1091,7 +1095,9 @@ async fn test_reconnect(cx: &mut TestAppContext, server_cx: &mut TestAppContext)
.unwrap();
assert_eq!(
- fs.load("/code/project1/src/lib.rs".as_ref()).await.unwrap(),
+ fs.load(path!("/code/project1/src/lib.rs").as_ref())
+ .await
+ .unwrap(),
"fn one() -> usize { 100 }"
);
}
@@ -1175,6 +1181,122 @@ async fn test_remote_rename_entry(cx: &mut TestAppContext, server_cx: &mut TestA
assert_eq!(worktree.entry_for_path("README.rst").unwrap().id, entry.id)
});
}
+
+#[gpui::test]
+async fn test_remote_git_diffs(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
+ let text_2 = "
+ fn one() -> usize {
+ 1
+ }
+ "
+ .unindent();
+ let text_1 = "
+ fn one() -> usize {
+ 0
+ }
+ "
+ .unindent();
+
+ let fs = FakeFs::new(server_cx.executor());
+ fs.insert_tree(
+ "/code",
+ json!({
+ "project1": {
+ ".git": {},
+ "src": {
+ "lib.rs": text_2
+ },
+ "README.md": "# project 1",
+ },
+ }),
+ )
+ .await;
+ fs.set_index_for_repo(
+ Path::new("/code/project1/.git"),
+ &[("src/lib.rs".into(), text_1.clone())],
+ );
+ fs.set_head_for_repo(
+ Path::new("/code/project1/.git"),
+ &[("src/lib.rs".into(), text_1.clone())],
+ );
+
+ let (project, _headless) = init_test(&fs, cx, server_cx).await;
+ let (worktree, _) = project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree("/code/project1", true, cx)
+ })
+ .await
+ .unwrap();
+ let worktree_id = cx.update(|cx| worktree.read(cx).id());
+ cx.executor().run_until_parked();
+
+ let buffer = project
+ .update(cx, |project, cx| {
+ project.open_buffer((worktree_id, Path::new("src/lib.rs")), cx)
+ })
+ .await
+ .unwrap();
+ let diff = project
+ .update(cx, |project, cx| {
+ project.open_uncommitted_diff(buffer.clone(), cx)
+ })
+ .await
+ .unwrap();
+
+ diff.read_with(cx, |diff, cx| {
+ assert_eq!(diff.base_text_string().unwrap(), text_1);
+ assert_eq!(
+ diff.unstaged_diff
+ .as_ref()
+ .unwrap()
+ .read(cx)
+ .base_text_string()
+ .unwrap(),
+ text_1
+ );
+ });
+
+ // stage the current buffer's contents
+ fs.set_index_for_repo(
+ Path::new("/code/project1/.git"),
+ &[("src/lib.rs".into(), text_2.clone())],
+ );
+
+ cx.executor().run_until_parked();
+ diff.read_with(cx, |diff, cx| {
+ assert_eq!(diff.base_text_string().unwrap(), text_1);
+ assert_eq!(
+ diff.unstaged_diff
+ .as_ref()
+ .unwrap()
+ .read(cx)
+ .base_text_string()
+ .unwrap(),
+ text_2
+ );
+ });
+
+ // commit the current buffer's contents
+ fs.set_head_for_repo(
+ Path::new("/code/project1/.git"),
+ &[("src/lib.rs".into(), text_2.clone())],
+ );
+
+ cx.executor().run_until_parked();
+ diff.read_with(cx, |diff, cx| {
+ assert_eq!(diff.base_text_string().unwrap(), text_2);
+ assert_eq!(
+ diff.unstaged_diff
+ .as_ref()
+ .unwrap()
+ .read(cx)
+ .base_text_string()
+ .unwrap(),
+ text_2
+ );
+ });
+}
+
#[gpui::test]
async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
let fs = FakeFs::new(server_cx.executor());
@@ -16,7 +16,7 @@ use node_runtime::{NodeBinaryOptions, NodeRuntime};
use paths::logs_dir;
use project::project_settings::ProjectSettings;
-use release_channel::AppVersion;
+use release_channel::{AppVersion, ReleaseChannel, RELEASE_CHANNEL};
use remote::proxy::ProxyLaunchError;
use remote::ssh_session::ChannelClient;
use remote::{
@@ -149,6 +149,14 @@ fn init_panic_hook() {
(&backtrace).join("\n")
);
+ let release_channel = *RELEASE_CHANNEL;
+ let version = match release_channel {
+ ReleaseChannel::Stable | ReleaseChannel::Preview => env!("ZED_PKG_VERSION"),
+ ReleaseChannel::Nightly | ReleaseChannel::Dev => {
+ option_env!("ZED_COMMIT_SHA").unwrap_or("missing-zed-commit-sha")
+ }
+ };
+
let panic_data = telemetry_events::Panic {
thread: thread_name.into(),
payload: payload.clone(),
@@ -156,11 +164,9 @@ fn init_panic_hook() {
file: location.file().into(),
line: location.line(),
}),
- app_version: format!(
- "remote-server-{}",
- option_env!("ZED_COMMIT_SHA").unwrap_or(&env!("ZED_PKG_VERSION"))
- ),
- release_channel: release_channel::RELEASE_CHANNEL.display_name().into(),
+ app_version: format!("remote-server-{version}"),
+ app_commit_sha: option_env!("ZED_COMMIT_SHA").map(|sha| sha.into()),
+ release_channel: release_channel.display_name().into(),
target: env!("TARGET").to_owned().into(),
os_name: telemetry::os_name(),
os_version: Some(telemetry::os_version()),
@@ -450,6 +450,10 @@ impl Rope {
self.clip_point(Point::new(row, u32::MAX), Bias::Left)
.column
}
+
+ pub fn ptr_eq(&self, other: &Self) -> bool {
+ self.chunks.ptr_eq(&other.chunks)
+ }
}
impl<'a> From<&'a str> for Rope {
@@ -1,6 +1,9 @@
+use std::collections::BTreeSet;
+use std::sync::Arc;
+
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
-use std::sync::Arc;
+use strum::EnumString;
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
pub struct ExtensionApiManifest {
@@ -11,6 +14,36 @@ pub struct ExtensionApiManifest {
pub repository: String,
pub schema_version: Option<i32>,
pub wasm_api_version: Option<String>,
+ #[serde(default)]
+ pub provides: BTreeSet<ExtensionProvides>,
+}
+
+#[derive(
+ Debug,
+ PartialEq,
+ Eq,
+ PartialOrd,
+ Ord,
+ Hash,
+ Clone,
+ Copy,
+ Serialize,
+ Deserialize,
+ EnumString,
+ strum::Display,
+)]
+#[serde(rename_all = "kebab-case")]
+#[strum(serialize_all = "kebab-case")]
+pub enum ExtensionProvides {
+ Themes,
+ IconThemes,
+ Languages,
+ Grammars,
+ LanguageServers,
+ ContextServers,
+ SlashCommands,
+ IndexedDocsProviders,
+ Snippets,
}
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
@@ -33,18 +33,3 @@ pub struct PerformCompletionParams {
pub model: String,
pub provider_request: Box<serde_json::value::RawValue>,
}
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct PredictEditsParams {
- pub outline: Option<String>,
- pub input_events: String,
- pub input_excerpt: String,
- /// Whether the user provided consent for sampling this interaction.
- #[serde(default)]
- pub data_collection_permission: bool,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct PredictEditsResponse {
- pub output_excerpt: String,
-}
@@ -44,6 +44,7 @@ use registrar::{ForDeployed, ForDismissed, SearchActionsRegistrar, WithResults};
const MAX_BUFFER_SEARCH_HISTORY_SIZE: usize = 50;
#[derive(PartialEq, Clone, Deserialize, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct Deploy {
#[serde(default = "util::serde::default_true")]
pub focus: bool,
@@ -2188,6 +2188,7 @@ pub mod tests {
use project::FakeFs;
use serde_json::json;
use settings::SettingsStore;
+ use util::path;
use workspace::DeploySearch;
#[gpui::test]
@@ -2196,7 +2197,7 @@ pub mod tests {
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"one.rs": "const ONE: usize = 1;",
"two.rs": "const TWO: usize = one::ONE + one::ONE;",
@@ -2205,7 +2206,7 @@ pub mod tests {
}),
)
.await;
- let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let workspace = window.root(cx).unwrap();
let search = cx.new(|cx| ProjectSearch::new(project.clone(), cx));
@@ -2563,7 +2564,7 @@ pub mod tests {
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"one.rs": "const ONE: usize = 1;",
"two.rs": "const TWO: usize = one::ONE + one::ONE;",
@@ -2572,7 +2573,7 @@ pub mod tests {
}),
)
.await;
- let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let window = cx.add_window(|window, cx| Workspace::test_new(project, window, cx));
let workspace = window;
let search_bar = window.build_entity(cx, |_, _| ProjectSearchBar::new());
@@ -2858,7 +2859,7 @@ pub mod tests {
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"a": {
"one.rs": "const ONE: usize = 1;",
@@ -2983,7 +2984,7 @@ pub mod tests {
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"one.rs": "const ONE: usize = 1;",
"two.rs": "const TWO: usize = one::ONE + one::ONE;",
@@ -2992,7 +2993,7 @@ pub mod tests {
}),
)
.await;
- let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let window = cx.add_window(|window, cx| Workspace::test_new(project, window, cx));
let workspace = window.root(cx).unwrap();
let search_bar = window.build_entity(cx, |_, _| ProjectSearchBar::new());
@@ -3313,13 +3314,13 @@ pub mod tests {
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"one.rs": "const ONE: usize = 1;",
}),
)
.await;
- let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let worktree_id = project.update(cx, |this, cx| {
this.worktrees(cx).next().unwrap().read(cx).id()
});
@@ -3537,13 +3538,13 @@ pub mod tests {
// Setup 2 panes, both with a file open and one with a project search.
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"one.rs": "const ONE: usize = 1;",
}),
)
.await;
- let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let worktree_id = project.update(cx, |this, cx| {
this.worktrees(cx).next().unwrap().read(cx).id()
});
@@ -3692,7 +3693,7 @@ pub mod tests {
// We need many lines in the search results to be able to scroll the window
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"1.txt": "\n\n\n\n\n A \n\n\n\n\n",
"2.txt": "\n\n\n\n\n A \n\n\n\n\n",
@@ -3717,7 +3718,7 @@ pub mod tests {
}),
)
.await;
- let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let workspace = window.root(cx).unwrap();
let search = cx.new(|cx| ProjectSearch::new(project, cx));
@@ -3771,13 +3772,13 @@ pub mod tests {
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"one.rs": "const ONE: usize = 1;",
}),
)
.await;
- let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let worktree_id = project.update(cx, |this, cx| {
this.worktrees(cx).next().unwrap().read(cx).id()
});
@@ -44,9 +44,9 @@ sha2.workspace = true
smol.workspace = true
theme.workspace = true
tree-sitter.workspace = true
-ui. workspace = true
+ui.workspace = true
unindent.workspace = true
-util. workspace = true
+util.workspace = true
workspace.workspace = true
worktree.workspace = true
@@ -279,6 +279,7 @@ mod tests {
use settings::SettingsStore;
use smol::channel;
use std::{future, path::Path, sync::Arc};
+ use util::separator;
fn init_test(cx: &mut TestAppContext) {
env_logger::try_init().ok();
@@ -421,7 +422,10 @@ mod tests {
// Find result that is greater than 0.5
let search_result = results.iter().find(|result| result.score > 0.9).unwrap();
- assert_eq!(search_result.path.to_string_lossy(), "fixture/needle.md");
+ assert_eq!(
+ search_result.path.to_string_lossy(),
+ separator!("fixture/needle.md")
+ );
let content = cx
.update(|cx| {
@@ -35,6 +35,7 @@ smallvec.workspace = true
tree-sitter-json.workspace = true
tree-sitter.workspace = true
util.workspace = true
+migrator.workspace = true
[dev-dependencies]
fs = { workspace = true, features = ["test-support"] }
@@ -1,22 +1,24 @@
-use std::rc::Rc;
-
-use crate::{settings_store::parse_json_with_comments, SettingsAssets};
-use anyhow::anyhow;
+use anyhow::{anyhow, Context as _, Result};
use collections::{HashMap, IndexMap};
+use fs::Fs;
use gpui::{
Action, ActionBuildError, App, InvalidKeystrokeError, KeyBinding, KeyBindingContextPredicate,
NoAction, SharedString, KEYSTROKE_PARSE_EXPECTED_MESSAGE,
};
+use migrator::migrate_keymap;
use schemars::{
gen::{SchemaGenerator, SchemaSettings},
schema::{ArrayValidation, InstanceType, Schema, SchemaObject, SubschemaValidation},
JsonSchema,
};
-use serde::Deserialize;
+use serde::{Deserialize, Serialize};
use serde_json::Value;
-use std::fmt::Write;
+use std::rc::Rc;
+use std::{fmt::Write, sync::Arc};
use util::{asset_str, markdown::MarkdownString};
+use crate::{settings_store::parse_json_with_comments, SettingsAssets};
+
// Note that the doc comments on these are shown by json-language-server when editing the keymap, so
// they should be considered user-facing documentation. Documentation is not handled well with
// schemars-0.8 - when there are newlines, it is rendered as plaintext (see
@@ -28,12 +30,12 @@ use util::{asset_str, markdown::MarkdownString};
/// Keymap configuration consisting of sections. Each section may have a context predicate which
/// determines whether its bindings are used.
-#[derive(Debug, Deserialize, Default, Clone, JsonSchema)]
+#[derive(Debug, Deserialize, Default, Clone, JsonSchema, Serialize)]
#[serde(transparent)]
pub struct KeymapFile(Vec<KeymapSection>);
/// Keymap section which binds keystrokes to actions.
-#[derive(Debug, Deserialize, Default, Clone, JsonSchema)]
+#[derive(Debug, Deserialize, Default, Clone, JsonSchema, Serialize)]
pub struct KeymapSection {
/// Determines when these bindings are active. When just a name is provided, like `Editor` or
/// `Workspace`, the bindings will be active in that context. Boolean expressions like `X && Y`,
@@ -78,9 +80,9 @@ impl KeymapSection {
/// Unlike the other json types involved in keymaps (including actions), this doc-comment will not
/// be included in the generated JSON schema, as it manually defines its `JsonSchema` impl. The
/// actual schema used for it is automatically generated in `KeymapFile::generate_json_schema`.
-#[derive(Debug, Deserialize, Default, Clone)]
+#[derive(Debug, Deserialize, Default, Clone, Serialize)]
#[serde(transparent)]
-pub struct KeymapAction(Value);
+pub struct KeymapAction(pub(crate) Value);
impl std::fmt::Display for KeymapAction {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@@ -114,9 +116,11 @@ impl JsonSchema for KeymapAction {
pub enum KeymapFileLoadResult {
Success {
key_bindings: Vec<KeyBinding>,
+ keymap_file: KeymapFile,
},
SomeFailedToLoad {
key_bindings: Vec<KeyBinding>,
+ keymap_file: KeymapFile,
error_message: MarkdownString,
},
JsonParseFailure {
@@ -150,6 +154,7 @@ impl KeymapFile {
KeymapFileLoadResult::SomeFailedToLoad {
key_bindings,
error_message,
+ ..
} if key_bindings.is_empty() => Err(anyhow!(
"Error loading built-in keymap \"{asset_path}\": {error_message}"
)),
@@ -164,7 +169,7 @@ impl KeymapFile {
#[cfg(feature = "test-support")]
pub fn load_panic_on_failure(content: &str, cx: &App) -> Vec<KeyBinding> {
match Self::load(content, cx) {
- KeymapFileLoadResult::Success { key_bindings } => key_bindings,
+ KeymapFileLoadResult::Success { key_bindings, .. } => key_bindings,
KeymapFileLoadResult::SomeFailedToLoad { error_message, .. } => {
panic!("{error_message}");
}
@@ -180,6 +185,7 @@ impl KeymapFile {
if content.is_empty() {
return KeymapFileLoadResult::Success {
key_bindings: Vec::new(),
+ keymap_file: KeymapFile(Vec::new()),
};
}
let keymap_file = match parse_json_with_comments::<Self>(content) {
@@ -266,7 +272,10 @@ impl KeymapFile {
}
if errors.is_empty() {
- KeymapFileLoadResult::Success { key_bindings }
+ KeymapFileLoadResult::Success {
+ key_bindings,
+ keymap_file,
+ }
} else {
let mut error_message = "Errors in user keymap file.\n".to_owned();
for (context, section_errors) in errors {
@@ -284,6 +293,7 @@ impl KeymapFile {
}
KeymapFileLoadResult::SomeFailedToLoad {
key_bindings,
+ keymap_file,
error_message: MarkdownString(error_message),
}
}
@@ -551,6 +561,55 @@ impl KeymapFile {
pub fn sections(&self) -> impl DoubleEndedIterator<Item = &KeymapSection> {
self.0.iter()
}
+
+ async fn load_keymap_file(fs: &Arc<dyn Fs>) -> Result<String> {
+ match fs.load(paths::keymap_file()).await {
+ result @ Ok(_) => result,
+ Err(err) => {
+ if let Some(e) = err.downcast_ref::<std::io::Error>() {
+ if e.kind() == std::io::ErrorKind::NotFound {
+ return Ok(crate::initial_keymap_content().to_string());
+ }
+ }
+ Err(err)
+ }
+ }
+ }
+
+ pub fn should_migrate_keymap(keymap_file: Self) -> bool {
+ let Ok(old_text) = serde_json::to_string(&keymap_file) else {
+ return false;
+ };
+ migrate_keymap(&old_text).is_some()
+ }
+
+ pub async fn migrate_keymap(fs: Arc<dyn Fs>) -> Result<()> {
+ let old_text = Self::load_keymap_file(&fs).await?;
+ let Some(new_text) = migrate_keymap(&old_text) else {
+ return Ok(());
+ };
+ let initial_path = paths::keymap_file().as_path();
+ if fs.is_file(initial_path).await {
+ let backup_path = paths::home_dir().join(".zed_keymap_backup");
+ fs.atomic_write(backup_path, old_text)
+ .await
+ .with_context(|| {
+ "Failed to create settings backup in home directory".to_string()
+ })?;
+ let resolved_path = fs.canonicalize(initial_path).await.with_context(|| {
+ format!("Failed to canonicalize keymap path {:?}", initial_path)
+ })?;
+ fs.atomic_write(resolved_path.clone(), new_text)
+ .await
+ .with_context(|| format!("Failed to write keymap to file {:?}", resolved_path))?;
+ } else {
+ fs.atomic_write(initial_path.to_path_buf(), new_text)
+ .await
+ .with_context(|| format!("Failed to write keymap to file {:?}", initial_path))?;
+ }
+
+ Ok(())
+ }
}
// Double quotes a string and wraps it in backticks for markdown inline code..
@@ -560,7 +619,7 @@ fn inline_code_string(text: &str) -> MarkdownString {
#[cfg(test)]
mod tests {
- use crate::KeymapFile;
+ use super::KeymapFile;
#[test]
fn can_deserialize_keymap_with_trailing_comma() {
@@ -12,6 +12,7 @@ pub fn test_settings() -> String {
crate::default_settings().as_ref(),
)
.unwrap();
+ #[cfg(not(target_os = "windows"))]
util::merge_non_null_json_value_into(
serde_json::json!({
"ui_font_family": "Courier",
@@ -26,6 +27,21 @@ pub fn test_settings() -> String {
}),
&mut value,
);
+ #[cfg(target_os = "windows")]
+ util::merge_non_null_json_value_into(
+ serde_json::json!({
+ "ui_font_family": "Courier New",
+ "ui_font_features": {},
+ "ui_font_size": 14,
+ "ui_font_fallback": [],
+ "buffer_font_family": "Courier New",
+ "buffer_font_features": {},
+ "buffer_font_size": 14,
+ "buffer_font_fallback": [],
+ "theme": EMPTY_THEME_NAME,
+ }),
+ &mut value,
+ );
value.as_object_mut().unwrap().remove("languages");
serde_json::to_string(&value).unwrap()
}
@@ -65,7 +81,7 @@ pub fn watch_config_file(
pub fn handle_settings_file_changes(
mut user_settings_file_rx: mpsc::UnboundedReceiver<String>,
cx: &mut App,
- settings_changed: impl Fn(Option<anyhow::Error>, &mut App) + 'static,
+ settings_changed: impl Fn(Result<serde_json::Value, anyhow::Error>, &mut App) + 'static,
) {
let user_settings_content = cx
.background_executor()
@@ -76,7 +92,7 @@ pub fn handle_settings_file_changes(
if let Err(err) = &result {
log::error!("Failed to load user settings: {err}");
}
- settings_changed(result.err(), cx);
+ settings_changed(result, cx);
});
cx.spawn(move |cx| async move {
while let Some(user_settings_content) = user_settings_file_rx.next().await {
@@ -85,7 +101,7 @@ pub fn handle_settings_file_changes(
if let Err(err) = &result {
log::error!("Failed to load user settings: {err}");
}
- settings_changed(result.err(), cx);
+ settings_changed(result, cx);
cx.refresh_windows();
});
if result.is_err() {
@@ -4,9 +4,10 @@ use ec4rs::{ConfigParser, PropertiesSource, Section};
use fs::Fs;
use futures::{channel::mpsc, future::LocalBoxFuture, FutureExt, StreamExt};
use gpui::{App, AsyncApp, BorrowAppContext, Global, Task, UpdateGlobal};
+use migrator::migrate_settings;
use paths::{local_settings_file_relative_path, EDITORCONFIG_NAME};
use schemars::{gen::SchemaGenerator, schema::RootSchema, JsonSchema};
-use serde::{de::DeserializeOwned, Deserialize as _, Serialize};
+use serde::{de::DeserializeOwned, Deserialize, Serialize};
use smallvec::SmallVec;
use std::{
any::{type_name, Any, TypeId},
@@ -17,7 +18,9 @@ use std::{
sync::{Arc, LazyLock},
};
use tree_sitter::Query;
-use util::{merge_non_null_json_value_into, RangeExt, ResultExt as _};
+use util::RangeExt;
+
+use util::{merge_non_null_json_value_into, ResultExt as _};
pub type EditorconfigProperties = ec4rs::Properties;
@@ -544,7 +547,11 @@ impl SettingsStore {
}
/// Sets the user settings via a JSON string.
- pub fn set_user_settings(&mut self, user_settings_content: &str, cx: &mut App) -> Result<()> {
+ pub fn set_user_settings(
+ &mut self,
+ user_settings_content: &str,
+ cx: &mut App,
+ ) -> Result<serde_json::Value> {
let settings: serde_json::Value = if user_settings_content.is_empty() {
parse_json_with_comments("{}")?
} else {
@@ -552,9 +559,9 @@ impl SettingsStore {
};
anyhow::ensure!(settings.is_object(), "settings must be an object");
- self.raw_user_settings = settings;
+ self.raw_user_settings = settings.clone();
self.recompute_values(None, cx)?;
- Ok(())
+ Ok(settings)
}
pub fn set_server_settings(
@@ -988,6 +995,52 @@ impl SettingsStore {
properties.use_fallbacks();
Some(properties)
}
+
+ pub fn should_migrate_settings(settings: &serde_json::Value) -> bool {
+ let Ok(old_text) = serde_json::to_string(settings) else {
+ return false;
+ };
+ migrate_settings(&old_text).is_some()
+ }
+
+ pub fn migrate_settings(&self, fs: Arc<dyn Fs>) {
+ self.setting_file_updates_tx
+ .unbounded_send(Box::new(move |_: AsyncApp| {
+ async move {
+ let old_text = Self::load_settings(&fs).await?;
+ let Some(new_text) = migrate_settings(&old_text) else {
+ return anyhow::Ok(());
+ };
+ let initial_path = paths::settings_file().as_path();
+ if fs.is_file(initial_path).await {
+ let backup_path = paths::home_dir().join(".zed_settings_backup");
+ fs.atomic_write(backup_path, old_text)
+ .await
+ .with_context(|| {
+ "Failed to create settings backup in home directory".to_string()
+ })?;
+ let resolved_path =
+ fs.canonicalize(initial_path).await.with_context(|| {
+ format!("Failed to canonicalize settings path {:?}", initial_path)
+ })?;
+ fs.atomic_write(resolved_path.clone(), new_text)
+ .await
+ .with_context(|| {
+ format!("Failed to write settings to file {:?}", resolved_path)
+ })?;
+ } else {
+ fs.atomic_write(initial_path.to_path_buf(), new_text)
+ .await
+ .with_context(|| {
+ format!("Failed to write settings to file {:?}", initial_path)
+ })?;
+ }
+ anyhow::Ok(())
+ }
+ .boxed_local()
+ }))
+ .ok();
+ }
}
#[derive(Debug, Clone, PartialEq)]
@@ -1235,7 +1288,9 @@ fn replace_value_in_json_text(
let found_key = text
.get(key_range.clone())
- .map(|key_text| key_text == format!("\"{}\"", key_path[depth]))
+ .map(|key_text| {
+ depth < key_path.len() && key_text == format!("\"{}\"", key_path[depth])
+ })
.unwrap_or(false);
if found_key {
@@ -16,4 +16,4 @@ doctest = false
[dependencies]
sqlez.workspace = true
sqlformat.workspace = true
-syn = "1.0"
+syn.workspace = true
@@ -516,6 +516,10 @@ impl<T: Item> SumTree<T> {
}
}
+ pub fn ptr_eq(&self, other: &Self) -> bool {
+ Arc::ptr_eq(&self.0, &other.0)
+ }
+
fn push_tree_recursive(
&mut self,
other: SumTree<T>,
@@ -32,7 +32,7 @@ impl<'a, K> Default for MapKeyRef<'a, K> {
}
}
-#[derive(Clone)]
+#[derive(Clone, Debug, PartialEq, Eq)]
pub struct TreeSet<K>(TreeMap<K, ()>)
where
K: Clone + Ord;
@@ -22,6 +22,7 @@ inline_completion.workspace = true
language.workspace = true
log.workspace = true
postage.workspace = true
+project.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
@@ -31,16 +31,16 @@ pub fn init(client: Arc<Client>, cx: &mut App) {
let supermaven = cx.new(|_| Supermaven::Starting);
Supermaven::set_global(supermaven.clone(), cx);
- let mut provider = all_language_settings(None, cx).inline_completions.provider;
- if provider == language::language_settings::InlineCompletionProvider::Supermaven {
+ let mut provider = all_language_settings(None, cx).edit_predictions.provider;
+ if provider == language::language_settings::EditPredictionProvider::Supermaven {
supermaven.update(cx, |supermaven, cx| supermaven.start(client.clone(), cx));
}
cx.observe_global::<SettingsStore>(move |cx| {
- let new_provider = all_language_settings(None, cx).inline_completions.provider;
+ let new_provider = all_language_settings(None, cx).edit_predictions.provider;
if new_provider != provider {
provider = new_provider;
- if provider == language::language_settings::InlineCompletionProvider::Supermaven {
+ if provider == language::language_settings::EditPredictionProvider::Supermaven {
supermaven.update(cx, |supermaven, cx| supermaven.start(client.clone(), cx));
} else {
supermaven.update(cx, |supermaven, _cx| supermaven.stop());
@@ -2,8 +2,9 @@ use crate::{Supermaven, SupermavenCompletionStateId};
use anyhow::Result;
use futures::StreamExt as _;
use gpui::{App, Context, Entity, EntityId, Task};
-use inline_completion::{Direction, InlineCompletion, InlineCompletionProvider};
-use language::{language_settings::all_language_settings, Anchor, Buffer, BufferSnapshot};
+use inline_completion::{Direction, EditPredictionProvider, InlineCompletion};
+use language::{Anchor, Buffer, BufferSnapshot};
+use project::Project;
use std::{
ops::{AddAssign, Range},
path::Path,
@@ -96,7 +97,7 @@ fn completion_from_diff(
}
}
-impl InlineCompletionProvider for SupermavenCompletionProvider {
+impl EditPredictionProvider for SupermavenCompletionProvider {
fn name() -> &'static str {
"supermaven"
}
@@ -109,20 +110,8 @@ impl InlineCompletionProvider for SupermavenCompletionProvider {
false
}
- fn show_completions_in_normal_mode() -> bool {
- false
- }
-
- fn is_enabled(&self, buffer: &Entity<Buffer>, cursor_position: Anchor, cx: &App) -> bool {
- if !self.supermaven.read(cx).is_enabled() {
- return false;
- }
-
- let buffer = buffer.read(cx);
- let file = buffer.file();
- let language = buffer.language_at(cursor_position);
- let settings = all_language_settings(file, cx);
- settings.inline_completions_enabled(language.as_ref(), file.map(|f| f.path().as_ref()), cx)
+ fn is_enabled(&self, _buffer: &Entity<Buffer>, _cursor_position: Anchor, cx: &App) -> bool {
+ self.supermaven.read(cx).is_enabled()
}
fn is_refreshing(&self) -> bool {
@@ -131,6 +120,7 @@ impl InlineCompletionProvider for SupermavenCompletionProvider {
fn refresh(
&mut self,
+ _project: Option<Entity<Project>>,
buffer_handle: Entity<Buffer>,
cursor_position: Anchor,
debounce: bool,
@@ -25,6 +25,7 @@ use workspace::{
const PANEL_WIDTH_REMS: f32 = 28.;
#[derive(PartialEq, Clone, Deserialize, JsonSchema, Default)]
+#[serde(deny_unknown_fields)]
pub struct Toggle {
#[serde(default)]
pub select_last: bool,
@@ -5,6 +5,7 @@ use menu::SelectPrev;
use project::{Project, ProjectPath};
use serde_json::json;
use std::path::Path;
+use util::path;
use workspace::{AppState, Workspace};
#[ctor::ctor]
@@ -24,7 +25,7 @@ async fn test_open_with_prev_tab_selected_and_cycle_on_toggle_action(
.fs
.as_fake()
.insert_tree(
- "/root",
+ path!("/root"),
json!({
"1.txt": "First file",
"2.txt": "Second file",
@@ -34,7 +35,7 @@ async fn test_open_with_prev_tab_selected_and_cycle_on_toggle_action(
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
@@ -81,7 +82,7 @@ async fn test_open_with_last_tab_selected(cx: &mut gpui::TestAppContext) {
.fs
.as_fake()
.insert_tree(
- "/root",
+ path!("/root"),
json!({
"1.txt": "First file",
"2.txt": "Second file",
@@ -90,7 +91,7 @@ async fn test_open_with_last_tab_selected(cx: &mut gpui::TestAppContext) {
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
@@ -172,10 +173,10 @@ async fn test_open_with_single_item(cx: &mut gpui::TestAppContext) {
app_state
.fs
.as_fake()
- .insert_tree("/root", json!({"1.txt": "Single file"}))
+ .insert_tree(path!("/root"), json!({"1.txt": "Single file"}))
.await;
- let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
@@ -195,7 +196,7 @@ async fn test_close_selected_item(cx: &mut gpui::TestAppContext) {
.fs
.as_fake()
.insert_tree(
- "/root",
+ path!("/root"),
json!({
"1.txt": "First file",
"2.txt": "Second file",
@@ -203,7 +204,7 @@ async fn test_close_selected_item(cx: &mut gpui::TestAppContext) {
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
@@ -241,7 +242,7 @@ async fn test_close_preserves_selected_position(cx: &mut gpui::TestAppContext) {
.fs
.as_fake()
.insert_tree(
- "/root",
+ path!("/root"),
json!({
"1.txt": "First file",
"2.txt": "Second file",
@@ -250,7 +251,7 @@ async fn test_close_preserves_selected_position(cx: &mut gpui::TestAppContext) {
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
@@ -603,6 +603,7 @@ mod tests {
use project::{ContextProviderWithTasks, FakeFs, Project};
use serde_json::json;
use task::TaskTemplates;
+ use util::path;
use workspace::CloseInactiveTabsAndPanes;
use crate::{modal::Spawn, tests::init_test};
@@ -614,7 +615,7 @@ mod tests {
init_test(cx);
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
".zed": {
"tasks.json": r#"[
@@ -635,7 +636,7 @@ mod tests {
)
.await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
@@ -654,7 +655,7 @@ mod tests {
let _ = workspace
.update_in(cx, |workspace, window, cx| {
- workspace.open_abs_path(PathBuf::from("/dir/a.ts"), true, window, cx)
+ workspace.open_abs_path(PathBuf::from(path!("/dir/a.ts")), true, window, cx)
})
.await
.unwrap();
@@ -778,7 +779,7 @@ mod tests {
init_test(cx);
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
".zed": {
"tasks.json": r#"[
@@ -800,7 +801,7 @@ mod tests {
)
.await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
@@ -819,7 +820,7 @@ mod tests {
let _ = workspace
.update_in(cx, |workspace, window, cx| {
workspace.open_abs_path(
- PathBuf::from("/dir/file_with.odd_extension"),
+ PathBuf::from(path!("/dir/file_with.odd_extension")),
true,
window,
cx,
@@ -832,8 +833,8 @@ mod tests {
assert_eq!(
task_names(&tasks_picker, cx),
vec![
- "hello from /dir/file_with.odd_extension:1:1".to_string(),
- "opened now: /dir".to_string()
+ concat!("hello from ", path!("/dir/file_with.odd_extension:1:1")).to_string(),
+ concat!("opened now: ", path!("/dir")).to_string(),
],
"Second opened buffer should fill the context, labels should be trimmed if long enough"
);
@@ -846,7 +847,7 @@ mod tests {
let second_item = workspace
.update_in(cx, |workspace, window, cx| {
workspace.open_abs_path(
- PathBuf::from("/dir/file_without_extension"),
+ PathBuf::from(path!("/dir/file_without_extension")),
true,
window,
cx,
@@ -868,8 +869,8 @@ mod tests {
assert_eq!(
task_names(&tasks_picker, cx),
vec![
- "hello from /dir/file_without_extension:2:3".to_string(),
- "opened now: /dir".to_string()
+ concat!("hello from ", path!("/dir/file_without_extension:2:3")).to_string(),
+ concat!("opened now: ", path!("/dir")).to_string(),
],
"Opened buffer should fill the context, labels should be trimmed if long enough"
);
@@ -885,7 +886,7 @@ mod tests {
init_test(cx);
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
"a1.ts": "// a1",
"a2.ts": "// a2",
@@ -894,7 +895,7 @@ mod tests {
)
.await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
project.read_with(cx, |project, _| {
let language_registry = project.languages();
language_registry.add(Arc::new(
@@ -955,7 +956,7 @@ mod tests {
let _ts_file_1 = workspace
.update_in(cx, |workspace, window, cx| {
- workspace.open_abs_path(PathBuf::from("/dir/a1.ts"), true, window, cx)
+ workspace.open_abs_path(PathBuf::from(path!("/dir/a1.ts")), true, window, cx)
})
.await
.unwrap();
@@ -963,23 +964,28 @@ mod tests {
assert_eq!(
task_names(&tasks_picker, cx),
vec![
- "Another task from file /dir/a1.ts",
- "TypeScript task from file /dir/a1.ts",
+ concat!("Another task from file ", path!("/dir/a1.ts")),
+ concat!("TypeScript task from file ", path!("/dir/a1.ts")),
"Task without variables",
],
"Should open spawn TypeScript tasks for the opened file, tasks with most template variables above, all groups sorted alphanumerically"
);
+
emulate_task_schedule(
tasks_picker,
&project,
- "TypeScript task from file /dir/a1.ts",
+ concat!("TypeScript task from file ", path!("/dir/a1.ts")),
cx,
);
let tasks_picker = open_spawn_tasks(&workspace, cx);
assert_eq!(
task_names(&tasks_picker, cx),
- vec!["TypeScript task from file /dir/a1.ts", "Another task from file /dir/a1.ts", "Task without variables"],
+ vec![
+ concat!("TypeScript task from file ", path!("/dir/a1.ts")),
+ concat!("Another task from file ", path!("/dir/a1.ts")),
+ "Task without variables",
+ ],
"After spawning the task and getting it into the history, it should be up in the sort as recently used.
Tasks with the same labels and context are deduplicated."
);
@@ -991,7 +997,7 @@ mod tests {
let _ts_file_2 = workspace
.update_in(cx, |workspace, window, cx| {
- workspace.open_abs_path(PathBuf::from("/dir/a2.ts"), true, window, cx)
+ workspace.open_abs_path(PathBuf::from(path!("/dir/a2.ts")), true, window, cx)
})
.await
.unwrap();
@@ -999,10 +1005,10 @@ mod tests {
assert_eq!(
task_names(&tasks_picker, cx),
vec![
- "TypeScript task from file /dir/a1.ts",
- "Another task from file /dir/a2.ts",
- "TypeScript task from file /dir/a2.ts",
- "Task without variables"
+ concat!("TypeScript task from file ", path!("/dir/a1.ts")),
+ concat!("Another task from file ", path!("/dir/a2.ts")),
+ concat!("TypeScript task from file ", path!("/dir/a2.ts")),
+ "Task without variables",
],
"Even when both TS files are open, should only show the history (on the top), and tasks, resolved for the current file"
);
@@ -1029,7 +1035,7 @@ mod tests {
emulate_task_schedule(tasks_picker, &project, "Rust task", cx);
let _ts_file_2 = workspace
.update_in(cx, |workspace, window, cx| {
- workspace.open_abs_path(PathBuf::from("/dir/a2.ts"), true, window, cx)
+ workspace.open_abs_path(PathBuf::from(path!("/dir/a2.ts")), true, window, cx)
})
.await
.unwrap();
@@ -1037,10 +1043,10 @@ mod tests {
assert_eq!(
task_names(&tasks_picker, cx),
vec![
- "TypeScript task from file /dir/a1.ts",
- "Another task from file /dir/a2.ts",
- "TypeScript task from file /dir/a2.ts",
- "Task without variables"
+ concat!("TypeScript task from file ", path!("/dir/a1.ts")),
+ concat!("Another task from file ", path!("/dir/a2.ts")),
+ concat!("TypeScript task from file ", path!("/dir/a2.ts")),
+ "Task without variables",
],
"After closing all but *.rs tabs, running a Rust task and switching back to TS tasks, \
same TS spawn history should be restored"
@@ -262,6 +262,7 @@ mod tests {
use serde_json::json;
use task::{TaskContext, TaskVariables, VariableName};
use ui::VisualContext;
+ use util::{path, separator};
use workspace::{AppState, Workspace};
use crate::task_context;
@@ -271,7 +272,7 @@ mod tests {
init_test(cx);
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
- "/dir",
+ path!("/dir"),
json!({
".zed": {
"tasks.json": r#"[
@@ -295,7 +296,7 @@ mod tests {
}),
)
.await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
let worktree_store = project.update(cx, |project, _| project.worktree_store().clone());
let rust_language = Arc::new(
Language::new(
@@ -375,17 +376,18 @@ mod tests {
task_context(workspace, window, cx)
})
.await;
+
assert_eq!(
first_context,
TaskContext {
- cwd: Some("/dir".into()),
+ cwd: Some(path!("/dir").into()),
task_variables: TaskVariables::from_iter([
- (VariableName::File, "/dir/rust/b.rs".into()),
+ (VariableName::File, path!("/dir/rust/b.rs").into()),
(VariableName::Filename, "b.rs".into()),
- (VariableName::RelativeFile, "rust/b.rs".into()),
- (VariableName::Dirname, "/dir/rust".into()),
+ (VariableName::RelativeFile, separator!("rust/b.rs").into()),
+ (VariableName::Dirname, path!("/dir/rust").into()),
(VariableName::Stem, "b".into()),
- (VariableName::WorktreeRoot, "/dir".into()),
+ (VariableName::WorktreeRoot, path!("/dir").into()),
(VariableName::Row, "1".into()),
(VariableName::Column, "1".into()),
]),
@@ -407,14 +409,14 @@ mod tests {
})
.await,
TaskContext {
- cwd: Some("/dir".into()),
+ cwd: Some(path!("/dir").into()),
task_variables: TaskVariables::from_iter([
- (VariableName::File, "/dir/rust/b.rs".into()),
+ (VariableName::File, path!("/dir/rust/b.rs").into()),
(VariableName::Filename, "b.rs".into()),
- (VariableName::RelativeFile, "rust/b.rs".into()),
- (VariableName::Dirname, "/dir/rust".into()),
+ (VariableName::RelativeFile, separator!("rust/b.rs").into()),
+ (VariableName::Dirname, path!("/dir/rust").into()),
(VariableName::Stem, "b".into()),
- (VariableName::WorktreeRoot, "/dir".into()),
+ (VariableName::WorktreeRoot, path!("/dir").into()),
(VariableName::Row, "1".into()),
(VariableName::Column, "15".into()),
(VariableName::SelectedText, "is_i".into()),
@@ -433,14 +435,14 @@ mod tests {
})
.await,
TaskContext {
- cwd: Some("/dir".into()),
+ cwd: Some(path!("/dir").into()),
task_variables: TaskVariables::from_iter([
- (VariableName::File, "/dir/a.ts".into()),
+ (VariableName::File, path!("/dir/a.ts").into()),
(VariableName::Filename, "a.ts".into()),
(VariableName::RelativeFile, "a.ts".into()),
- (VariableName::Dirname, "/dir".into()),
+ (VariableName::Dirname, path!("/dir").into()),
(VariableName::Stem, "a".into()),
- (VariableName::WorktreeRoot, "/dir".into()),
+ (VariableName::WorktreeRoot, path!("/dir").into()),
(VariableName::Row, "1".into()),
(VariableName::Column, "1".into()),
(VariableName::Symbol, "this_is_a_test".into()),
@@ -267,6 +267,9 @@ pub struct Panic {
pub backtrace: Vec<String>,
/// Zed version number
pub app_version: String,
+ /// The Git commit SHA that Zed was built at.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub app_commit_sha: Option<String>,
/// Zed release channel (stable, preview, dev)
pub release_channel: String,
pub target: Option<String>,
@@ -1073,6 +1073,15 @@ impl InputHandler for TerminalInputHandler {
fn apple_press_and_hold_enabled(&mut self) -> bool {
false
}
+
+ fn character_index_for_point(
+ &mut self,
+ _point: Point<Pixels>,
+ _window: &mut Window,
+ _cx: &mut App,
+ ) -> Option<usize> {
+ None
+ }
}
pub fn is_blank(cell: &IndexedCell) -> bool {
@@ -35,10 +35,11 @@ use workspace::{
item::SerializableItem,
move_active_item, move_item, pane,
ui::IconName,
- ActivateNextPane, ActivatePane, ActivatePaneInDirection, ActivatePreviousPane,
- DraggedSelection, DraggedTab, ItemId, MoveItemToPane, MoveItemToPaneInDirection, NewTerminal,
- Pane, PaneGroup, SplitDirection, SplitDown, SplitLeft, SplitRight, SplitUp,
- SwapPaneInDirection, ToggleZoom, Workspace,
+ ActivateNextPane, ActivatePane, ActivatePaneDown, ActivatePaneLeft, ActivatePaneRight,
+ ActivatePaneUp, ActivatePreviousPane, DraggedSelection, DraggedTab, ItemId, MoveItemToPane,
+ MoveItemToPaneInDirection, NewTerminal, Pane, PaneGroup, SplitDirection, SplitDown, SplitLeft,
+ SplitRight, SplitUp, SwapPaneDown, SwapPaneLeft, SwapPaneRight, SwapPaneUp, ToggleZoom,
+ Workspace,
};
use anyhow::{anyhow, Context as _, Result};
@@ -889,6 +890,37 @@ impl TerminalPanel {
is_enabled_in_workspace(workspace.read(cx), cx)
})
}
+
+ fn activate_pane_in_direction(
+ &mut self,
+ direction: SplitDirection,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if let Some(pane) = self
+ .center
+ .find_pane_in_direction(&self.active_pane, direction, cx)
+ {
+ window.focus(&pane.focus_handle(cx));
+ } else {
+ self.workspace
+ .update(cx, |workspace, cx| {
+ workspace.activate_pane_in_direction(direction, window, cx)
+ })
+ .ok();
+ }
+ }
+
+ fn swap_pane_in_direction(&mut self, direction: SplitDirection, cx: &mut Context<Self>) {
+ if let Some(to) = self
+ .center
+ .find_pane_in_direction(&self.active_pane, direction, cx)
+ .cloned()
+ {
+ self.center.swap(&self.active_pane, &to);
+ cx.notify();
+ }
+ }
}
fn is_enabled_in_workspace(workspace: &Workspace, cx: &App) -> bool {
@@ -1145,24 +1177,28 @@ impl Render for TerminalPanel {
.ok()
.map(|div| {
div.on_action({
- cx.listener(
- |terminal_panel, action: &ActivatePaneInDirection, window, cx| {
- if let Some(pane) = terminal_panel.center.find_pane_in_direction(
- &terminal_panel.active_pane,
- action.0,
- cx,
- ) {
- window.focus(&pane.focus_handle(cx));
- } else {
- terminal_panel
- .workspace
- .update(cx, |workspace, cx| {
- workspace.activate_pane_in_direction(action.0, window, cx)
- })
- .ok();
- }
- },
- )
+ cx.listener(|terminal_panel, _: &ActivatePaneLeft, window, cx| {
+ terminal_panel.activate_pane_in_direction(SplitDirection::Left, window, cx);
+ })
+ })
+ .on_action({
+ cx.listener(|terminal_panel, _: &ActivatePaneRight, window, cx| {
+ terminal_panel.activate_pane_in_direction(
+ SplitDirection::Right,
+ window,
+ cx,
+ );
+ })
+ })
+ .on_action({
+ cx.listener(|terminal_panel, _: &ActivatePaneUp, window, cx| {
+ terminal_panel.activate_pane_in_direction(SplitDirection::Up, window, cx);
+ })
+ })
+ .on_action({
+ cx.listener(|terminal_panel, _: &ActivatePaneDown, window, cx| {
+ terminal_panel.activate_pane_in_direction(SplitDirection::Down, window, cx);
+ })
})
.on_action(
cx.listener(|terminal_panel, _action: &ActivateNextPane, window, cx| {
@@ -1210,18 +1246,18 @@ impl Render for TerminalPanel {
}
}),
)
- .on_action(
- cx.listener(|terminal_panel, action: &SwapPaneInDirection, _, cx| {
- if let Some(to) = terminal_panel
- .center
- .find_pane_in_direction(&terminal_panel.active_pane, action.0, cx)
- .cloned()
- {
- terminal_panel.center.swap(&terminal_panel.active_pane, &to);
- cx.notify();
- }
- }),
- )
+ .on_action(cx.listener(|terminal_panel, _: &SwapPaneLeft, _, cx| {
+ terminal_panel.swap_pane_in_direction(SplitDirection::Left, cx);
+ }))
+ .on_action(cx.listener(|terminal_panel, _: &SwapPaneRight, _, cx| {
+ terminal_panel.swap_pane_in_direction(SplitDirection::Right, cx);
+ }))
+ .on_action(cx.listener(|terminal_panel, _: &SwapPaneUp, _, cx| {
+ terminal_panel.swap_pane_in_direction(SplitDirection::Up, cx);
+ }))
+ .on_action(cx.listener(|terminal_panel, _: &SwapPaneDown, _, cx| {
+ terminal_panel.swap_pane_in_direction(SplitDirection::Down, cx);
+ }))
.on_action(
cx.listener(|terminal_panel, action: &MoveItemToPane, window, cx| {
let Some(&target_pane) =
@@ -257,7 +257,13 @@ impl TerminalView {
.action("Inline Assist", Box::new(InlineAssist::default()))
})
.separator()
- .action("Close", Box::new(CloseActiveItem { save_intent: None }))
+ .action(
+ "Close",
+ Box::new(CloseActiveItem {
+ save_intent: None,
+ close_pinned: true,
+ }),
+ )
});
window.focus(&context_menu.focus_handle(cx));
@@ -84,9 +84,11 @@ const FILE_ICONS: &[(&str, &str)] = &[
("haskell", "icons/file_icons/haskell.svg"),
("hcl", "icons/file_icons/hcl.svg"),
("heroku", "icons/file_icons/heroku.svg"),
+ ("html", "icons/file_icons/html.svg"),
("image", "icons/file_icons/image.svg"),
("java", "icons/file_icons/java.svg"),
("javascript", "icons/file_icons/javascript.svg"),
+ ("json", "icons/file_icons/code.svg"),
("julia", "icons/file_icons/julia.svg"),
("kotlin", "icons/file_icons/kotlin.svg"),
("lock", "icons/file_icons/lock.svg"),
@@ -164,6 +164,30 @@ impl ThemeSettings {
}
}
}
+
+ /// Reloads the current icon theme.
+ ///
+ /// Reads the [`ThemeSettings`] to know which icon theme should be loaded.
+ pub fn reload_current_icon_theme(cx: &mut App) {
+ let mut theme_settings = ThemeSettings::get_global(cx).clone();
+
+ let active_theme = theme_settings.active_icon_theme.clone();
+ let mut icon_theme_name = active_theme.name.as_ref();
+
+ // If the selected theme doesn't exist, fall back to the default theme.
+ let theme_registry = ThemeRegistry::global(cx);
+ if theme_registry
+ .get_icon_theme(icon_theme_name)
+ .ok()
+ .is_none()
+ {
+ icon_theme_name = DEFAULT_ICON_THEME_NAME;
+ };
+
+ if let Some(_theme) = theme_settings.switch_icon_theme(icon_theme_name, cx) {
+ ThemeSettings::override_global(theme_settings, cx);
+ }
+ }
}
/// The appearance of the system.
@@ -487,6 +511,24 @@ impl ThemeSettings {
self.active_theme = Arc::new(base_theme);
}
}
+
+ /// Switches to the icon theme with the given name, if it exists.
+ ///
+ /// Returns a `Some` containing the new icon theme if it was successful.
+ /// Returns `None` otherwise.
+ pub fn switch_icon_theme(&mut self, icon_theme: &str, cx: &mut App) -> Option<Arc<IconTheme>> {
+ let themes = ThemeRegistry::default_global(cx);
+
+ let mut new_icon_theme = None;
+
+ if let Some(icon_theme) = themes.get_icon_theme(icon_theme).log_err() {
+ self.active_icon_theme = icon_theme.clone();
+ new_icon_theme = Some(icon_theme);
+ cx.refresh_windows();
+ }
+
+ new_icon_theme
+ }
}
// TODO: Make private, change usages to use `get_ui_font_size` instead.
@@ -77,4 +77,8 @@ impl ExtensionThemeProxy for ThemeRegistryProxy {
.await
})
}
+
+ fn reload_current_icon_theme(&self, cx: &mut App) {
+ ThemeSettings::reload_current_icon_theme(cx)
+ }
}
@@ -1,19 +1,31 @@
-use gpui::{impl_actions, Entity, OwnedMenu, OwnedMenuItem};
+use gpui::{Entity, OwnedMenu, OwnedMenuItem};
+
+#[cfg(not(target_os = "macos"))]
+use gpui::{actions, impl_actions};
+
+#[cfg(not(target_os = "macos"))]
use schemars::JsonSchema;
+#[cfg(not(target_os = "macos"))]
use serde::Deserialize;
+
use smallvec::SmallVec;
use ui::{prelude::*, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip};
-impl_actions!(
- app_menu,
- [OpenApplicationMenu, NavigateApplicationMenuInDirection]
-);
+#[cfg(not(target_os = "macos"))]
+impl_actions!(app_menu, [OpenApplicationMenu]);
+#[cfg(not(target_os = "macos"))]
+actions!(app_menu, [ActivateMenuRight, ActivateMenuLeft]);
+
+#[cfg(not(target_os = "macos"))]
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Default)]
pub struct OpenApplicationMenu(String);
-#[derive(Clone, Deserialize, JsonSchema, PartialEq, Default)]
-pub struct NavigateApplicationMenuInDirection(String);
+#[cfg(not(target_os = "macos"))]
+pub enum ActivateDirection {
+ Left,
+ Right,
+}
#[derive(Clone)]
struct MenuEntry {
@@ -190,7 +202,7 @@ impl ApplicationMenu {
#[cfg(not(target_os = "macos"))]
pub fn navigate_menus_in_direction(
&mut self,
- action: &NavigateApplicationMenuInDirection,
+ direction: ActivateDirection,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -202,22 +214,21 @@ impl ApplicationMenu {
return;
};
- let next_index = match action.0.as_str() {
- "Left" => {
+ let next_index = match direction {
+ ActivateDirection::Left => {
if current_index == 0 {
self.entries.len() - 1
} else {
current_index - 1
}
}
- "Right" => {
+ ActivateDirection::Right => {
if current_index == self.entries.len() - 1 {
0
} else {
current_index + 1
}
}
- _ => return,
};
self.entries[current_index].handle.hide(cx);
@@ -9,7 +9,9 @@ mod stories;
use crate::application_menu::ApplicationMenu;
#[cfg(not(target_os = "macos"))]
-use crate::application_menu::{NavigateApplicationMenuInDirection, OpenApplicationMenu};
+use crate::application_menu::{
+ ActivateDirection, ActivateMenuLeft, ActivateMenuRight, OpenApplicationMenu,
+};
use crate::platforms::{platform_linux, platform_mac, platform_windows};
use auto_update::AutoUpdateStatus;
@@ -78,22 +80,36 @@ pub fn init(cx: &mut App) {
});
#[cfg(not(target_os = "macos"))]
- workspace.register_action(
- |workspace, action: &NavigateApplicationMenuInDirection, window, cx| {
- if let Some(titlebar) = workspace
- .titlebar_item()
- .and_then(|item| item.downcast::<TitleBar>().ok())
- {
- titlebar.update(cx, |titlebar, cx| {
- if let Some(ref menu) = titlebar.application_menu {
- menu.update(cx, |menu, cx| {
- menu.navigate_menus_in_direction(action, window, cx)
- });
- }
- });
- }
- },
- );
+ workspace.register_action(|workspace, _: &ActivateMenuRight, window, cx| {
+ if let Some(titlebar) = workspace
+ .titlebar_item()
+ .and_then(|item| item.downcast::<TitleBar>().ok())
+ {
+ titlebar.update(cx, |titlebar, cx| {
+ if let Some(ref menu) = titlebar.application_menu {
+ menu.update(cx, |menu, cx| {
+ menu.navigate_menus_in_direction(ActivateDirection::Right, window, cx)
+ });
+ }
+ });
+ }
+ });
+
+ #[cfg(not(target_os = "macos"))]
+ workspace.register_action(|workspace, _: &ActivateMenuLeft, window, cx| {
+ if let Some(titlebar) = workspace
+ .titlebar_item()
+ .and_then(|item| item.downcast::<TitleBar>().ok())
+ {
+ titlebar.update(cx, |titlebar, cx| {
+ if let Some(ref menu) = titlebar.application_menu {
+ menu.update(cx, |menu, cx| {
+ menu.navigate_menus_in_direction(ActivateDirection::Left, window, cx)
+ });
+ }
+ });
+ }
+ });
})
.detach();
}
@@ -11,6 +11,7 @@ mod image;
mod indent_guides;
mod indicator;
mod keybinding;
+mod keybinding_hint;
mod label;
mod list;
mod modal;
@@ -47,6 +48,7 @@ pub use image::*;
pub use indent_guides::*;
pub use indicator::*;
pub use keybinding::*;
+pub use keybinding_hint::*;
pub use label::*;
pub use list::*;
pub use modal::*;
@@ -2,7 +2,8 @@
use gpui::{AnyView, DefiniteLength};
use crate::{
- prelude::*, Color, DynamicSpacing, ElevationIndex, IconPosition, KeyBinding, TintColor,
+ prelude::*, Color, DynamicSpacing, ElevationIndex, IconPosition, KeyBinding,
+ KeybindingPosition, TintColor,
};
use crate::{
ButtonCommon, ButtonLike, ButtonSize, ButtonStyle, IconName, IconSize, Label, LineHeightStyle,
@@ -92,6 +93,7 @@ pub struct Button {
selected_icon: Option<IconName>,
selected_icon_color: Option<Color>,
key_binding: Option<KeyBinding>,
+ keybinding_position: KeybindingPosition,
alpha: Option<f32>,
}
@@ -117,6 +119,7 @@ impl Button {
selected_icon: None,
selected_icon_color: None,
key_binding: None,
+ keybinding_position: KeybindingPosition::default(),
alpha: None,
}
}
@@ -187,6 +190,15 @@ impl Button {
self
}
+ /// Sets the position of the keybinding relative to the button label.
+ ///
+ /// This method allows you to specify where the keybinding should be displayed
+ /// in relation to the button's label.
+ pub fn key_binding_position(mut self, position: KeybindingPosition) -> Self {
+ self.keybinding_position = position;
+ self
+ }
+
/// Sets the alpha property of the color of label.
pub fn alpha(mut self, alpha: f32) -> Self {
self.alpha = Some(alpha);
@@ -412,6 +424,10 @@ impl RenderOnce for Button {
})
.child(
h_flex()
+ .when(
+ self.keybinding_position == KeybindingPosition::Start,
+ |this| this.flex_row_reverse(),
+ )
.gap(DynamicSpacing::Base06.rems(cx))
.justify_between()
.child(
@@ -1,5 +1,6 @@
#![allow(missing_docs)]
-use crate::{prelude::*, Icon, IconName, IconSize};
+use crate::{prelude::*, Icon, IconName, IconSize, IconWithIndicator, Indicator};
+use gpui::Hsla;
/// An icon that appears within a button.
///
@@ -15,6 +16,8 @@ pub(super) struct ButtonIcon {
selected_icon: Option<IconName>,
selected_icon_color: Option<Color>,
selected_style: Option<ButtonStyle>,
+ indicator: Option<Indicator>,
+ indicator_border_color: Option<Hsla>,
}
impl ButtonIcon {
@@ -28,6 +31,8 @@ impl ButtonIcon {
selected_icon: None,
selected_icon_color: None,
selected_style: None,
+ indicator: None,
+ indicator_border_color: None,
}
}
@@ -56,6 +61,16 @@ impl ButtonIcon {
self.selected_icon_color = color.into();
self
}
+
+ pub fn indicator(mut self, indicator: Indicator) -> Self {
+ self.indicator = Some(indicator);
+ self
+ }
+
+ pub fn indicator_border_color(mut self, color: Option<Hsla>) -> Self {
+ self.indicator_border_color = color;
+ self
+ }
}
impl Disableable for ButtonIcon {
@@ -96,6 +111,13 @@ impl RenderOnce for ButtonIcon {
self.color
};
- Icon::new(icon).size(self.size).color(icon_color)
+ let icon = Icon::new(icon).size(self.size).color(icon_color);
+
+ match self.indicator {
+ Some(indicator) => IconWithIndicator::new(icon, Some(indicator))
+ .indicator_border_color(self.indicator_border_color)
+ .into_any_element(),
+ None => icon.into_any_element(),
+ }
}
}
@@ -45,6 +45,13 @@ pub enum IconPosition {
End,
}
+#[derive(Default, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
+pub enum KeybindingPosition {
+ Start,
+ #[default]
+ End,
+}
+
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Default)]
pub enum TintColor {
#[default]
@@ -1,8 +1,8 @@
#![allow(missing_docs)]
-use gpui::{AnyView, DefiniteLength};
+use gpui::{AnyView, DefiniteLength, Hsla};
use super::button_like::{ButtonCommon, ButtonLike, ButtonSize, ButtonStyle};
-use crate::{prelude::*, ElevationIndex, SelectableButton};
+use crate::{prelude::*, ElevationIndex, Indicator, SelectableButton};
use crate::{IconName, IconSize};
use super::button_icon::ButtonIcon;
@@ -22,6 +22,8 @@ pub struct IconButton {
icon_size: IconSize,
icon_color: Color,
selected_icon: Option<IconName>,
+ indicator: Option<Indicator>,
+ indicator_border_color: Option<Hsla>,
alpha: Option<f32>,
}
@@ -34,6 +36,8 @@ impl IconButton {
icon_size: IconSize::default(),
icon_color: Color::Default,
selected_icon: None,
+ indicator: None,
+ indicator_border_color: None,
alpha: None,
};
this.base.base = this.base.base.debug_selector(|| format!("ICON-{:?}", icon));
@@ -64,6 +68,16 @@ impl IconButton {
self.selected_icon = icon.into();
self
}
+
+ pub fn indicator(mut self, indicator: Indicator) -> Self {
+ self.indicator = Some(indicator);
+ self
+ }
+
+ pub fn indicator_border_color(mut self, color: Option<Hsla>) -> Self {
+ self.indicator_border_color = color;
+ self
+ }
}
impl Disableable for IconButton {
@@ -168,6 +182,10 @@ impl RenderOnce for IconButton {
.toggle_state(is_selected)
.selected_icon(self.selected_icon)
.when_some(selected_style, |this, style| this.selected_style(style))
+ .when_some(self.indicator, |this, indicator| {
+ this.indicator(indicator)
+ .indicator_border_color(self.indicator_border_color)
+ })
.size(self.icon_size)
.color(Color::Custom(color)),
)
@@ -47,6 +47,7 @@ pub struct ContextMenuEntry {
handler: Rc<dyn Fn(Option<&FocusHandle>, &mut Window, &mut App)>,
action: Option<Box<dyn Action>>,
disabled: bool,
+ documentation_aside: Option<Rc<dyn Fn(&mut App) -> AnyElement>>,
}
impl ContextMenuEntry {
@@ -61,6 +62,7 @@ impl ContextMenuEntry {
handler: Rc::new(|_, _, _| {}),
action: None,
disabled: false,
+ documentation_aside: None,
}
}
@@ -108,6 +110,14 @@ impl ContextMenuEntry {
self.disabled = disabled;
self
}
+
+ pub fn documentation_aside(
+ mut self,
+ element: impl Fn(&mut App) -> AnyElement + 'static,
+ ) -> Self {
+ self.documentation_aside = Some(Rc::new(element));
+ self
+ }
}
impl From<ContextMenuEntry> for ContextMenuItem {
@@ -125,6 +135,7 @@ pub struct ContextMenu {
clicked: bool,
_on_blur_subscription: Subscription,
keep_open_on_confirm: bool,
+ documentation_aside: Option<(usize, Rc<dyn Fn(&mut App) -> AnyElement>)>,
}
impl Focusable for ContextMenu {
@@ -161,6 +172,7 @@ impl ContextMenu {
clicked: false,
_on_blur_subscription,
keep_open_on_confirm: false,
+ documentation_aside: None,
},
window,
cx,
@@ -209,6 +221,7 @@ impl ContextMenu {
icon_color: None,
action,
disabled: false,
+ documentation_aside: None,
}));
self
}
@@ -231,6 +244,7 @@ impl ContextMenu {
icon_color: None,
action,
disabled: false,
+ documentation_aside: None,
}));
self
}
@@ -281,6 +295,7 @@ impl ContextMenu {
icon_size: IconSize::Small,
icon_color: None,
disabled: false,
+ documentation_aside: None,
}));
self
}
@@ -294,7 +309,6 @@ impl ContextMenu {
toggle: None,
label: label.into(),
action: Some(action.boxed_clone()),
-
handler: Rc::new(move |context, window, cx| {
if let Some(context) = &context {
window.focus(context);
@@ -306,6 +320,7 @@ impl ContextMenu {
icon_position: IconPosition::End,
icon_color: None,
disabled: true,
+ documentation_aside: None,
}));
self
}
@@ -314,7 +329,6 @@ impl ContextMenu {
self.items.push(ContextMenuItem::Entry(ContextMenuEntry {
toggle: None,
label: label.into(),
-
action: Some(action.boxed_clone()),
handler: Rc::new(move |_, window, cx| window.dispatch_action(action.boxed_clone(), cx)),
icon: Some(IconName::ArrowUpRight),
@@ -322,6 +336,7 @@ impl ContextMenu {
icon_position: IconPosition::End,
icon_color: None,
disabled: false,
+ documentation_aside: None,
}));
self
}
@@ -356,15 +371,16 @@ impl ContextMenu {
}
fn select_first(&mut self, _: &SelectFirst, _: &mut Window, cx: &mut Context<Self>) {
- self.selected_index = self.items.iter().position(|item| item.is_selectable());
+ if let Some(ix) = self.items.iter().position(|item| item.is_selectable()) {
+ self.select_index(ix);
+ }
cx.notify();
}
pub fn select_last(&mut self) -> Option<usize> {
for (ix, item) in self.items.iter().enumerate().rev() {
if item.is_selectable() {
- self.selected_index = Some(ix);
- return Some(ix);
+ return self.select_index(ix);
}
}
None
@@ -384,7 +400,7 @@ impl ContextMenu {
} else {
for (ix, item) in self.items.iter().enumerate().skip(next_index) {
if item.is_selectable() {
- self.selected_index = Some(ix);
+ self.select_index(ix);
cx.notify();
break;
}
@@ -402,7 +418,7 @@ impl ContextMenu {
} else {
for (ix, item) in self.items.iter().enumerate().take(ix).rev() {
if item.is_selectable() {
- self.selected_index = Some(ix);
+ self.select_index(ix);
cx.notify();
break;
}
@@ -413,6 +429,20 @@ impl ContextMenu {
}
}
+ fn select_index(&mut self, ix: usize) -> Option<usize> {
+ self.documentation_aside = None;
+ let item = self.items.get(ix)?;
+ if item.is_selectable() {
+ self.selected_index = Some(ix);
+ if let ContextMenuItem::Entry(entry) = item {
+ if let Some(callback) = &entry.documentation_aside {
+ self.documentation_aside = Some((ix, callback.clone()));
+ }
+ }
+ }
+ Some(ix)
+ }
+
pub fn on_action_dispatch(
&mut self,
dispatched: &dyn Action,
@@ -436,7 +466,7 @@ impl ContextMenu {
false
}
}) {
- self.selected_index = Some(ix);
+ self.select_index(ix);
self.delayed = true;
cx.notify();
let action = dispatched.boxed_clone();
@@ -479,198 +509,275 @@ impl Render for ContextMenu {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let ui_font_size = ThemeSettings::get_global(cx).ui_font_size;
- WithRemSize::new(ui_font_size)
- .occlude()
- .elevation_2(cx)
- .flex()
- .flex_row()
+ let aside = self
+ .documentation_aside
+ .as_ref()
+ .map(|(_, callback)| callback.clone());
+
+ h_flex()
+ .w_full()
+ .items_start()
+ .gap_1()
+ .when_some(aside, |this, aside| {
+ this.child(
+ WithRemSize::new(ui_font_size)
+ .occlude()
+ .elevation_2(cx)
+ .p_2()
+ .max_w_80()
+ .child(aside(cx)),
+ )
+ })
.child(
- v_flex()
- .id("context-menu")
- .min_w(px(200.))
- .max_h(vh(0.75, window))
- .flex_1()
- .overflow_y_scroll()
- .track_focus(&self.focus_handle(cx))
- .on_mouse_down_out(
- cx.listener(|this, _, window, cx| this.cancel(&menu::Cancel, window, cx)),
- )
- .key_context("menu")
- .on_action(cx.listener(ContextMenu::select_first))
- .on_action(cx.listener(ContextMenu::handle_select_last))
- .on_action(cx.listener(ContextMenu::select_next))
- .on_action(cx.listener(ContextMenu::select_prev))
- .on_action(cx.listener(ContextMenu::confirm))
- .on_action(cx.listener(ContextMenu::cancel))
- .when(!self.delayed, |mut el| {
- for item in self.items.iter() {
- if let ContextMenuItem::Entry(ContextMenuEntry {
- action: Some(action),
- disabled: false,
- ..
- }) = item
- {
- el = el.on_boxed_action(
- &**action,
- cx.listener(ContextMenu::on_action_dispatch),
- );
- }
- }
- el
- })
- .child(List::new().children(self.items.iter_mut().enumerate().map(
- |(ix, item)| {
- match item {
- ContextMenuItem::Separator => ListSeparator.into_any_element(),
- ContextMenuItem::Header(header) => {
- ListSubHeader::new(header.clone())
- .inset(true)
- .into_any_element()
+ WithRemSize::new(ui_font_size)
+ .occlude()
+ .elevation_2(cx)
+ .flex()
+ .flex_row()
+ .child(
+ v_flex()
+ .id("context-menu")
+ .min_w(px(200.))
+ .max_h(vh(0.75, window))
+ .flex_1()
+ .overflow_y_scroll()
+ .track_focus(&self.focus_handle(cx))
+ .on_mouse_down_out(cx.listener(|this, _, window, cx| {
+ this.cancel(&menu::Cancel, window, cx)
+ }))
+ .key_context("menu")
+ .on_action(cx.listener(ContextMenu::select_first))
+ .on_action(cx.listener(ContextMenu::handle_select_last))
+ .on_action(cx.listener(ContextMenu::select_next))
+ .on_action(cx.listener(ContextMenu::select_prev))
+ .on_action(cx.listener(ContextMenu::confirm))
+ .on_action(cx.listener(ContextMenu::cancel))
+ .when(!self.delayed, |mut el| {
+ for item in self.items.iter() {
+ if let ContextMenuItem::Entry(ContextMenuEntry {
+ action: Some(action),
+ disabled: false,
+ ..
+ }) = item
+ {
+ el = el.on_boxed_action(
+ &**action,
+ cx.listener(ContextMenu::on_action_dispatch),
+ );
+ }
}
- ContextMenuItem::Label(label) => ListItem::new(ix)
- .inset(true)
- .disabled(true)
- .child(Label::new(label.clone()))
- .into_any_element(),
- ContextMenuItem::Entry(ContextMenuEntry {
- toggle,
- label,
- handler,
- icon,
- icon_position,
- icon_size,
- icon_color,
- action,
- disabled,
- }) => {
- let handler = handler.clone();
- let menu = cx.entity().downgrade();
- let icon_color = if *disabled {
- Color::Muted
- } else {
- icon_color.unwrap_or(Color::Default)
- };
- let label_color = if *disabled {
- Color::Muted
- } else {
- Color::Default
- };
- let label_element = if let Some(icon_name) = icon {
- h_flex()
- .gap_1p5()
- .when(*icon_position == IconPosition::Start, |flex| {
- flex.child(
- Icon::new(*icon_name)
- .size(*icon_size)
- .color(icon_color),
- )
- })
- .child(Label::new(label.clone()).color(label_color))
- .when(*icon_position == IconPosition::End, |flex| {
- flex.child(
- Icon::new(*icon_name)
- .size(*icon_size)
- .color(icon_color),
- )
- })
- .into_any_element()
- } else {
- Label::new(label.clone())
- .color(label_color)
- .into_any_element()
- };
-
- ListItem::new(ix)
- .inset(true)
- .disabled(*disabled)
- .toggle_state(Some(ix) == self.selected_index)
- .when_some(*toggle, |list_item, (position, toggled)| {
- let contents = if toggled {
- v_flex().flex_none().child(
- Icon::new(IconName::Check).color(Color::Accent),
- )
+ el
+ })
+ .child(List::new().children(self.items.iter_mut().enumerate().map(
+ |(ix, item)| {
+ match item {
+ ContextMenuItem::Separator => {
+ ListSeparator.into_any_element()
+ }
+ ContextMenuItem::Header(header) => {
+ ListSubHeader::new(header.clone())
+ .inset(true)
+ .into_any_element()
+ }
+ ContextMenuItem::Label(label) => ListItem::new(ix)
+ .inset(true)
+ .disabled(true)
+ .child(Label::new(label.clone()))
+ .into_any_element(),
+ ContextMenuItem::Entry(ContextMenuEntry {
+ toggle,
+ label,
+ handler,
+ icon,
+ icon_position,
+ icon_size,
+ icon_color,
+ action,
+ disabled,
+ documentation_aside,
+ }) => {
+ let handler = handler.clone();
+ let menu = cx.entity().downgrade();
+ let icon_color = if *disabled {
+ Color::Muted
+ } else {
+ icon_color.unwrap_or(Color::Default)
+ };
+ let label_color = if *disabled {
+ Color::Muted
} else {
- v_flex()
- .flex_none()
- .size(IconSize::default().rems())
+ Color::Default
};
- match position {
- IconPosition::Start => {
- list_item.start_slot(contents)
- }
- IconPosition::End => list_item.end_slot(contents),
- }
- })
- .child(
- h_flex()
- .w_full()
- .justify_between()
- .child(label_element)
- .debug_selector(|| format!("MENU_ITEM-{}", label))
- .children(action.as_ref().and_then(|action| {
- self.action_context
- .as_ref()
- .map(|focus| {
- KeyBinding::for_action_in(
- &**action, focus, window,
+ let label_element = if let Some(icon_name) = icon {
+ h_flex()
+ .gap_1p5()
+ .when(
+ *icon_position == IconPosition::Start,
+ |flex| {
+ flex.child(
+ Icon::new(*icon_name)
+ .size(*icon_size)
+ .color(icon_color),
)
- })
- .unwrap_or_else(|| {
- KeyBinding::for_action(
- &**action, window,
+ },
+ )
+ .child(
+ Label::new(label.clone())
+ .color(label_color),
+ )
+ .when(
+ *icon_position == IconPosition::End,
+ |flex| {
+ flex.child(
+ Icon::new(*icon_name)
+ .size(*icon_size)
+ .color(icon_color),
)
- })
- .map(|binding| div().ml_4().child(binding))
- })),
- )
- .on_click({
- let context = self.action_context.clone();
- move |_, window, cx| {
- handler(context.as_ref(), window, cx);
- menu.update(cx, |menu, cx| {
- menu.clicked = true;
- cx.emit(DismissEvent);
+ },
+ )
+ .into_any_element()
+ } else {
+ Label::new(label.clone())
+ .color(label_color)
+ .into_any_element()
+ };
+ let documentation_aside_callback =
+ documentation_aside.clone();
+ div()
+ .id(("context-menu-child", ix))
+ .when_some(
+ documentation_aside_callback,
+ |this, documentation_aside_callback| {
+ this.occlude().on_hover(cx.listener(
+ move |menu, hovered, _, cx| {
+ if *hovered {
+ menu.documentation_aside = Some((ix, documentation_aside_callback.clone()));
+ cx.notify();
+ } else if matches!(menu.documentation_aside, Some((id, _)) if id == ix) {
+ menu.documentation_aside = None;
+ cx.notify();
+ }
+ },
+ ))
+ },
+ )
+ .child(
+ ListItem::new(ix)
+ .inset(true)
+ .disabled(*disabled)
+ .toggle_state(
+ Some(ix) == self.selected_index,
+ )
+ .when_some(
+ *toggle,
+ |list_item, (position, toggled)| {
+ let contents = if toggled {
+ v_flex().flex_none().child(
+ Icon::new(IconName::Check)
+ .color(Color::Accent),
+ )
+ } else {
+ v_flex().flex_none().size(
+ IconSize::default().rems(),
+ )
+ };
+ match position {
+ IconPosition::Start => {
+ list_item
+ .start_slot(contents)
+ }
+ IconPosition::End => {
+ list_item.end_slot(contents)
+ }
+ }
+ },
+ )
+ .child(
+ h_flex()
+ .w_full()
+ .justify_between()
+ .child(label_element)
+ .debug_selector(|| {
+ format!("MENU_ITEM-{}", label)
+ })
+ .children(
+ action.as_ref().and_then(
+ |action| {
+ self.action_context
+ .as_ref()
+ .map(|focus| {
+ KeyBinding::for_action_in(
+ &**action, focus,
+ window,
+ )
+ })
+ .unwrap_or_else(|| {
+ KeyBinding::for_action(
+ &**action, window,
+ )
+ })
+ .map(|binding| {
+ div().ml_4().child(binding)
+ })
+ },
+ ),
+ ),
+ )
+ .on_click({
+ let context =
+ self.action_context.clone();
+ move |_, window, cx| {
+ handler(
+ context.as_ref(),
+ window,
+ cx,
+ );
+ menu.update(cx, |menu, cx| {
+ menu.clicked = true;
+ cx.emit(DismissEvent);
+ })
+ .ok();
+ }
+ }),
+ )
+ .into_any_element()
+ }
+ ContextMenuItem::CustomEntry {
+ entry_render,
+ handler,
+ selectable,
+ } => {
+ let handler = handler.clone();
+ let menu = cx.entity().downgrade();
+ let selectable = *selectable;
+ ListItem::new(ix)
+ .inset(true)
+ .toggle_state(if selectable {
+ Some(ix) == self.selected_index
+ } else {
+ false
})
- .ok();
- }
- })
- .into_any_element()
- }
- ContextMenuItem::CustomEntry {
- entry_render,
- handler,
- selectable,
- } => {
- let handler = handler.clone();
- let menu = cx.entity().downgrade();
- let selectable = *selectable;
- ListItem::new(ix)
- .inset(true)
- .toggle_state(if selectable {
- Some(ix) == self.selected_index
- } else {
- false
- })
- .selectable(selectable)
- .when(selectable, |item| {
- item.on_click({
- let context = self.action_context.clone();
- move |_, window, cx| {
- handler(context.as_ref(), window, cx);
- menu.update(cx, |menu, cx| {
- menu.clicked = true;
- cx.emit(DismissEvent);
+ .selectable(selectable)
+ .when(selectable, |item| {
+ item.on_click({
+ let context = self.action_context.clone();
+ move |_, window, cx| {
+ handler(context.as_ref(), window, cx);
+ menu.update(cx, |menu, cx| {
+ menu.clicked = true;
+ cx.emit(DismissEvent);
+ })
+ .ok();
+ }
})
- .ok();
- }
- })
- })
- .child(entry_render(window, cx))
- .into_any_element()
- }
- }
- },
- ))),
+ })
+ .child(entry_render(window, cx))
+ .into_any_element()
+ }
+ }
+ },
+ ))),
+ ),
)
}
}
@@ -70,6 +70,7 @@ pub enum IconSize {
Medium,
/// 48px
XLarge,
+ Custom(Pixels),
}
impl IconSize {
@@ -80,6 +81,7 @@ impl IconSize {
IconSize::Small => rems_from_px(14.),
IconSize::Medium => rems_from_px(16.),
IconSize::XLarge => rems_from_px(48.),
+ IconSize::Custom(size) => rems_from_px(size.into()),
}
}
@@ -96,6 +98,8 @@ impl IconSize {
IconSize::Small => DynamicSpacing::Base02.px(cx),
IconSize::Medium => DynamicSpacing::Base02.px(cx),
IconSize::XLarge => DynamicSpacing::Base02.px(cx),
+ // TODO: Wire into dynamic spacing
+ IconSize::Custom(size) => px(size.into()),
};
(icon_size, padding)
@@ -164,6 +168,7 @@ pub enum IconName {
ChevronRight,
ChevronUp,
ChevronUpDown,
+ Circle,
Close,
Code,
Command,
@@ -323,6 +328,7 @@ pub enum IconName {
ZedAssistant2,
ZedAssistantFilled,
ZedPredict,
+ ZedPredictDisabled,
ZedXCopilot,
}
@@ -15,6 +15,7 @@ pub struct KeyBinding {
/// The [`PlatformStyle`] to use when displaying this keybinding.
platform_style: PlatformStyle,
+ size: Option<Pixels>,
}
impl KeyBinding {
@@ -47,6 +48,7 @@ impl KeyBinding {
Self {
key_binding,
platform_style: PlatformStyle::platform(),
+ size: None,
}
}
@@ -55,6 +57,12 @@ impl KeyBinding {
self.platform_style = platform_style;
self
}
+
+ /// Sets the size for this [`KeyBinding`].
+ pub fn size(mut self, size: Pixels) -> Self {
+ self.size = Some(size);
+ self
+ }
}
impl RenderOnce for KeyBinding {
@@ -83,9 +91,12 @@ impl RenderOnce for KeyBinding {
&keystroke.modifiers,
self.platform_style,
None,
+ self.size,
false,
))
- .map(|el| el.child(render_key(&keystroke, self.platform_style, None)))
+ .map(|el| {
+ el.child(render_key(&keystroke, self.platform_style, None, self.size))
+ })
}))
}
}
@@ -94,11 +105,14 @@ pub fn render_key(
keystroke: &Keystroke,
platform_style: PlatformStyle,
color: Option<Color>,
+ size: Option<Pixels>,
) -> AnyElement {
let key_icon = icon_for_key(keystroke, platform_style);
match key_icon {
- Some(icon) => KeyIcon::new(icon, color).into_any_element(),
- None => Key::new(capitalize(&keystroke.key), color).into_any_element(),
+ Some(icon) => KeyIcon::new(icon, color).size(size).into_any_element(),
+ None => Key::new(capitalize(&keystroke.key), color)
+ .size(size)
+ .into_any_element(),
}
}
@@ -130,6 +144,7 @@ pub fn render_modifiers(
modifiers: &Modifiers,
platform_style: PlatformStyle,
color: Option<Color>,
+ size: Option<Pixels>,
standalone: bool,
) -> impl Iterator<Item = AnyElement> {
enum KeyOrIcon {
@@ -200,8 +215,8 @@ pub fn render_modifiers(
PlatformStyle::Windows => vec![modifier.windows, KeyOrIcon::Key("+")],
})
.map(move |key_or_icon| match key_or_icon {
- KeyOrIcon::Key(key) => Key::new(key, color).into_any_element(),
- KeyOrIcon::Icon(icon) => KeyIcon::new(icon, color).into_any_element(),
+ KeyOrIcon::Key(key) => Key::new(key, color).size(size).into_any_element(),
+ KeyOrIcon::Icon(icon) => KeyIcon::new(icon, color).size(size).into_any_element(),
})
}
@@ -209,26 +224,26 @@ pub fn render_modifiers(
pub struct Key {
key: SharedString,
color: Option<Color>,
+ size: Option<Pixels>,
}
impl RenderOnce for Key {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
let single_char = self.key.len() == 1;
+ let size = self.size.unwrap_or(px(14.));
+ let size_f32: f32 = size.into();
div()
.py_0()
.map(|this| {
if single_char {
- this.w(rems_from_px(14.))
- .flex()
- .flex_none()
- .justify_center()
+ this.w(size).flex().flex_none().justify_center()
} else {
this.px_0p5()
}
})
- .h(rems_from_px(14.))
- .text_ui(cx)
+ .h(rems_from_px(size_f32))
+ .text_size(size)
.line_height(relative(1.))
.text_color(self.color.unwrap_or(Color::Muted).color(cx))
.child(self.key.clone())
@@ -240,27 +255,47 @@ impl Key {
Self {
key: key.into(),
color,
+ size: None,
}
}
+
+ pub fn size(mut self, size: impl Into<Option<Pixels>>) -> Self {
+ self.size = size.into();
+ self
+ }
}
#[derive(IntoElement)]
pub struct KeyIcon {
icon: IconName,
color: Option<Color>,
+ size: Option<Pixels>,
}
impl RenderOnce for KeyIcon {
- fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement {
+ fn render(self, window: &mut Window, _cx: &mut App) -> impl IntoElement {
+ let size = self
+ .size
+ .unwrap_or(IconSize::Small.rems().to_pixels(window.rem_size()));
+
Icon::new(self.icon)
- .size(IconSize::XSmall)
+ .size(IconSize::Custom(size))
.color(self.color.unwrap_or(Color::Muted))
}
}
impl KeyIcon {
pub fn new(icon: IconName, color: Option<Color>) -> Self {
- Self { icon, color }
+ Self {
+ icon,
+ color,
+ size: None,
+ }
+ }
+
+ pub fn size(mut self, size: impl Into<Option<Pixels>>) -> Self {
+ self.size = size.into();
+ self
}
}
@@ -0,0 +1,307 @@
+use crate::{h_flex, prelude::*};
+use crate::{ElevationIndex, KeyBinding};
+use gpui::{point, App, BoxShadow, IntoElement, Window};
+use smallvec::smallvec;
+
+/// Represents a hint for a keybinding, optionally with a prefix and suffix.
+///
+/// This struct allows for the creation and customization of a keybinding hint,
+/// which can be used to display keyboard shortcuts or commands in a user interface.
+///
+/// # Examples
+///
+/// ```
+/// use ui::prelude::*;
+///
+/// let hint = KeybindingHint::new(KeyBinding::from_str("Ctrl+S"))
+/// .prefix("Save:")
+/// .size(Pixels::from(14.0));
+/// ```
+#[derive(Debug, IntoElement, Clone)]
+pub struct KeybindingHint {
+ prefix: Option<SharedString>,
+ suffix: Option<SharedString>,
+ keybinding: KeyBinding,
+ size: Option<Pixels>,
+ elevation: Option<ElevationIndex>,
+}
+
+impl KeybindingHint {
+ /// Creates a new `KeybindingHint` with the specified keybinding.
+ ///
+ /// This method initializes a new `KeybindingHint` instance with the given keybinding,
+ /// setting all other fields to their default values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use ui::prelude::*;
+ ///
+ /// let hint = KeybindingHint::new(KeyBinding::from_str("Ctrl+C"));
+ /// ```
+ pub fn new(keybinding: KeyBinding) -> Self {
+ Self {
+ prefix: None,
+ suffix: None,
+ keybinding,
+ size: None,
+ elevation: None,
+ }
+ }
+
+ /// Creates a new `KeybindingHint` with a prefix and keybinding.
+ ///
+ /// This method initializes a new `KeybindingHint` instance with the given prefix and keybinding,
+ /// setting all other fields to their default values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use ui::prelude::*;
+ ///
+ /// let hint = KeybindingHint::with_prefix("Copy:", KeyBinding::from_str("Ctrl+C"));
+ /// ```
+ pub fn with_prefix(prefix: impl Into<SharedString>, keybinding: KeyBinding) -> Self {
+ Self {
+ prefix: Some(prefix.into()),
+ suffix: None,
+ keybinding,
+ size: None,
+ elevation: None,
+ }
+ }
+
+ /// Creates a new `KeybindingHint` with a keybinding and suffix.
+ ///
+ /// This method initializes a new `KeybindingHint` instance with the given keybinding and suffix,
+ /// setting all other fields to their default values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use ui::prelude::*;
+ ///
+ /// let hint = KeybindingHint::with_suffix(KeyBinding::from_str("Ctrl+V"), "Paste");
+ /// ```
+ pub fn with_suffix(keybinding: KeyBinding, suffix: impl Into<SharedString>) -> Self {
+ Self {
+ prefix: None,
+ suffix: Some(suffix.into()),
+ keybinding,
+ size: None,
+ elevation: None,
+ }
+ }
+
+ /// Sets the prefix for the keybinding hint.
+ ///
+ /// This method allows adding or changing the prefix text that appears before the keybinding.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use ui::prelude::*;
+ ///
+ /// let hint = KeybindingHint::new(KeyBinding::from_str("Ctrl+X"))
+ /// .prefix("Cut:");
+ /// ```
+ pub fn prefix(mut self, prefix: impl Into<SharedString>) -> Self {
+ self.prefix = Some(prefix.into());
+ self
+ }
+
+ /// Sets the suffix for the keybinding hint.
+ ///
+ /// This method allows adding or changing the suffix text that appears after the keybinding.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use ui::prelude::*;
+ ///
+ /// let hint = KeybindingHint::new(KeyBinding::from_str("Ctrl+F"))
+ /// .suffix("Find");
+ /// ```
+ pub fn suffix(mut self, suffix: impl Into<SharedString>) -> Self {
+ self.suffix = Some(suffix.into());
+ self
+ }
+
+ /// Sets the size of the keybinding hint.
+ ///
+ /// This method allows specifying the size of the keybinding hint in pixels.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use ui::prelude::*;
+ ///
+ /// let hint = KeybindingHint::new(KeyBinding::from_str("Ctrl+Z"))
+ /// .size(Pixels::from(16.0));
+ /// ```
+ pub fn size(mut self, size: impl Into<Option<Pixels>>) -> Self {
+ self.size = size.into();
+ self
+ }
+
+ /// Sets the elevation of the keybinding hint.
+ ///
+ /// This method allows specifying the elevation index for the keybinding hint,
+ /// which affects its visual appearance in terms of depth or layering.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use ui::prelude::*;
+ ///
+ /// let hint = KeybindingHint::new(KeyBinding::from_str("Ctrl+A"))
+/// .elevation(ElevationIndex::Surface);
+ /// ```
+ pub fn elevation(mut self, elevation: impl Into<Option<ElevationIndex>>) -> Self {
+ self.elevation = elevation.into();
+ self
+ }
+}
+
+impl RenderOnce for KeybindingHint {
+ fn render(self, window: &mut Window, cx: &mut App) -> impl IntoElement {
+ let colors = cx.theme().colors().clone();
+
+ let size = self
+ .size
+ .unwrap_or(TextSize::Small.rems(cx).to_pixels(window.rem_size()));
+ let kb_size = size - px(2.0);
+ let kb_bg = if let Some(elevation) = self.elevation {
+ elevation.on_elevation_bg(cx)
+ } else {
+ theme::color_alpha(colors.element_background, 0.6)
+ };
+
+ h_flex()
+ .items_center()
+ .gap_0p5()
+ .font_buffer(cx)
+ .text_size(size)
+ .text_color(colors.text_muted)
+ .children(self.prefix)
+ .child(
+ h_flex()
+ .items_center()
+ .rounded_md()
+ .px_0p5()
+ .mr_0p5()
+ .border_1()
+ .border_color(kb_bg)
+ .bg(kb_bg.opacity(0.8))
+ .shadow(smallvec![BoxShadow {
+ color: cx.theme().colors().editor_background.opacity(0.8),
+ offset: point(px(0.), px(1.)),
+ blur_radius: px(0.),
+ spread_radius: px(0.),
+ }])
+ .child(self.keybinding.size(kb_size)),
+ )
+ .children(self.suffix)
+ }
+}
+
+impl ComponentPreview for KeybindingHint {
+ fn description() -> impl Into<Option<&'static str>> {
+ "Used to display hint text for keyboard shortcuts. Can have a prefix and suffix."
+ }
+
+ fn examples(window: &mut Window, _cx: &mut App) -> Vec<ComponentExampleGroup<Self>> {
+ let home_fallback = gpui::KeyBinding::new("home", menu::SelectFirst, None);
+ let home = KeyBinding::for_action(&menu::SelectFirst, window)
+ .unwrap_or(KeyBinding::new(home_fallback));
+
+ let end_fallback = gpui::KeyBinding::new("end", menu::SelectLast, None);
+ let end = KeyBinding::for_action(&menu::SelectLast, window)
+ .unwrap_or(KeyBinding::new(end_fallback));
+
+ let enter_fallback = gpui::KeyBinding::new("enter", menu::Confirm, None);
+ let enter = KeyBinding::for_action(&menu::Confirm, window)
+ .unwrap_or(KeyBinding::new(enter_fallback));
+
+ let escape_fallback = gpui::KeyBinding::new("escape", menu::Cancel, None);
+ let escape = KeyBinding::for_action(&menu::Cancel, window)
+ .unwrap_or(KeyBinding::new(escape_fallback));
+
+ vec![
+ example_group_with_title(
+ "Basic",
+ vec![
+ single_example(
+ "With Prefix",
+ KeybindingHint::with_prefix("Go to Start:", home.clone()),
+ ),
+ single_example(
+ "With Suffix",
+ KeybindingHint::with_suffix(end.clone(), "Go to End"),
+ ),
+ single_example(
+ "With Prefix and Suffix",
+ KeybindingHint::new(enter.clone())
+ .prefix("Confirm:")
+ .suffix("Execute selected action"),
+ ),
+ ],
+ ),
+ example_group_with_title(
+ "Sizes",
+ vec![
+ single_example(
+ "Small",
+ KeybindingHint::new(home.clone())
+ .size(Pixels::from(12.0))
+ .prefix("Small:"),
+ ),
+ single_example(
+ "Medium",
+ KeybindingHint::new(end.clone())
+ .size(Pixels::from(16.0))
+ .suffix("Medium"),
+ ),
+ single_example(
+ "Large",
+ KeybindingHint::new(enter.clone())
+ .size(Pixels::from(20.0))
+ .prefix("Large:")
+ .suffix("Size"),
+ ),
+ ],
+ ),
+ example_group_with_title(
+ "Elevations",
+ vec![
+ single_example(
+ "Surface",
+ KeybindingHint::new(home.clone())
+ .elevation(ElevationIndex::Surface)
+ .prefix("Surface:"),
+ ),
+ single_example(
+ "Elevated Surface",
+ KeybindingHint::new(end.clone())
+ .elevation(ElevationIndex::ElevatedSurface)
+ .suffix("Elevated"),
+ ),
+ single_example(
+ "Editor Surface",
+ KeybindingHint::new(enter.clone())
+ .elevation(ElevationIndex::EditorSurface)
+ .prefix("Editor:")
+ .suffix("Surface"),
+ ),
+ single_example(
+ "Modal Surface",
+ KeybindingHint::new(escape.clone())
+ .elevation(ElevationIndex::ModalSurface)
+ .prefix("Modal:")
+ .suffix("Escape"),
+ ),
+ ],
+ ),
+ ]
+ }
+}
@@ -43,6 +43,7 @@ pub struct Checkbox {
id: ElementId,
toggle_state: ToggleState,
disabled: bool,
+ placeholder: bool,
on_click: Option<Box<dyn Fn(&ToggleState, &mut Window, &mut App) + 'static>>,
filled: bool,
style: ToggleStyle,
@@ -62,6 +63,7 @@ impl Checkbox {
style: ToggleStyle::default(),
tooltip: None,
label: None,
+ placeholder: false,
}
}
@@ -71,6 +73,12 @@ impl Checkbox {
self
}
+ /// Sets the placeholder state of the [`Checkbox`].
+ pub fn placeholder(mut self, placeholder: bool) -> Self {
+ self.placeholder = placeholder;
+ self
+ }
+
/// Binds a handler to the [`Checkbox`] that will be called when clicked.
pub fn on_click(
mut self,
@@ -145,23 +153,26 @@ impl Checkbox {
impl RenderOnce for Checkbox {
fn render(self, _: &mut Window, cx: &mut App) -> impl IntoElement {
let group_id = format!("checkbox_group_{:?}", self.id);
+ let color = if self.disabled {
+ Color::Disabled
+ } else if self.placeholder {
+ Color::Placeholder
+ } else {
+ Color::Selected
+ };
let icon = match self.toggle_state {
- ToggleState::Selected => Some(Icon::new(IconName::Check).size(IconSize::Small).color(
- if self.disabled {
- Color::Disabled
- } else {
- Color::Selected
- },
- )),
- ToggleState::Indeterminate => Some(
- Icon::new(IconName::Dash)
+ ToggleState::Selected => Some(if self.placeholder {
+ Icon::new(IconName::Circle)
+ .size(IconSize::XSmall)
+ .color(color)
+ } else {
+ Icon::new(IconName::Check)
.size(IconSize::Small)
- .color(if self.disabled {
- Color::Disabled
- } else {
- Color::Selected
- }),
- ),
+ .color(color)
+ }),
+ ToggleState::Indeterminate => {
+ Some(Icon::new(IconName::Dash).size(IconSize::Small).color(color))
+ }
ToggleState::Unselected => None,
};
@@ -63,7 +63,7 @@ impl From<Option<bool>> for ToggleState {
match selected {
Some(true) => Self::Selected,
Some(false) => Self::Unselected,
- None => Self::Unselected,
+ None => Self::Indeterminate,
}
}
}
@@ -13,7 +13,7 @@ path = "src/ui_macros.rs"
proc-macro = true
[dependencies]
-proc-macro2 = "1.0.66"
-quote = "1.0.9"
-syn = { version = "1.0.72", features = ["full", "extra-traits"] }
+proc-macro2.workspace = true
+quote.workspace = true
+syn.workspace = true
convert_case.workspace = true
@@ -13,7 +13,7 @@ path = "src/util.rs"
doctest = true
[features]
-test-support = ["tempfile", "git2", "rand"]
+test-support = ["tempfile", "git2", "rand", "util_macros"]
[dependencies]
anyhow.workspace = true
@@ -35,6 +35,7 @@ smol.workspace = true
take-until.workspace = true
tempfile = { workspace = true, optional = true }
unicase.workspace = true
+util_macros = { workspace = true, optional = true }
[target.'cfg(unix)'.dependencies]
libc.workspace = true
@@ -47,3 +48,4 @@ dunce = "1.0"
git2.workspace = true
rand.workspace = true
tempfile.workspace = true
+util_macros.workspace = true
@@ -21,8 +21,8 @@ pub fn home_dir() -> &'static PathBuf {
pub trait PathExt {
fn compact(&self) -> PathBuf;
- fn icon_stem_or_suffix(&self) -> Option<&str>;
fn extension_or_hidden_file_name(&self) -> Option<&str>;
+ fn to_sanitized_string(&self) -> String;
fn try_from_bytes<'a>(bytes: &'a [u8]) -> anyhow::Result<Self>
where
Self: From<&'a Path>,
@@ -73,8 +73,8 @@ impl<T: AsRef<Path>> PathExt for T {
}
}
- /// Returns either the suffix if available, or the file stem otherwise to determine which file icon to use
- fn icon_stem_or_suffix(&self) -> Option<&str> {
+ /// Returns a file's extension or, if the file is hidden, its name without the leading dot
+ fn extension_or_hidden_file_name(&self) -> Option<&str> {
let path = self.as_ref();
let file_name = path.file_name()?.to_str()?;
if file_name.starts_with('.') {
@@ -86,13 +86,18 @@ impl<T: AsRef<Path>> PathExt for T {
.or_else(|| path.file_stem()?.to_str())
}
- /// Returns a file's extension or, if the file is hidden, its name without the leading dot
- fn extension_or_hidden_file_name(&self) -> Option<&str> {
- if let Some(extension) = self.as_ref().extension() {
- return extension.to_str();
+ /// Returns a sanitized string representation of the path.
+ /// Note, on Windows, this assumes that the path is a valid UTF-8 string and
+ /// is not a UNC path.
+ fn to_sanitized_string(&self) -> String {
+ #[cfg(target_os = "windows")]
+ {
+ self.as_ref().to_string_lossy().replace("/", "\\")
+ }
+ #[cfg(not(target_os = "windows"))]
+ {
+ self.as_ref().to_string_lossy().to_string()
}
-
- self.as_ref().file_name()?.to_str()?.split('.').last()
}
}
@@ -115,6 +120,17 @@ impl SanitizedPath {
self.0.to_string_lossy().to_string()
}
+ pub fn to_glob_string(&self) -> String {
+ #[cfg(target_os = "windows")]
+ {
+ self.0.to_string_lossy().replace("/", "\\")
+ }
+ #[cfg(not(target_os = "windows"))]
+ {
+ self.0.to_string_lossy().to_string()
+ }
+ }
+
pub fn join(&self, path: &Self) -> Self {
self.0.join(&path.0).into()
}
@@ -448,14 +464,6 @@ pub fn compare_paths(
}
}
-#[cfg(any(test, feature = "test-support"))]
-pub fn replace_path_separator(path: &str) -> String {
- #[cfg(target_os = "windows")]
- return path.replace("/", std::path::MAIN_SEPARATOR_STR);
- #[cfg(not(target_os = "windows"))]
- return path.to_string();
-}
-
#[cfg(test)]
mod tests {
use super::*;
@@ -793,33 +801,6 @@ mod tests {
}
}
- #[test]
- fn test_icon_stem_or_suffix() {
- // No dots in name
- let path = Path::new("/a/b/c/file_name.rs");
- assert_eq!(path.icon_stem_or_suffix(), Some("rs"));
-
- // Single dot in name
- let path = Path::new("/a/b/c/file.name.rs");
- assert_eq!(path.icon_stem_or_suffix(), Some("rs"));
-
- // No suffix
- let path = Path::new("/a/b/c/file");
- assert_eq!(path.icon_stem_or_suffix(), Some("file"));
-
- // Multiple dots in name
- let path = Path::new("/a/b/c/long.file.name.rs");
- assert_eq!(path.icon_stem_or_suffix(), Some("rs"));
-
- // Hidden file, no extension
- let path = Path::new("/a/b/c/.gitignore");
- assert_eq!(path.icon_stem_or_suffix(), Some("gitignore"));
-
- // Hidden file, with extension
- let path = Path::new("/a/b/c/.eslintrc.js");
- assert_eq!(path.icon_stem_or_suffix(), Some("eslintrc.js"));
- }
-
#[test]
fn test_extension_or_hidden_file_name() {
// No dots in name
@@ -840,7 +821,7 @@ mod tests {
// Hidden file, with extension
let path = Path::new("/a/b/c/.eslintrc.js");
- assert_eq!(path.extension_or_hidden_file_name(), Some("js"));
+ assert_eq!(path.extension_or_hidden_file_name(), Some("eslintrc.js"));
}
#[test]
@@ -28,6 +28,8 @@ use unicase::UniCase;
use anyhow::{anyhow, Context as _};
pub use take_until::*;
+#[cfg(any(test, feature = "test-support"))]
+pub use util_macros::{separator, uri};
#[macro_export]
macro_rules! debug_panic {
@@ -41,6 +43,50 @@ macro_rules! debug_panic {
};
}
+/// A macro to add "C:" to the beginning of a path literal on Windows, and replace all
+/// the separators from `/` to `\`.
+/// But on non-Windows platforms, it will return the path literal as is.
+///
+/// # Examples
+/// ```rust
+/// use util::path;
+///
+/// let path = path!("/Users/user/file.txt");
+/// #[cfg(target_os = "windows")]
+/// assert_eq!(path, "C:\\Users\\user\\file.txt");
+/// #[cfg(not(target_os = "windows"))]
+/// assert_eq!(path, "/Users/user/file.txt");
+/// ```
+#[cfg(all(any(test, feature = "test-support"), target_os = "windows"))]
+#[macro_export]
+macro_rules! path {
+ ($path:literal) => {
+ concat!("C:", util::separator!($path))
+ };
+}
+
+/// A macro to add "C:" to the beginning of a path literal on Windows, and replace all
+/// the separators from `/` to `\`.
+/// But on non-Windows platforms, it will return the path literal as is.
+///
+/// # Examples
+/// ```rust
+/// use util::path;
+///
+/// let path = path!("/Users/user/file.txt");
+/// #[cfg(target_os = "windows")]
+/// assert_eq!(path, "C:\\Users\\user\\file.txt");
+/// #[cfg(not(target_os = "windows"))]
+/// assert_eq!(path, "/Users/user/file.txt");
+/// ```
+#[cfg(all(any(test, feature = "test-support"), not(target_os = "windows")))]
+#[macro_export]
+macro_rules! path {
+ ($path:literal) => {
+ $path
+ };
+}
+
pub fn truncate(s: &str, max_chars: usize) -> &str {
match s.char_indices().nth(max_chars) {
None => s,
@@ -0,0 +1,18 @@
+[package]
+name = "util_macros"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/util_macros.rs"
+proc-macro = true
+doctest = false
+
+[dependencies]
+quote.workspace = true
+syn.workspace = true
@@ -0,0 +1 @@
+../../LICENSE-APACHE
@@ -0,0 +1,56 @@
+#![cfg_attr(not(target_os = "windows"), allow(unused))]
+
+use proc_macro::TokenStream;
+use quote::quote;
+use syn::{parse_macro_input, LitStr};
+
+/// This macro replaces the path separator `/` with `\` for Windows.
+/// But if the target OS is not Windows, the path is returned as is.
+///
+/// # Example
+/// ```rust
+/// # use util_macros::separator;
+/// let path = separator!("path/to/file");
+/// #[cfg(target_os = "windows")]
+/// assert_eq!(path, "path\\to\\file");
+/// #[cfg(not(target_os = "windows"))]
+/// assert_eq!(path, "path/to/file");
+/// ```
+#[proc_macro]
+pub fn separator(input: TokenStream) -> TokenStream {
+ let path = parse_macro_input!(input as LitStr);
+ let path = path.value();
+
+ #[cfg(target_os = "windows")]
+ let path = path.replace("/", "\\");
+
+ TokenStream::from(quote! {
+ #path
+ })
+}
+
+/// This macro replaces the path prefix `file:///` with `file:///C:/` for Windows.
+/// But if the target OS is not Windows, the URI is returned as is.
+///
+/// # Example
+/// ```rust
+/// use util_macros::uri;
+///
+/// let uri = uri!("file:///path/to/file");
+/// #[cfg(target_os = "windows")]
+/// assert_eq!(uri, "file:///C:/path/to/file");
+/// #[cfg(not(target_os = "windows"))]
+/// assert_eq!(uri, "file:///path/to/file");
+/// ```
+#[proc_macro]
+pub fn uri(input: TokenStream) -> TokenStream {
+ let uri = parse_macro_input!(input as LitStr);
+ let uri = uri.value();
+
+ #[cfg(target_os = "windows")]
+ let uri = uri.replace("file:///", "file:///C:/");
+
+ TokenStream::from(quote! {
+ #uri
+ })
+}
@@ -567,37 +567,45 @@ fn generate_commands(_: &App) -> Vec<VimCommand> {
("q", "uit"),
workspace::CloseActiveItem {
save_intent: Some(SaveIntent::Close),
+ close_pinned: false,
},
)
.bang(workspace::CloseActiveItem {
save_intent: Some(SaveIntent::Skip),
+ close_pinned: true,
}),
VimCommand::new(
("wq", ""),
workspace::CloseActiveItem {
save_intent: Some(SaveIntent::Save),
+ close_pinned: false,
},
)
.bang(workspace::CloseActiveItem {
save_intent: Some(SaveIntent::Overwrite),
+ close_pinned: true,
}),
VimCommand::new(
("x", "it"),
workspace::CloseActiveItem {
save_intent: Some(SaveIntent::SaveAll),
+ close_pinned: false,
},
)
.bang(workspace::CloseActiveItem {
save_intent: Some(SaveIntent::Overwrite),
+ close_pinned: true,
}),
VimCommand::new(
("ex", "it"),
workspace::CloseActiveItem {
save_intent: Some(SaveIntent::SaveAll),
+ close_pinned: false,
},
)
.bang(workspace::CloseActiveItem {
save_intent: Some(SaveIntent::Overwrite),
+ close_pinned: true,
}),
VimCommand::new(
("up", "date"),
@@ -657,10 +665,12 @@ fn generate_commands(_: &App) -> Vec<VimCommand> {
("bd", "elete"),
workspace::CloseActiveItem {
save_intent: Some(SaveIntent::Close),
+ close_pinned: false,
},
)
.bang(workspace::CloseActiveItem {
save_intent: Some(SaveIntent::Skip),
+ close_pinned: true,
}),
VimCommand::new(("bn", "ext"), workspace::ActivateNextItem).count(),
VimCommand::new(("bN", "ext"), workspace::ActivatePrevItem).count(),
@@ -679,6 +689,7 @@ fn generate_commands(_: &App) -> Vec<VimCommand> {
("tabc", "lose"),
workspace::CloseActiveItem {
save_intent: Some(SaveIntent::Close),
+ close_pinned: false,
},
),
VimCommand::new(
@@ -1455,6 +1466,7 @@ mod test {
use editor::Editor;
use gpui::{Context, TestAppContext};
use indoc::indoc;
+ use util::path;
use workspace::Workspace;
#[gpui::test]
@@ -1516,7 +1528,8 @@ mod test {
dd
dd
ˇcc"});
- cx.simulate_shared_keystrokes("k : s / dd / ee enter").await;
+ cx.simulate_shared_keystrokes("k : s / d d / e e enter")
+ .await;
cx.shared_state().await.assert_eq(indoc! {"
aa
dd
@@ -1551,13 +1564,13 @@ mod test {
#[gpui::test]
async fn test_command_write(cx: &mut TestAppContext) {
let mut cx = VimTestContext::new(cx, true).await;
- let path = Path::new("/root/dir/file.rs");
+ let path = Path::new(path!("/root/dir/file.rs"));
let fs = cx.workspace(|workspace, _, cx| workspace.project().read(cx).fs().clone());
cx.simulate_keystrokes("i @ escape");
cx.simulate_keystrokes(": w enter");
- assert_eq!(fs.load(path).await.unwrap(), "@\n");
+ assert_eq!(fs.load(path).await.unwrap().replace("\r\n", "\n"), "@\n");
fs.as_fake().insert_file(path, b"oops\n".to_vec()).await;
@@ -1567,12 +1580,12 @@ mod test {
assert!(cx.has_pending_prompt());
// "Cancel"
cx.simulate_prompt_answer(0);
- assert_eq!(fs.load(path).await.unwrap(), "oops\n");
+ assert_eq!(fs.load(path).await.unwrap().replace("\r\n", "\n"), "oops\n");
assert!(!cx.has_pending_prompt());
// force overwrite
cx.simulate_keystrokes(": w ! enter");
assert!(!cx.has_pending_prompt());
- assert_eq!(fs.load(path).await.unwrap(), "@@\n");
+ assert_eq!(fs.load(path).await.unwrap().replace("\r\n", "\n"), "@@\n");
}
#[gpui::test]
@@ -1664,7 +1677,7 @@ mod test {
let file_path = file.as_local().unwrap().abs_path(cx);
assert_eq!(text, expected_text);
- assert_eq!(file_path.to_str().unwrap(), expected_path);
+ assert_eq!(file_path, Path::new(expected_path));
}
#[gpui::test]
@@ -1673,16 +1686,22 @@ mod test {
// Assert base state, that we're in /root/dir/file.rs
cx.workspace(|workspace, _, cx| {
- assert_active_item(workspace, "/root/dir/file.rs", "", cx);
+ assert_active_item(workspace, path!("/root/dir/file.rs"), "", cx);
});
// Insert a new file
let fs = cx.workspace(|workspace, _, cx| workspace.project().read(cx).fs().clone());
fs.as_fake()
- .insert_file("/root/dir/file2.rs", "This is file2.rs".as_bytes().to_vec())
+ .insert_file(
+ path!("/root/dir/file2.rs"),
+ "This is file2.rs".as_bytes().to_vec(),
+ )
.await;
fs.as_fake()
- .insert_file("/root/dir/file3.rs", "go to file3".as_bytes().to_vec())
+ .insert_file(
+ path!("/root/dir/file3.rs"),
+ "go to file3".as_bytes().to_vec(),
+ )
.await;
// Put the path to the second file into the currently open buffer
@@ -1694,7 +1713,12 @@ mod test {
// We now have two items
cx.workspace(|workspace, _, cx| assert_eq!(workspace.items(cx).count(), 2));
cx.workspace(|workspace, _, cx| {
- assert_active_item(workspace, "/root/dir/file2.rs", "This is file2.rs", cx);
+ assert_active_item(
+ workspace,
+ path!("/root/dir/file2.rs"),
+ "This is file2.rs",
+ cx,
+ );
});
// Update editor to point to `file2.rs`
@@ -1711,7 +1735,7 @@ mod test {
// We now have three items
cx.workspace(|workspace, _, cx| assert_eq!(workspace.items(cx).count(), 3));
cx.workspace(|workspace, _, cx| {
- assert_active_item(workspace, "/root/dir/file3.rs", "go to file3", cx);
+ assert_active_item(workspace, path!("/root/dir/file3.rs"), "go to file3", cx);
});
}
@@ -141,105 +141,105 @@ pub enum Motion {
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
struct NextWordStart {
#[serde(default)]
ignore_punctuation: bool,
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
struct NextWordEnd {
#[serde(default)]
ignore_punctuation: bool,
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
struct PreviousWordStart {
#[serde(default)]
ignore_punctuation: bool,
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
struct PreviousWordEnd {
#[serde(default)]
ignore_punctuation: bool,
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub(crate) struct NextSubwordStart {
#[serde(default)]
pub(crate) ignore_punctuation: bool,
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub(crate) struct NextSubwordEnd {
#[serde(default)]
pub(crate) ignore_punctuation: bool,
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub(crate) struct PreviousSubwordStart {
#[serde(default)]
pub(crate) ignore_punctuation: bool,
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub(crate) struct PreviousSubwordEnd {
#[serde(default)]
pub(crate) ignore_punctuation: bool,
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub(crate) struct Up {
#[serde(default)]
pub(crate) display_lines: bool,
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub(crate) struct Down {
#[serde(default)]
pub(crate) display_lines: bool,
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
struct FirstNonWhitespace {
#[serde(default)]
display_lines: bool,
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
struct EndOfLine {
#[serde(default)]
display_lines: bool,
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub struct StartOfLine {
#[serde(default)]
pub(crate) display_lines: bool,
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
struct UnmatchedForward {
#[serde(default)]
char: char,
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
struct UnmatchedBackward {
#[serde(default)]
char: char,
@@ -182,6 +182,9 @@ impl Vim {
Some(Operator::ToggleComments) => {
self.toggle_comments_motion(motion, times, window, cx)
}
+ Some(Operator::ReplaceWithRegister) => {
+ self.replace_with_register_motion(motion, times, window, cx)
+ }
Some(operator) => {
// Can't do anything for text objects, Ignoring
error!("Unexpected normal mode motion operator: {:?}", operator)
@@ -228,6 +231,9 @@ impl Vim {
Some(Operator::ToggleComments) => {
self.toggle_comments_object(object, around, window, cx)
}
+ Some(Operator::ReplaceWithRegister) => {
+ self.replace_with_register_object(object, around, window, cx)
+ }
_ => {
// Can't do anything for namespace operators. Ignoring
}
@@ -1545,4 +1551,40 @@ mod test {
cx.simulate_shared_keystrokes("x escape shift-o").await;
cx.shared_state().await.assert_eq("// hello\n// ˇ\n// x\n");
}
+
+ #[gpui::test]
+ async fn test_yank_line_with_trailing_newline(cx: &mut gpui::TestAppContext) {
+ let mut cx = NeovimBackedTestContext::new(cx).await;
+ cx.set_shared_state("heˇllo\n").await;
+ cx.simulate_shared_keystrokes("y y p").await;
+ cx.shared_state().await.assert_eq("hello\nˇhello\n");
+ }
+
+ #[gpui::test]
+ async fn test_yank_line_without_trailing_newline(cx: &mut gpui::TestAppContext) {
+ let mut cx = NeovimBackedTestContext::new(cx).await;
+ cx.set_shared_state("heˇllo").await;
+ cx.simulate_shared_keystrokes("y y p").await;
+ cx.shared_state().await.assert_eq("hello\nˇhello");
+ }
+
+ #[gpui::test]
+ async fn test_yank_multiline_without_trailing_newline(cx: &mut gpui::TestAppContext) {
+ let mut cx = NeovimBackedTestContext::new(cx).await;
+ cx.set_shared_state("heˇllo\nhello").await;
+ cx.simulate_shared_keystrokes("2 y y p").await;
+ cx.shared_state()
+ .await
+ .assert_eq("hello\nˇhello\nhello\nhello");
+ }
+
+ #[gpui::test]
+ async fn test_dd_then_paste_without_trailing_newline(cx: &mut gpui::TestAppContext) {
+ let mut cx = NeovimBackedTestContext::new(cx).await;
+ cx.set_shared_state("heˇllo").await;
+ cx.simulate_shared_keystrokes("d d").await;
+ cx.shared_state().await.assert_eq("ˇ");
+ cx.simulate_shared_keystrokes("p p").await;
+ cx.shared_state().await.assert_eq("\nhello\nˇhello");
+ }
}
@@ -8,14 +8,14 @@ use std::ops::Range;
use crate::{state::Mode, Vim};
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
struct Increment {
#[serde(default)]
step: bool,
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
struct Decrement {
#[serde(default)]
step: bool,
@@ -6,12 +6,14 @@ use serde::Deserialize;
use std::cmp;
use crate::{
+ motion::Motion,
+ object::Object,
state::{Mode, Register},
Vim,
};
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub struct Paste {
#[serde(default)]
before: bool,
@@ -192,12 +194,85 @@ impl Vim {
});
self.switch_mode(Mode::Normal, true, window, cx);
}
+
+ pub fn replace_with_register_object(
+ &mut self,
+ object: Object,
+ around: bool,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.stop_recording(cx);
+ let selected_register = self.selected_register.take();
+ self.update_editor(window, cx, |_, editor, window, cx| {
+ editor.transact(window, cx, |editor, window, cx| {
+ editor.set_clip_at_line_ends(false, cx);
+ editor.change_selections(None, window, cx, |s| {
+ s.move_with(|map, selection| {
+ object.expand_selection(map, selection, around);
+ });
+ });
+
+ let Some(Register { text, .. }) = Vim::update_globals(cx, |globals, cx| {
+ globals.read_register(selected_register, Some(editor), cx)
+ })
+ .filter(|reg| !reg.text.is_empty()) else {
+ return;
+ };
+ editor.insert(&text, window, cx);
+ editor.set_clip_at_line_ends(true, cx);
+ editor.change_selections(None, window, cx, |s| {
+ s.move_with(|map, selection| {
+ selection.start = map.clip_point(selection.start, Bias::Left);
+ selection.end = selection.start
+ })
+ })
+ });
+ });
+ }
+
+ pub fn replace_with_register_motion(
+ &mut self,
+ motion: Motion,
+ times: Option<usize>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.stop_recording(cx);
+ let selected_register = self.selected_register.take();
+ self.update_editor(window, cx, |_, editor, window, cx| {
+ let text_layout_details = editor.text_layout_details(window);
+ editor.transact(window, cx, |editor, window, cx| {
+ editor.set_clip_at_line_ends(false, cx);
+ editor.change_selections(None, window, cx, |s| {
+ s.move_with(|map, selection| {
+ motion.expand_selection(map, selection, times, false, &text_layout_details);
+ });
+ });
+
+ let Some(Register { text, .. }) = Vim::update_globals(cx, |globals, cx| {
+ globals.read_register(selected_register, Some(editor), cx)
+ })
+ .filter(|reg| !reg.text.is_empty()) else {
+ return;
+ };
+ editor.insert(&text, window, cx);
+ editor.set_clip_at_line_ends(true, cx);
+ editor.change_selections(None, window, cx, |s| {
+ s.move_with(|map, selection| {
+ selection.start = map.clip_point(selection.start, Bias::Left);
+ selection.end = selection.start
+ })
+ })
+ });
+ });
+ }
}
#[cfg(test)]
mod test {
use crate::{
- state::Mode,
+ state::{Mode, Register},
test::{NeovimBackedTestContext, VimTestContext},
UseSystemClipboard, VimSettings,
};
@@ -696,12 +771,20 @@ mod test {
// not testing nvim as it doesn't have a filename
cx.simulate_keystrokes("\" % p");
+ #[cfg(not(target_os = "windows"))]
cx.assert_state(
indoc! {"
The quick brown
dogdir/file.rˇs"},
Mode::Normal,
);
+ #[cfg(target_os = "windows")]
+ cx.assert_state(
+ indoc! {"
+ The quick brown
+ dogdir\\file.rˇs"},
+ Mode::Normal,
+ );
}
#[gpui::test]
@@ -734,4 +817,37 @@ mod test {
Mode::Normal,
);
}
+
+ #[gpui::test]
+ async fn test_replace_with_register(cx: &mut gpui::TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+
+ cx.set_state(
+ indoc! {"
+ ˇfish one
+ two three
+ "},
+ Mode::Normal,
+ );
+ cx.simulate_keystrokes("y i w");
+ cx.simulate_keystrokes("w");
+ cx.simulate_keystrokes("g r i w");
+ cx.assert_state(
+ indoc! {"
+ fish fisˇh
+ two three
+ "},
+ Mode::Normal,
+ );
+ cx.simulate_keystrokes("j b g r e");
+ cx.assert_state(
+ indoc! {"
+ fish fish
+ two fisˇh
+ "},
+ Mode::Normal,
+ );
+ let clipboard: Register = cx.read_from_clipboard().unwrap().into();
+ assert_eq!(clipboard.text, "fish");
+ }
}
@@ -16,7 +16,7 @@ use crate::{
};
#[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub(crate) struct MoveToNext {
#[serde(default = "default_true")]
case_sensitive: bool,
@@ -27,7 +27,7 @@ pub(crate) struct MoveToNext {
}
#[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub(crate) struct MoveToPrev {
#[serde(default = "default_true")]
case_sensitive: bool,
@@ -38,6 +38,7 @@ pub(crate) struct MoveToPrev {
}
#[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq)]
+#[serde(deny_unknown_fields)]
pub(crate) struct Search {
#[serde(default)]
backwards: bool,
@@ -46,6 +47,7 @@ pub(crate) struct Search {
}
#[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq)]
+#[serde(deny_unknown_fields)]
pub struct FindCommand {
pub query: String,
pub backwards: bool,
@@ -162,13 +162,16 @@ impl Vim {
// that line, we will have expanded the start of the selection to ensure it
// contains a newline (so that delete works as expected). We undo that change
// here.
- let is_last_line = linewise
- && end.row == buffer.max_row().0
- && buffer.max_point().column > 0
- && start.row < buffer.max_row().0
+ let max_point = buffer.max_point();
+ let should_adjust_start = linewise
+ && end.row == max_point.row
+ && max_point.column > 0
+ && start.row < max_point.row
&& start == Point::new(start.row, buffer.line_len(MultiBufferRow(start.row)));
+ let should_add_newline =
+ should_adjust_start || (end == max_point && max_point.column > 0 && linewise);
- if is_last_line {
+ if should_adjust_start {
start = Point::new(start.row + 1, 0);
}
@@ -179,7 +182,7 @@ impl Vim {
for chunk in buffer.text_for_range(start..end) {
text.push_str(chunk);
}
- if is_last_line {
+ if should_add_newline {
text.push('\n');
}
clipboard_selections.push(ClipboardSelection {
@@ -19,6 +19,7 @@ use serde::Deserialize;
use ui::Context;
#[derive(Copy, Clone, Debug, PartialEq, Eq, Deserialize, JsonSchema)]
+#[serde(rename_all = "snake_case")]
pub enum Object {
Word { ignore_punctuation: bool },
Subword { ignore_punctuation: bool },
@@ -44,20 +45,20 @@ pub enum Object {
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
struct Word {
#[serde(default)]
ignore_punctuation: bool,
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
struct Subword {
#[serde(default)]
ignore_punctuation: bool,
}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
struct IndentObj {
#[serde(default)]
include_below: bool,
@@ -671,7 +672,7 @@ fn around_subword(
is_word_end || is_subword_end
});
- Some(start..end)
+ Some(start..end).map(|range| expand_to_include_whitespace(map, range, true))
}
fn around_containing_word(
@@ -10,7 +10,6 @@ use gpui::{
Action, App, BorrowAppContext, ClipboardEntry, ClipboardItem, Entity, Global, WeakEntity,
};
use language::Point;
-use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::borrow::BorrowMut;
@@ -18,7 +17,7 @@ use std::{fmt::Display, ops::Range, sync::Arc};
use ui::{Context, SharedString};
use workspace::searchable::Direction;
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Deserialize, JsonSchema, Serialize)]
+#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize)]
pub enum Mode {
Normal,
Insert,
@@ -59,7 +58,7 @@ impl Default for Mode {
}
}
-#[derive(Clone, Debug, PartialEq, Eq, Deserialize, JsonSchema)]
+#[derive(Clone, Debug, PartialEq)]
pub enum Operator {
Change,
Delete,
@@ -82,7 +81,6 @@ pub enum Operator {
},
AddSurrounds {
// Typically no need to configure this as `SendKeystrokes` can be used - see #23088.
- #[serde(skip)]
target: Option<SurroundsType>,
},
ChangeSurrounds {
@@ -111,6 +109,7 @@ pub enum Operator {
RecordRegister,
ReplayRegister,
ToggleComments,
+ ReplaceWithRegister,
}
#[derive(Default, Clone, Debug)]
@@ -499,6 +498,7 @@ impl Operator {
Operator::AutoIndent => "eq",
Operator::ShellCommand => "sh",
Operator::Rewrap => "gq",
+ Operator::ReplaceWithRegister => "gr",
Operator::Outdent => "<",
Operator::Uppercase => "gU",
Operator::Lowercase => "gu",
@@ -551,6 +551,7 @@ impl Operator {
| Operator::ShellCommand
| Operator::Lowercase
| Operator::Uppercase
+ | Operator::ReplaceWithRegister
| Operator::Object { .. }
| Operator::ChangeSurrounds { target: None }
| Operator::OppositeCase
@@ -554,11 +554,7 @@ mod test {
use gpui::KeyBinding;
use indoc::indoc;
- use crate::{
- state::{Mode, Operator},
- test::VimTestContext,
- PushOperator,
- };
+ use crate::{state::Mode, test::VimTestContext, PushAddSurrounds};
#[gpui::test]
async fn test_add_surrounds(cx: &mut gpui::TestAppContext) {
@@ -749,7 +745,7 @@ mod test {
cx.update(|_, cx| {
cx.bind_keys([KeyBinding::new(
"shift-s",
- PushOperator(Operator::AddSurrounds { target: None }),
+ PushAddSurrounds {},
Some("vim_mode == visual"),
)])
});
@@ -17,12 +17,7 @@ use indoc::indoc;
use search::BufferSearchBar;
use workspace::WorkspaceSettings;
-use crate::{
- insert::NormalBefore,
- motion,
- state::{Mode, Operator},
- PushOperator,
-};
+use crate::{insert::NormalBefore, motion, state::Mode, PushSneak, PushSneakBackward};
#[gpui::test]
async fn test_initially_disabled(cx: &mut gpui::TestAppContext) {
@@ -1347,17 +1342,17 @@ async fn test_sneak(cx: &mut gpui::TestAppContext) {
cx.bind_keys([
KeyBinding::new(
"s",
- PushOperator(Operator::Sneak { first_char: None }),
+ PushSneak { first_char: None },
Some("vim_mode == normal"),
),
KeyBinding::new(
"S",
- PushOperator(Operator::SneakBackward { first_char: None }),
+ PushSneakBackward { first_char: None },
Some("vim_mode == normal"),
),
KeyBinding::new(
"S",
- PushOperator(Operator::SneakBackward { first_char: None }),
+ PushSneakBackward { first_char: None },
Some("vim_mode == visual"),
),
])
@@ -23,6 +23,7 @@ use anyhow::Result;
use collections::HashMap;
use editor::{
movement::{self, FindRange},
+ scroll::Autoscroll,
Anchor, Bias, Editor, EditorEvent, EditorMode, ToPoint,
};
use gpui::{
@@ -34,6 +35,7 @@ use language::{CursorShape, Point, Selection, SelectionGoal, TransactionId};
pub use mode_indicator::ModeIndicator;
use motion::Motion;
use normal::search::SearchSubmit;
+use object::Object;
use schemars::JsonSchema;
use serde::Deserialize;
use serde_derive::Serialize;
@@ -44,55 +46,138 @@ use surrounds::SurroundsType;
use theme::ThemeSettings;
use ui::{px, IntoElement, SharedString};
use vim_mode_setting::VimModeSetting;
-use workspace::{self, Pane, ResizeIntent, Workspace};
+use workspace::{self, Pane, Workspace};
use crate::state::ReplayableAction;
-/// Used to resize the current pane
+/// Number is used to manage vim's count. Pushing a digit
+/// multiplies the current value by 10 and adds the digit.
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-pub struct ResizePane(pub ResizeIntent);
+struct Number(usize);
-/// An Action to Switch between modes
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-pub struct SwitchMode(pub Mode);
+struct SelectRegister(String);
-/// PushOperator is used to put vim into a "minor" mode,
-/// where it's waiting for a specific next set of keystrokes.
-/// For example 'd' needs a motion to complete.
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-pub struct PushOperator(pub Operator);
+#[serde(deny_unknown_fields)]
+struct PushObject {
+ around: bool,
+}
-/// Number is used to manage vim's count. Pushing a digit
-/// multiplies the current value by 10 and adds the digit.
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-struct Number(usize);
+#[serde(deny_unknown_fields)]
+struct PushFindForward {
+ before: bool,
+}
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
-struct SelectRegister(String);
+#[serde(deny_unknown_fields)]
+struct PushFindBackward {
+ after: bool,
+}
+
+#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
+#[serde(deny_unknown_fields)]
+struct PushSneak {
+ first_char: Option<char>,
+}
+
+#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
+#[serde(deny_unknown_fields)]
+struct PushSneakBackward {
+ first_char: Option<char>,
+}
+
+#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
+#[serde(deny_unknown_fields)]
+struct PushAddSurrounds {}
+
+#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
+#[serde(deny_unknown_fields)]
+struct PushChangeSurrounds {
+ target: Option<Object>,
+}
+
+#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
+#[serde(deny_unknown_fields)]
+struct PushJump {
+ line: bool,
+}
+
+#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
+#[serde(deny_unknown_fields)]
+struct PushDigraph {
+ first_char: Option<char>,
+}
+
+#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
+#[serde(deny_unknown_fields)]
+struct PushLiteral {
+ prefix: Option<String>,
+}
actions!(
vim,
[
+ SwitchToNormalMode,
+ SwitchToInsertMode,
+ SwitchToReplaceMode,
+ SwitchToVisualMode,
+ SwitchToVisualLineMode,
+ SwitchToVisualBlockMode,
+ SwitchToHelixNormalMode,
ClearOperators,
Tab,
Enter,
InnerObject,
- FindForward,
- FindBackward,
MaximizePane,
OpenDefaultKeymap,
ResetPaneSizes,
- Sneak,
- SneakBackward,
+ ResizePaneRight,
+ ResizePaneLeft,
+ ResizePaneUp,
+ ResizePaneDown,
+ PushChange,
+ PushDelete,
+ PushYank,
+ PushReplace,
+ PushDeleteSurrounds,
+ PushMark,
+ PushIndent,
+ PushOutdent,
+ PushAutoIndent,
+ PushRewrap,
+ PushShellCommand,
+ PushLowercase,
+ PushUppercase,
+ PushOppositeCase,
+ PushRegister,
+ PushRecordRegister,
+ PushReplayRegister,
+ PushReplaceWithRegister,
+ PushToggleComments,
]
);
// in the workspace namespace so it's not filtered out when vim is disabled.
-actions!(workspace, [ToggleVimMode]);
+actions!(workspace, [ToggleVimMode,]);
impl_actions!(
vim,
- [ResizePane, SwitchMode, PushOperator, Number, SelectRegister]
+ [
+ Number,
+ SelectRegister,
+ PushObject,
+ PushFindForward,
+ PushFindBackward,
+ PushSneak,
+ PushSneakBackward,
+ PushAddSurrounds,
+ PushChangeSurrounds,
+ PushJump,
+ PushDigraph,
+ PushLiteral
+ ]
);
/// Initializes the `vim` crate.
@@ -141,7 +226,7 @@ pub fn init(cx: &mut App) {
workspace.resize_pane(Axis::Vertical, desired_size - size.size.height, window, cx)
});
- workspace.register_action(|workspace, action: &ResizePane, window, cx| {
+ workspace.register_action(|workspace, _: &ResizePaneRight, window, cx| {
let count = Vim::take_count(cx).unwrap_or(1) as f32;
let theme = ThemeSettings::get_global(cx);
let Ok(font_id) = window.text_system().font_id(&theme.buffer_font) else {
@@ -153,16 +238,36 @@ pub fn init(cx: &mut App) {
else {
return;
};
- let height = theme.buffer_font_size() * theme.buffer_line_height.value();
+ workspace.resize_pane(Axis::Horizontal, width.width * count, window, cx);
+ });
- let (axis, amount) = match action.0 {
- ResizeIntent::Lengthen => (Axis::Vertical, height),
- ResizeIntent::Shorten => (Axis::Vertical, height * -1.),
- ResizeIntent::Widen => (Axis::Horizontal, width.width),
- ResizeIntent::Narrow => (Axis::Horizontal, width.width * -1.),
+ workspace.register_action(|workspace, _: &ResizePaneLeft, window, cx| {
+ let count = Vim::take_count(cx).unwrap_or(1) as f32;
+ let theme = ThemeSettings::get_global(cx);
+ let Ok(font_id) = window.text_system().font_id(&theme.buffer_font) else {
+ return;
};
+ let Ok(width) = window
+ .text_system()
+ .advance(font_id, theme.buffer_font_size(), 'm')
+ else {
+ return;
+ };
+ workspace.resize_pane(Axis::Horizontal, -width.width * count, window, cx);
+ });
- workspace.resize_pane(axis, amount * count, window, cx);
+ workspace.register_action(|workspace, _: &ResizePaneUp, window, cx| {
+ let count = Vim::take_count(cx).unwrap_or(1) as f32;
+ let theme = ThemeSettings::get_global(cx);
+ let height = theme.buffer_font_size() * theme.buffer_line_height.value();
+ workspace.resize_pane(Axis::Vertical, height * count, window, cx);
+ });
+
+ workspace.register_action(|workspace, _: &ResizePaneDown, window, cx| {
+ let count = Vim::take_count(cx).unwrap_or(1) as f32;
+ let theme = ThemeSettings::get_global(cx);
+ let height = theme.buffer_font_size() * theme.buffer_line_height.value();
+ workspace.resize_pane(Axis::Vertical, -height * count, window, cx);
});
workspace.register_action(|workspace, _: &SearchSubmit, window, cx| {
@@ -329,12 +434,212 @@ impl Vim {
});
vim.update(cx, |_, cx| {
- Vim::action(editor, cx, |vim, action: &SwitchMode, window, cx| {
- vim.switch_mode(action.0, false, window, cx)
+ Vim::action(editor, cx, |vim, _: &SwitchToNormalMode, window, cx| {
+ vim.switch_mode(Mode::Normal, false, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &SwitchToInsertMode, window, cx| {
+ vim.switch_mode(Mode::Insert, false, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &SwitchToReplaceMode, window, cx| {
+ vim.switch_mode(Mode::Replace, false, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &SwitchToVisualMode, window, cx| {
+ vim.switch_mode(Mode::Visual, false, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &SwitchToVisualLineMode, window, cx| {
+ vim.switch_mode(Mode::VisualLine, false, window, cx)
+ });
+
+ Vim::action(
+ editor,
+ cx,
+ |vim, _: &SwitchToVisualBlockMode, window, cx| {
+ vim.switch_mode(Mode::VisualBlock, false, window, cx)
+ },
+ );
+
+ Vim::action(
+ editor,
+ cx,
+ |vim, _: &SwitchToHelixNormalMode, window, cx| {
+ vim.switch_mode(Mode::HelixNormal, false, window, cx)
+ },
+ );
+
+ Vim::action(editor, cx, |vim, action: &PushObject, window, cx| {
+ vim.push_operator(
+ Operator::Object {
+ around: action.around,
+ },
+ window,
+ cx,
+ )
});
- Vim::action(editor, cx, |vim, action: &PushOperator, window, cx| {
- vim.push_operator(action.0.clone(), window, cx)
+ Vim::action(editor, cx, |vim, action: &PushFindForward, window, cx| {
+ vim.push_operator(
+ Operator::FindForward {
+ before: action.before,
+ },
+ window,
+ cx,
+ )
+ });
+
+ Vim::action(editor, cx, |vim, action: &PushFindBackward, window, cx| {
+ vim.push_operator(
+ Operator::FindBackward {
+ after: action.after,
+ },
+ window,
+ cx,
+ )
+ });
+
+ Vim::action(editor, cx, |vim, action: &PushSneak, window, cx| {
+ vim.push_operator(
+ Operator::Sneak {
+ first_char: action.first_char,
+ },
+ window,
+ cx,
+ )
+ });
+
+ Vim::action(editor, cx, |vim, action: &PushSneakBackward, window, cx| {
+ vim.push_operator(
+ Operator::SneakBackward {
+ first_char: action.first_char,
+ },
+ window,
+ cx,
+ )
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushAddSurrounds, window, cx| {
+ vim.push_operator(Operator::AddSurrounds { target: None }, window, cx)
+ });
+
+ Vim::action(
+ editor,
+ cx,
+ |vim, action: &PushChangeSurrounds, window, cx| {
+ vim.push_operator(
+ Operator::ChangeSurrounds {
+ target: action.target,
+ },
+ window,
+ cx,
+ )
+ },
+ );
+
+ Vim::action(editor, cx, |vim, action: &PushJump, window, cx| {
+ vim.push_operator(Operator::Jump { line: action.line }, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, action: &PushDigraph, window, cx| {
+ vim.push_operator(
+ Operator::Digraph {
+ first_char: action.first_char,
+ },
+ window,
+ cx,
+ )
+ });
+
+ Vim::action(editor, cx, |vim, action: &PushLiteral, window, cx| {
+ vim.push_operator(
+ Operator::Literal {
+ prefix: action.prefix.clone(),
+ },
+ window,
+ cx,
+ )
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushChange, window, cx| {
+ vim.push_operator(Operator::Change, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushDelete, window, cx| {
+ vim.push_operator(Operator::Delete, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushYank, window, cx| {
+ vim.push_operator(Operator::Yank, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushReplace, window, cx| {
+ vim.push_operator(Operator::Replace, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushDeleteSurrounds, window, cx| {
+ vim.push_operator(Operator::DeleteSurrounds, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushMark, window, cx| {
+ vim.push_operator(Operator::Mark, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushIndent, window, cx| {
+ vim.push_operator(Operator::Indent, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushOutdent, window, cx| {
+ vim.push_operator(Operator::Outdent, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushAutoIndent, window, cx| {
+ vim.push_operator(Operator::AutoIndent, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushRewrap, window, cx| {
+ vim.push_operator(Operator::Rewrap, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushShellCommand, window, cx| {
+ vim.push_operator(Operator::ShellCommand, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushLowercase, window, cx| {
+ vim.push_operator(Operator::Lowercase, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushUppercase, window, cx| {
+ vim.push_operator(Operator::Uppercase, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushOppositeCase, window, cx| {
+ vim.push_operator(Operator::OppositeCase, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushRegister, window, cx| {
+ vim.push_operator(Operator::Register, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushRecordRegister, window, cx| {
+ vim.push_operator(Operator::RecordRegister, window, cx)
+ });
+
+ Vim::action(editor, cx, |vim, _: &PushReplayRegister, window, cx| {
+ vim.push_operator(Operator::ReplayRegister, window, cx)
+ });
+
+ Vim::action(
+ editor,
+ cx,
+ |vim, _: &PushReplaceWithRegister, window, cx| {
+ vim.push_operator(Operator::ReplaceWithRegister, window, cx)
+ },
+ );
+
+ Vim::action(editor, cx, |vim, _: &PushToggleComments, window, cx| {
+ vim.push_operator(Operator::ToggleComments, window, cx)
});
Vim::action(editor, cx, |vim, _: &ClearOperators, window, cx| {
@@ -344,7 +649,19 @@ impl Vim {
vim.push_count_digit(n.0, window, cx);
});
Vim::action(editor, cx, |vim, _: &Tab, window, cx| {
- vim.input_ignored(" ".into(), window, cx)
+ let Some(anchor) = vim
+ .editor()
+ .and_then(|editor| editor.read(cx).inline_completion_start_anchor())
+ else {
+ return;
+ };
+
+ vim.update_editor(window, cx, |_, editor, window, cx| {
+ editor.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
+ s.select_anchor_ranges([anchor..anchor])
+ });
+ });
+ vim.switch_mode(Mode::Insert, true, window, cx);
});
Vim::action(editor, cx, |vim, _: &Enter, window, cx| {
vim.input_ignored("\n".into(), window, cx)
@@ -1262,8 +1579,8 @@ impl Vim {
if self.mode == Mode::Normal {
self.update_editor(window, cx, |_, editor, window, cx| {
- editor.accept_inline_completion(
- &editor::actions::AcceptInlineCompletion {},
+ editor.accept_edit_prediction(
+ &editor::actions::AcceptEditPrediction {},
window,
cx,
);
@@ -1274,7 +1591,7 @@ impl Vim {
}
fn sync_vim_settings(&mut self, window: &mut Window, cx: &mut Context<Self>) {
- self.update_editor(window, cx, |vim, editor, _, cx| {
+ self.update_editor(window, cx, |vim, editor, window, cx| {
editor.set_cursor_shape(vim.cursor_shape(), cx);
editor.set_clip_at_line_ends(vim.clip_at_line_ends(), cx);
editor.set_collapse_matches(true);
@@ -1282,14 +1599,11 @@ impl Vim {
editor.set_autoindent(vim.should_autoindent());
editor.selections.line_mode = matches!(vim.mode, Mode::VisualLine);
- let enable_inline_completions = match vim.mode {
- Mode::Insert | Mode::Replace => true,
- Mode::Normal => editor
- .inline_completion_provider()
- .map_or(false, |provider| provider.show_completions_in_normal_mode()),
- _ => false,
+ let hide_inline_completions = match vim.mode {
+ Mode::Insert | Mode::Replace => false,
+ _ => true,
};
- editor.set_inline_completions_enabled(enable_inline_completions, cx);
+ editor.set_inline_completions_hidden_for_vim_mode(hide_inline_completions, window, cx);
});
cx.notify()
}
@@ -24,8 +24,10 @@
{"Key":":"}
{"Key":"s"}
{"Key":"/"}
-{"Key":"dd"}
+{"Key":"d"}
+{"Key":"d"}
{"Key":"/"}
-{"Key":"ee"}
+{"Key":"e"}
+{"Key":"e"}
{"Key":"enter"}
-{"Get":{"state":"aa\ndd\nˇee\ncc", "mode":"Normal"}}
+{"Get":{"state":"aa\ndd\nˇee\ncc","mode":"Normal"}}
@@ -0,0 +1,7 @@
+{"Put":{"state":"heˇllo"}}
+{"Key":"d"}
+{"Key":"d"}
+{"Get":{"state":"ˇ","mode":"Normal"}}
+{"Key":"p"}
+{"Key":"p"}
+{"Get":{"state":"\nhello\nˇhello","mode":"Normal"}}
@@ -1,10 +1,10 @@
{"Put":{"state":"0b111111111111111111111111111111111111111111111111111111111111111111111ˇ1\n"}}
{"Key":"ctrl-a"}
-{"Get":{"state":"0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1\n", "mode":"Normal"}}
+{"Get":{"state":"0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1\n","mode":"Normal"}}
{"Key":"ctrl-a"}
{"Get":{"state":"0b000000000000000000000000000000000000000000000000000000000000000000000ˇ0\n","mode":"Normal"}}
{"Key":"ctrl-a"}
{"Get":{"state":"0b000000000000000000000000000000000000000000000000000000000000000000000ˇ1\n","mode":"Normal"}}
{"Key":"2"}
{"Key":"ctrl-x"}
-{"Get":{"state":"0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1\n", "mode":"Normal"}}
+{"Get":{"state":"0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1\n","mode":"Normal"}}
@@ -1,10 +1,10 @@
{"Put":{"state":"0xfffffffffffffffffffˇf\n"}}
{"Key":"ctrl-a"}
-{"Get":{"state":"0x0000fffffffffffffffˇf\n", "mode":"Normal"}}
+{"Get":{"state":"0x0000fffffffffffffffˇf\n","mode":"Normal"}}
{"Key":"ctrl-a"}
{"Get":{"state":"0x0000000000000000000ˇ0\n","mode":"Normal"}}
{"Key":"ctrl-a"}
{"Get":{"state":"0x0000000000000000000ˇ1\n","mode":"Normal"}}
{"Key":"2"}
{"Key":"ctrl-x"}
-{"Get":{"state":"0x0000fffffffffffffffˇf\n", "mode":"Normal"}}
+{"Get":{"state":"0x0000fffffffffffffffˇf\n","mode":"Normal"}}
@@ -2,9 +2,9 @@
{"Key":"ctrl-a"}
{"Get":{"state":"inline0x3ˇau32\n","mode":"Normal"}}
{"Key":"ctrl-a"}
-{"Get":{"state":"inline0x3ˇbu32\n", "mode":"Normal"}}
+{"Get":{"state":"inline0x3ˇbu32\n","mode":"Normal"}}
{"Key":"l"}
{"Key":"l"}
{"Key":"l"}
{"Key":"ctrl-a"}
-{"Get":{"state":"inline0x3bu3ˇ3\n", "mode":"Normal"}}
+{"Get":{"state":"inline0x3bu3ˇ3\n","mode":"Normal"}}
@@ -3,4 +3,4 @@
{"Get":{"state":"-ˇ1\n","mode":"Normal"}}
{"Key":"2"}
{"Key":"ctrl-a"}
-{"Get":{"state":"ˇ1\n", "mode":"Normal"}}
+{"Get":{"state":"ˇ1\n","mode":"Normal"}}
@@ -2,12 +2,12 @@
{"Key":"ctrl-a"}
{"Get":{"state":"1844674407370955161ˇ5\n","mode":"Normal"}}
{"Key":"ctrl-a"}
-{"Get":{"state":"-1844674407370955161ˇ5\n", "mode":"Normal"}}
+{"Get":{"state":"-1844674407370955161ˇ5\n","mode":"Normal"}}
{"Key":"ctrl-a"}
-{"Get":{"state":"-1844674407370955161ˇ4\n", "mode":"Normal"}}
+{"Get":{"state":"-1844674407370955161ˇ4\n","mode":"Normal"}}
{"Key":"3"}
{"Key":"ctrl-x"}
-{"Get":{"state":"1844674407370955161ˇ4\n", "mode":"Normal"}}
+{"Get":{"state":"1844674407370955161ˇ4\n","mode":"Normal"}}
{"Key":"2"}
{"Key":"ctrl-a"}
-{"Get":{"state":"-1844674407370955161ˇ5\n", "mode":"Normal"}}
+{"Get":{"state":"-1844674407370955161ˇ5\n","mode":"Normal"}}
@@ -0,0 +1,5 @@
+{"Put":{"state":"heˇllo\n"}}
+{"Key":"y"}
+{"Key":"y"}
+{"Key":"p"}
+{"Get":{"state":"hello\nˇhello\n","mode":"Normal"}}
@@ -0,0 +1,5 @@
+{"Put":{"state":"heˇllo"}}
+{"Key":"y"}
+{"Key":"y"}
+{"Key":"p"}
+{"Get":{"state":"hello\nˇhello","mode":"Normal"}}
@@ -0,0 +1,6 @@
+{"Put":{"state":"heˇllo\nhello"}}
+{"Key":"2"}
+{"Key":"y"}
+{"Key":"y"}
+{"Key":"p"}
+{"Get":{"state":"hello\nˇhello\nhello\nhello","mode":"Normal"}}
@@ -1,7 +1,7 @@
use crate::{Toast, Workspace};
use gpui::{
svg, AnyView, App, AppContext as _, AsyncWindowContext, ClipboardItem, Context, DismissEvent,
- Entity, EventEmitter, Global, PromptLevel, Render, ScrollHandle, Task,
+ Entity, EventEmitter, PromptLevel, Render, ScrollHandle, Task,
};
use parking_lot::Mutex;
use std::sync::{Arc, LazyLock};
@@ -124,8 +124,8 @@ impl Workspace {
Some((click_msg, on_click)) => {
let on_click = on_click.clone();
simple_message_notification::MessageNotification::new(toast.msg.clone())
- .with_click_message(click_msg.clone())
- .on_click(move |window, cx| on_click(window, cx))
+ .primary_message(click_msg.clone())
+ .primary_on_click(move |window, cx| on_click(window, cx))
}
None => simple_message_notification::MessageNotification::new(toast.msg.clone()),
})
@@ -156,10 +156,11 @@ impl Workspace {
pub fn show_initial_notifications(&mut self, cx: &mut Context<Self>) {
// Allow absence of the global so that tests don't need to initialize it.
- let app_notifications = cx
- .try_global::<AppNotifications>()
+ let app_notifications = GLOBAL_APP_NOTIFICATIONS
+ .lock()
+ .app_notifications
.iter()
- .flat_map(|global| global.app_notifications.iter().cloned())
+ .cloned()
.collect::<Vec<_>>();
for (id, build_notification) in app_notifications {
self.show_notification_without_handling_dismiss_events(&id, cx, |cx| {
@@ -375,12 +376,14 @@ pub mod simple_message_notification {
pub struct MessageNotification {
build_content: Box<dyn Fn(&mut Window, &mut Context<Self>) -> AnyElement>,
- on_click: Option<Arc<dyn Fn(&mut Window, &mut Context<Self>)>>,
- click_message: Option<SharedString>,
- secondary_click_message: Option<SharedString>,
+ primary_message: Option<SharedString>,
+ primary_icon: Option<IconName>,
+ primary_icon_color: Option<Color>,
+ primary_on_click: Option<Arc<dyn Fn(&mut Window, &mut Context<Self>)>>,
+ secondary_message: Option<SharedString>,
+ secondary_icon: Option<IconName>,
+ secondary_icon_color: Option<Color>,
secondary_on_click: Option<Arc<dyn Fn(&mut Window, &mut Context<Self>)>>,
- tertiary_click_message: Option<SharedString>,
- tertiary_on_click: Option<Arc<dyn Fn(&mut Window, &mut Context<Self>)>>,
more_info_message: Option<SharedString>,
more_info_url: Option<Arc<str>>,
show_close_button: bool,
@@ -404,12 +407,14 @@ pub mod simple_message_notification {
{
Self {
build_content: Box::new(content),
- on_click: None,
- click_message: None,
+ primary_message: None,
+ primary_icon: None,
+ primary_icon_color: None,
+ primary_on_click: None,
+ secondary_message: None,
+ secondary_icon: None,
+ secondary_icon_color: None,
secondary_on_click: None,
- secondary_click_message: None,
- tertiary_on_click: None,
- tertiary_click_message: None,
more_info_message: None,
more_info_url: None,
show_close_button: true,
@@ -417,51 +422,55 @@ pub mod simple_message_notification {
}
}
- pub fn with_click_message<S>(mut self, message: S) -> Self
+ pub fn primary_message<S>(mut self, message: S) -> Self
where
S: Into<SharedString>,
{
- self.click_message = Some(message.into());
+ self.primary_message = Some(message.into());
self
}
- pub fn on_click<F>(mut self, on_click: F) -> Self
- where
- F: 'static + Fn(&mut Window, &mut Context<Self>),
- {
- self.on_click = Some(Arc::new(on_click));
+ pub fn primary_icon(mut self, icon: IconName) -> Self {
+ self.primary_icon = Some(icon);
self
}
- pub fn with_secondary_click_message<S>(mut self, message: S) -> Self
- where
- S: Into<SharedString>,
- {
- self.secondary_click_message = Some(message.into());
+ pub fn primary_icon_color(mut self, color: Color) -> Self {
+ self.primary_icon_color = Some(color);
self
}
- pub fn on_secondary_click<F>(mut self, on_click: F) -> Self
+ pub fn primary_on_click<F>(mut self, on_click: F) -> Self
where
F: 'static + Fn(&mut Window, &mut Context<Self>),
{
- self.secondary_on_click = Some(Arc::new(on_click));
+ self.primary_on_click = Some(Arc::new(on_click));
self
}
- pub fn with_tertiary_click_message<S>(mut self, message: S) -> Self
+ pub fn secondary_message<S>(mut self, message: S) -> Self
where
S: Into<SharedString>,
{
- self.tertiary_click_message = Some(message.into());
+ self.secondary_message = Some(message.into());
+ self
+ }
+
+ pub fn secondary_icon(mut self, icon: IconName) -> Self {
+ self.secondary_icon = Some(icon);
self
}
- pub fn on_tertiary_click<F>(mut self, on_click: F) -> Self
+ pub fn secondary_icon_color(mut self, color: Color) -> Self {
+ self.secondary_icon_color = Some(color);
+ self
+ }
+
+ pub fn secondary_on_click<F>(mut self, on_click: F) -> Self
where
F: 'static + Fn(&mut Window, &mut Context<Self>),
{
- self.tertiary_on_click = Some(Arc::new(on_click));
+ self.secondary_on_click = Some(Arc::new(on_click));
self
}
@@ -529,66 +538,63 @@ pub mod simple_message_notification {
.child(
h_flex()
.gap_1()
- .children(self.click_message.iter().map(|message| {
- Button::new(message.clone(), message.clone())
+ .children(self.primary_message.iter().map(|message| {
+ let mut button = Button::new(message.clone(), message.clone())
.label_size(LabelSize::Small)
- .icon(IconName::Check)
- .icon_position(IconPosition::Start)
- .icon_size(IconSize::Small)
- .icon_color(Color::Success)
.on_click(cx.listener(|this, _, window, cx| {
- if let Some(on_click) = this.on_click.as_ref() {
+ if let Some(on_click) = this.primary_on_click.as_ref() {
(on_click)(window, cx)
};
this.dismiss(cx)
- }))
+ }));
+
+ if let Some(icon) = self.primary_icon {
+ button = button
+ .icon(icon)
+ .icon_color(self.primary_icon_color.unwrap_or(Color::Muted))
+ .icon_position(IconPosition::Start)
+ .icon_size(IconSize::Small);
+ }
+
+ button
}))
- .children(self.secondary_click_message.iter().map(|message| {
- Button::new(message.clone(), message.clone())
+ .children(self.secondary_message.iter().map(|message| {
+ let mut button = Button::new(message.clone(), message.clone())
.label_size(LabelSize::Small)
- .icon(IconName::Close)
- .icon_position(IconPosition::Start)
- .icon_size(IconSize::Small)
- .icon_color(Color::Error)
.on_click(cx.listener(|this, _, window, cx| {
if let Some(on_click) = this.secondary_on_click.as_ref() {
(on_click)(window, cx)
};
this.dismiss(cx)
- }))
+ }));
+
+ if let Some(icon) = self.secondary_icon {
+ button = button
+ .icon(icon)
+ .icon_position(IconPosition::Start)
+ .icon_size(IconSize::Small)
+ .icon_color(self.secondary_icon_color.unwrap_or(Color::Muted));
+ }
+
+ button
}))
.child(
- h_flex()
- .w_full()
- .gap_1()
- .justify_end()
- .children(self.tertiary_click_message.iter().map(|message| {
- Button::new(message.clone(), message.clone())
- .label_size(LabelSize::Small)
- .on_click(cx.listener(|this, _, window, cx| {
- if let Some(on_click) = this.tertiary_on_click.as_ref()
- {
- (on_click)(window, cx)
- };
- this.dismiss(cx)
- }))
- }))
- .children(
- self.more_info_message
- .iter()
- .zip(self.more_info_url.iter())
- .map(|(message, url)| {
- let url = url.clone();
- Button::new(message.clone(), message.clone())
- .label_size(LabelSize::Small)
- .icon(IconName::ArrowUpRight)
- .icon_size(IconSize::Indicator)
- .icon_color(Color::Muted)
- .on_click(cx.listener(move |_, _, _, cx| {
- cx.open_url(&url);
- }))
- }),
- ),
+ h_flex().w_full().justify_end().children(
+ self.more_info_message
+ .iter()
+ .zip(self.more_info_url.iter())
+ .map(|(message, url)| {
+ let url = url.clone();
+ Button::new(message.clone(), message.clone())
+ .label_size(LabelSize::Small)
+ .icon(IconName::ArrowUpRight)
+ .icon_size(IconSize::Indicator)
+ .icon_color(Color::Muted)
+ .on_click(cx.listener(move |_, _, _, cx| {
+ cx.open_url(&url);
+ }))
+ }),
+ ),
),
)
}
@@ -609,8 +615,6 @@ struct AppNotifications {
)>,
}
-impl Global for AppNotifications {}
-
impl AppNotifications {
pub fn insert(
&mut self,
@@ -72,7 +72,7 @@ impl DraggedSelection {
}
#[derive(Clone, Copy, PartialEq, Debug, Deserialize, JsonSchema)]
-#[serde(rename_all = "camelCase")]
+#[serde(rename_all = "snake_case")]
pub enum SaveIntent {
/// write all files (even if unchanged)
/// prompt before overwriting on-disk changes
@@ -96,13 +96,15 @@ pub enum SaveIntent {
pub struct ActivateItem(pub usize);
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub struct CloseActiveItem {
pub save_intent: Option<SaveIntent>,
+ #[serde(default)]
+ pub close_pinned: bool,
}
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub struct CloseInactiveItems {
pub save_intent: Option<SaveIntent>,
#[serde(default)]
@@ -110,7 +112,7 @@ pub struct CloseInactiveItems {
}
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub struct CloseAllItems {
pub save_intent: Option<SaveIntent>,
#[serde(default)]
@@ -118,34 +120,35 @@ pub struct CloseAllItems {
}
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub struct CloseCleanItems {
#[serde(default)]
pub close_pinned: bool,
}
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub struct CloseItemsToTheRight {
#[serde(default)]
pub close_pinned: bool,
}
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub struct CloseItemsToTheLeft {
#[serde(default)]
pub close_pinned: bool,
}
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub struct RevealInProjectPanel {
#[serde(skip)]
pub entry_id: Option<u64>,
}
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default)]
+#[serde(deny_unknown_fields)]
pub struct DeploySearch {
#[serde(default)]
pub replace_enabled: bool,
@@ -582,6 +585,11 @@ impl Pane {
if let Some(active_item) = self.active_item() {
if self.focus_handle.is_focused(window) {
+ // Schedule a redraw next frame, so that the focus changes below take effect
+ cx.on_next_frame(window, |_, _, cx| {
+ cx.notify();
+ });
+
// Pane was focused directly. We need to either focus a view inside the active item,
// or focus the active item itself
if let Some(weak_last_focus_handle) =
@@ -1218,6 +1226,37 @@ impl Pane {
return None;
}
+ if self.is_tab_pinned(self.active_item_index) && !action.close_pinned {
+ // Activate any non-pinned tab in same pane
+ let non_pinned_tab_index = self
+ .items()
+ .enumerate()
+ .find(|(index, _item)| !self.is_tab_pinned(*index))
+ .map(|(index, _item)| index);
+ if let Some(index) = non_pinned_tab_index {
+ self.activate_item(index, false, false, window, cx);
+ return None;
+ }
+
+ // Activate any non-pinned tab in different pane
+ let current_pane = cx.entity();
+ self.workspace
+ .update(cx, |workspace, cx| {
+ let panes = workspace.center.panes();
+ let pane_with_unpinned_tab = panes.iter().find(|pane| {
+ if **pane == ¤t_pane {
+ return false;
+ }
+ pane.read(cx).has_unpinned_tabs()
+ });
+ if let Some(pane) = pane_with_unpinned_tab {
+ pane.update(cx, |pane, cx| pane.activate_unpinned_tab(window, cx));
+ }
+ })
+ .ok();
+
+ return None;
+ };
let active_item_id = self.items[self.active_item_index].item_id();
Some(self.close_item_by_id(
active_item_id,
@@ -2099,6 +2138,24 @@ impl Pane {
self.pinned_tab_count != 0
}
+ fn has_unpinned_tabs(&self) -> bool {
+ self.pinned_tab_count < self.items.len()
+ }
+
+ fn activate_unpinned_tab(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ if self.items.is_empty() {
+ return;
+ }
+ let Some(index) = self
+ .items()
+ .enumerate()
+ .find_map(|(index, _item)| (!self.is_tab_pinned(index)).then_some(index))
+ else {
+ return;
+ };
+ self.activate_item(index, true, true, window, cx);
+ }
+
fn render_tab(
&self,
ix: usize,
@@ -2274,7 +2331,10 @@ impl Pane {
pane.unpin_tab_at(ix, window, cx);
}))
} else {
- end_slot_action = &CloseActiveItem { save_intent: None };
+ end_slot_action = &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ };
end_slot_tooltip_text = "Close Tab";
IconButton::new("close tab", IconName::Close)
.when(!always_show_close_button, |button| {
@@ -2344,7 +2404,10 @@ impl Pane {
menu = menu
.entry(
"Close",
- Some(Box::new(CloseActiveItem { save_intent: None })),
+ Some(Box::new(CloseActiveItem {
+ save_intent: None,
+ close_pinned: true,
+ })),
window.handler_for(&pane, move |pane, window, cx| {
pane.close_item_by_id(item_id, SaveIntent::Close, window, cx)
.detach_and_log_err(cx);
@@ -2985,14 +3048,9 @@ impl Pane {
self.items
.iter()
- .map(|item| item.item_id())
- .filter(|item_id| {
- if let Some(ix) = self.index_for_item_id(*item_id) {
- self.is_tab_pinned(ix)
- } else {
- true
- }
- })
+ .enumerate()
+ .filter(|(index, _item)| self.is_tab_pinned(*index))
+ .map(|(_, item)| item.item_id())
.collect()
}
@@ -3555,7 +3613,14 @@ mod tests {
pane.update_in(cx, |pane, window, cx| {
assert!(pane
- .close_active_item(&CloseActiveItem { save_intent: None }, window, cx)
+ .close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false
+ },
+ window,
+ cx
+ )
.is_none())
});
}
@@ -3896,7 +3961,14 @@ mod tests {
assert_item_labels(&pane, ["A", "B", "1*", "C", "D"], cx);
pane.update_in(cx, |pane, window, cx| {
- pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx)
+ pane.close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
+ window,
+ cx,
+ )
})
.unwrap()
.await
@@ -3909,7 +3981,14 @@ mod tests {
assert_item_labels(&pane, ["A", "B", "C", "D*"], cx);
pane.update_in(cx, |pane, window, cx| {
- pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx)
+ pane.close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
+ window,
+ cx,
+ )
})
.unwrap()
.await
@@ -3917,7 +3996,14 @@ mod tests {
assert_item_labels(&pane, ["A", "B*", "C"], cx);
pane.update_in(cx, |pane, window, cx| {
- pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx)
+ pane.close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
+ window,
+ cx,
+ )
})
.unwrap()
.await
@@ -3925,7 +4011,14 @@ mod tests {
assert_item_labels(&pane, ["A", "C*"], cx);
pane.update_in(cx, |pane, window, cx| {
- pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx)
+ pane.close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
+ window,
+ cx,
+ )
})
.unwrap()
.await
@@ -3961,7 +4054,14 @@ mod tests {
assert_item_labels(&pane, ["A", "B", "1*", "C", "D"], cx);
pane.update_in(cx, |pane, window, cx| {
- pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx)
+ pane.close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
+ window,
+ cx,
+ )
})
.unwrap()
.await
@@ -3974,7 +4074,14 @@ mod tests {
assert_item_labels(&pane, ["A", "B", "C", "D*"], cx);
pane.update_in(cx, |pane, window, cx| {
- pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx)
+ pane.close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
+ window,
+ cx,
+ )
})
.unwrap()
.await
@@ -3982,7 +4089,14 @@ mod tests {
assert_item_labels(&pane, ["A", "B", "C*"], cx);
pane.update_in(cx, |pane, window, cx| {
- pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx)
+ pane.close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
+ window,
+ cx,
+ )
})
.unwrap()
.await
@@ -3990,7 +4104,14 @@ mod tests {
assert_item_labels(&pane, ["A", "B*"], cx);
pane.update_in(cx, |pane, window, cx| {
- pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx)
+ pane.close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
+ window,
+ cx,
+ )
})
.unwrap()
.await
@@ -4026,7 +4147,14 @@ mod tests {
assert_item_labels(&pane, ["A", "B", "1*", "C", "D"], cx);
pane.update_in(cx, |pane, window, cx| {
- pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx)
+ pane.close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
+ window,
+ cx,
+ )
})
.unwrap()
.await
@@ -4039,7 +4167,14 @@ mod tests {
assert_item_labels(&pane, ["A", "B", "C", "D*"], cx);
pane.update_in(cx, |pane, window, cx| {
- pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx)
+ pane.close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
+ window,
+ cx,
+ )
})
.unwrap()
.await
@@ -4052,7 +4187,14 @@ mod tests {
assert_item_labels(&pane, ["A*", "B", "C"], cx);
pane.update_in(cx, |pane, window, cx| {
- pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx)
+ pane.close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
+ window,
+ cx,
+ )
})
.unwrap()
.await
@@ -4060,7 +4202,14 @@ mod tests {
assert_item_labels(&pane, ["B*", "C"], cx);
pane.update_in(cx, |pane, window, cx| {
- pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx)
+ pane.close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
+ window,
+ cx,
+ )
})
.unwrap()
.await
@@ -4294,7 +4443,7 @@ mod tests {
let project = Project::test(fs, None, cx).await;
let (workspace, cx) =
- cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
+ cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone());
let item_a = add_labeled_item(&pane, "A", false, cx);
@@ -4320,6 +4469,71 @@ mod tests {
assert_item_labels(&pane, [], cx);
}
+ #[gpui::test]
+ async fn test_close_pinned_tab_with_non_pinned_in_same_pane(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.executor());
+ let project = Project::test(fs, None, cx).await;
+ let (workspace, cx) =
+ cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
+
+ // Non-pinned tabs in same pane
+ let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone());
+ add_labeled_item(&pane, "A", false, cx);
+ add_labeled_item(&pane, "B", false, cx);
+ add_labeled_item(&pane, "C", false, cx);
+ pane.update_in(cx, |pane, window, cx| {
+ pane.pin_tab_at(0, window, cx);
+ });
+ set_labeled_items(&pane, ["A*", "B", "C"], cx);
+ pane.update_in(cx, |pane, window, cx| {
+ pane.close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
+ window,
+ cx,
+ );
+ });
+ // Non-pinned tab should be active
+ assert_item_labels(&pane, ["A", "B*", "C"], cx);
+ }
+
+ #[gpui::test]
+ async fn test_close_pinned_tab_with_non_pinned_in_different_pane(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.executor());
+ let project = Project::test(fs, None, cx).await;
+ let (workspace, cx) =
+ cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
+
+ // No non-pinned tabs in same pane, non-pinned tabs in another pane
+ let pane1 = workspace.update(cx, |workspace, _| workspace.active_pane().clone());
+ let pane2 = workspace.update_in(cx, |workspace, window, cx| {
+ workspace.split_pane(pane1.clone(), SplitDirection::Right, window, cx)
+ });
+ add_labeled_item(&pane1, "A", false, cx);
+ pane1.update_in(cx, |pane, window, cx| {
+ pane.pin_tab_at(0, window, cx);
+ });
+ set_labeled_items(&pane1, ["A*"], cx);
+ add_labeled_item(&pane2, "B", false, cx);
+ set_labeled_items(&pane2, ["B"], cx);
+ pane1.update_in(cx, |pane, window, cx| {
+ pane.close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
+ window,
+ cx,
+ );
+ });
+ // Non-pinned tab of other pane should be active
+ assert_item_labels(&pane2, ["B*"], cx);
+ }
+
fn init_test(cx: &mut TestAppContext) {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
@@ -725,6 +725,7 @@ impl PaneAxis {
}
#[derive(Clone, Copy, Debug, Deserialize, PartialEq, JsonSchema)]
+#[serde(rename_all = "snake_case")]
pub enum SplitDirection {
Up,
Down,
@@ -807,14 +808,6 @@ impl SplitDirection {
}
}
-#[derive(Clone, Copy, Debug, Deserialize, JsonSchema, PartialEq)]
-pub enum ResizeIntent {
- Lengthen,
- Shorten,
- Widen,
- Narrow,
-}
-
mod element {
use std::mem;
use std::{cell::RefCell, iter, rc::Rc, sync::Arc};
@@ -6,7 +6,7 @@ use ui::{
element_cell, prelude::*, string_cell, utils::calculate_contrast_ratio, AudioStatus,
Availability, Avatar, AvatarAudioStatusIndicator, AvatarAvailabilityIndicator, ButtonLike,
Checkbox, CheckboxWithLabel, ContentGroup, DecoratedIcon, ElevationIndex, Facepile,
- IconDecoration, Indicator, Switch, Table, TintColor, Tooltip,
+ IconDecoration, Indicator, KeybindingHint, Switch, Table, TintColor, Tooltip,
};
use crate::{Item, Workspace};
@@ -408,6 +408,7 @@ impl ThemePreview {
.child(Facepile::render_component_previews(window, cx))
.child(Icon::render_component_previews(window, cx))
.child(IconDecoration::render_component_previews(window, cx))
+ .child(KeybindingHint::render_component_previews(window, cx))
.child(Indicator::render_component_previews(window, cx))
.child(Switch::render_component_previews(window, cx))
.child(Table::render_component_previews(window, cx))
@@ -21,7 +21,8 @@ use client::{
};
use collections::{hash_map, HashMap, HashSet};
use derive_more::{Deref, DerefMut};
-use dock::{Dock, DockPosition, Panel, PanelButtons, PanelHandle, RESIZE_HANDLE_SIZE};
+pub use dock::Panel;
+use dock::{Dock, DockPosition, PanelButtons, PanelHandle, RESIZE_HANDLE_SIZE};
use futures::{
channel::{
mpsc::{self, UnboundedReceiver, UnboundedSender},
@@ -169,12 +170,7 @@ pub struct OpenPaths {
pub struct ActivatePane(pub usize);
#[derive(Clone, Deserialize, PartialEq, JsonSchema)]
-pub struct ActivatePaneInDirection(pub SplitDirection);
-
-#[derive(Clone, Deserialize, PartialEq, JsonSchema)]
-pub struct SwapPaneInDirection(pub SplitDirection);
-
-#[derive(Clone, Deserialize, PartialEq, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct MoveItemToPane {
pub destination: usize,
#[serde(default = "default_true")]
@@ -182,6 +178,7 @@ pub struct MoveItemToPane {
}
#[derive(Clone, Deserialize, PartialEq, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct MoveItemToPaneInDirection {
pub direction: SplitDirection,
#[serde(default = "default_true")]
@@ -189,25 +186,25 @@ pub struct MoveItemToPaneInDirection {
}
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub struct SaveAll {
pub save_intent: Option<SaveIntent>,
}
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub struct Save {
pub save_intent: Option<SaveIntent>,
}
#[derive(Clone, PartialEq, Debug, Deserialize, Default, JsonSchema)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub struct CloseAllItemsAndPanes {
pub save_intent: Option<SaveIntent>,
}
#[derive(Clone, PartialEq, Debug, Deserialize, Default, JsonSchema)]
-#[serde(rename_all = "camelCase")]
+#[serde(deny_unknown_fields)]
pub struct CloseInactiveTabsAndPanes {
pub save_intent: Option<SaveIntent>,
}
@@ -216,6 +213,7 @@ pub struct CloseInactiveTabsAndPanes {
pub struct SendKeystrokes(pub String);
#[derive(Clone, Deserialize, PartialEq, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct Reload {
pub binary_path: Option<PathBuf>,
}
@@ -234,7 +232,6 @@ impl_actions!(
workspace,
[
ActivatePane,
- ActivatePaneInDirection,
CloseAllItemsAndPanes,
CloseInactiveTabsAndPanes,
MoveItemToPane,
@@ -243,11 +240,24 @@ impl_actions!(
Reload,
Save,
SaveAll,
- SwapPaneInDirection,
SendKeystrokes,
]
);
+actions!(
+ workspace,
+ [
+ ActivatePaneLeft,
+ ActivatePaneRight,
+ ActivatePaneUp,
+ ActivatePaneDown,
+ SwapPaneLeft,
+ SwapPaneRight,
+ SwapPaneUp,
+ SwapPaneDown,
+ ]
+);
+
#[derive(PartialEq, Eq, Debug)]
pub enum CloseIntent {
/// Quit the program entirely.
@@ -300,6 +310,7 @@ impl PartialEq for Toast {
}
#[derive(Debug, Default, Clone, Deserialize, PartialEq, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct OpenTerminal {
pub working_directory: PathBuf,
}
@@ -4820,29 +4831,38 @@ impl Workspace {
workspace.activate_previous_window(cx)
}),
)
- .on_action(
- cx.listener(|workspace, action: &ActivatePaneInDirection, window, cx| {
- workspace.activate_pane_in_direction(action.0, window, cx)
- }),
- )
+ .on_action(cx.listener(|workspace, _: &ActivatePaneLeft, window, cx| {
+ workspace.activate_pane_in_direction(SplitDirection::Left, window, cx)
+ }))
+ .on_action(cx.listener(|workspace, _: &ActivatePaneRight, window, cx| {
+ workspace.activate_pane_in_direction(SplitDirection::Right, window, cx)
+ }))
+ .on_action(cx.listener(|workspace, _: &ActivatePaneUp, window, cx| {
+ workspace.activate_pane_in_direction(SplitDirection::Up, window, cx)
+ }))
+ .on_action(cx.listener(|workspace, _: &ActivatePaneDown, window, cx| {
+ workspace.activate_pane_in_direction(SplitDirection::Down, window, cx)
+ }))
.on_action(cx.listener(|workspace, _: &ActivateNextPane, window, cx| {
workspace.activate_next_pane(window, cx)
}))
- .on_action(
- cx.listener(|workspace, action: &ActivatePaneInDirection, window, cx| {
- workspace.activate_pane_in_direction(action.0, window, cx)
- }),
- )
.on_action(cx.listener(
|workspace, action: &MoveItemToPaneInDirection, window, cx| {
workspace.move_item_to_pane_in_direction(action, window, cx)
},
))
- .on_action(
- cx.listener(|workspace, action: &SwapPaneInDirection, _, cx| {
- workspace.swap_pane_in_direction(action.0, cx)
- }),
- )
+ .on_action(cx.listener(|workspace, _: &SwapPaneLeft, _, cx| {
+ workspace.swap_pane_in_direction(SplitDirection::Left, cx)
+ }))
+ .on_action(cx.listener(|workspace, _: &SwapPaneRight, _, cx| {
+ workspace.swap_pane_in_direction(SplitDirection::Right, cx)
+ }))
+ .on_action(cx.listener(|workspace, _: &SwapPaneUp, _, cx| {
+ workspace.swap_pane_in_direction(SplitDirection::Up, cx)
+ }))
+ .on_action(cx.listener(|workspace, _: &SwapPaneDown, _, cx| {
+ workspace.swap_pane_in_direction(SplitDirection::Down, cx)
+ }))
.on_action(cx.listener(|this, _: &ToggleLeftDock, window, cx| {
this.toggle_dock(DockPosition::Left, window, cx);
}))
@@ -5207,8 +5227,9 @@ fn notify_if_database_failed(workspace: WindowHandle<Workspace>, cx: &mut AsyncA
|cx| {
cx.new(|_| {
MessageNotification::new("Failed to load the database file.")
- .with_click_message("File an issue")
- .on_click(|_window, cx| cx.open_url(REPORT_ISSUE_URL))
+ .primary_message("File an Issue")
+ .primary_icon(IconName::Plus)
+ .primary_on_click(|_window, cx| cx.open_url(REPORT_ISSUE_URL))
})
},
);
@@ -8154,6 +8175,7 @@ mod tests {
pane.close_active_item(
&CloseActiveItem {
save_intent: Some(SaveIntent::Close),
+ close_pinned: false,
},
window,
cx,
@@ -8258,7 +8280,14 @@ mod tests {
});
let close_singleton_buffer_task = pane
.update_in(cx, |pane, window, cx| {
- pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx)
+ pane.close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
+ window,
+ cx,
+ )
})
.expect("should have active singleton buffer to close");
cx.background_executor.run_until_parked();
@@ -8364,7 +8393,14 @@ mod tests {
});
let _close_multi_buffer_task = pane
.update_in(cx, |pane, window, cx| {
- pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx)
+ pane.close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
+ window,
+ cx,
+ )
})
.expect("should have active multi buffer to close");
cx.background_executor.run_until_parked();
@@ -8455,7 +8491,14 @@ mod tests {
});
let close_multi_buffer_task = pane
.update_in(cx, |pane, window, cx| {
- pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx)
+ pane.close_active_item(
+ &CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
+ window,
+ cx,
+ )
})
.expect("should have active multi buffer to close");
cx.background_executor.run_until_parked();
@@ -177,7 +177,7 @@ pub struct Snapshot {
completed_scan_id: usize,
}
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RepositoryEntry {
/// The git status entries for this repository.
/// Note that the paths on this repository are relative to the git work directory.
@@ -199,9 +199,10 @@ pub struct RepositoryEntry {
/// - my_sub_folder_1/project_root/changed_file_1
/// - my_sub_folder_2/changed_file_2
pub(crate) statuses_by_path: SumTree<StatusEntry>,
- pub work_directory_id: ProjectEntryId,
+ work_directory_id: ProjectEntryId,
pub work_directory: WorkDirectory,
pub(crate) branch: Option<Arc<str>>,
+ pub current_merge_conflicts: TreeSet<RepoPath>,
}
impl Deref for RepositoryEntry {
@@ -255,6 +256,11 @@ impl RepositoryEntry {
.map(|entry| entry.to_proto())
.collect(),
removed_statuses: Default::default(),
+ current_merge_conflicts: self
+ .current_merge_conflicts
+ .iter()
+ .map(|repo_path| repo_path.to_proto())
+ .collect(),
}
}
@@ -305,6 +311,11 @@ impl RepositoryEntry {
branch: self.branch.as_ref().map(|branch| branch.to_string()),
updated_statuses,
removed_statuses,
+ current_merge_conflicts: self
+ .current_merge_conflicts
+ .iter()
+ .map(RepoPath::to_proto)
+ .collect(),
}
}
}
@@ -360,15 +371,19 @@ impl WorkDirectory {
/// of the project root folder, then the returned RepoPath is relative to the root
/// of the repository and not a valid path inside the project.
pub fn relativize(&self, path: &Path) -> Result<RepoPath> {
- if let Some(location_in_repo) = &self.location_in_repo {
- Ok(location_in_repo.join(path).into())
+ let repo_path = if let Some(location_in_repo) = &self.location_in_repo {
+ // Avoid joining a `/` to location_in_repo in the case of a single-file worktree.
+ if path == Path::new("") {
+ RepoPath(location_in_repo.clone())
+ } else {
+ location_in_repo.join(path).into()
+ }
} else {
- let relativized_path = path
- .strip_prefix(&self.path)
- .map_err(|_| anyhow!("could not relativize {:?} against {:?}", path, self.path))?;
-
- Ok(relativized_path.into())
- }
+ path.strip_prefix(&self.path)
+ .map_err(|_| anyhow!("could not relativize {:?} against {:?}", path, self.path))?
+ .into()
+ };
+ Ok(repo_path)
}
/// This is the opposite operation to `relativize` above
@@ -454,6 +469,7 @@ struct BackgroundScannerState {
#[derive(Debug, Clone)]
pub struct LocalRepositoryEntry {
+ pub(crate) work_directory_id: ProjectEntryId,
pub(crate) work_directory: WorkDirectory,
pub(crate) git_dir_scan_id: usize,
pub(crate) status_scan_id: usize,
@@ -463,6 +479,7 @@ pub struct LocalRepositoryEntry {
pub(crate) dot_git_dir_abs_path: Arc<Path>,
/// Absolute path to the .git file, if we're in a git worktree.
pub(crate) dot_git_worktree_abs_path: Option<Arc<Path>>,
+ pub current_merge_head_shas: Vec<String>,
}
impl sum_tree::Item for LocalRepositoryEntry {
@@ -886,7 +903,7 @@ impl Worktree {
}
}
}
- Ok(None)
+ Err(anyhow!("No repository found for {path:?}"))
})
}
Worktree::Remote(_) => {
@@ -895,6 +912,30 @@ impl Worktree {
}
}
+ pub fn load_committed_file(&self, path: &Path, cx: &App) -> Task<Result<Option<String>>> {
+ match self {
+ Worktree::Local(this) => {
+ let path = Arc::from(path);
+ let snapshot = this.snapshot();
+ cx.background_executor().spawn(async move {
+ if let Some(repo) = snapshot.repository_for_path(&path) {
+ if let Some(repo_path) = repo.relativize(&path).log_err() {
+ if let Some(git_repo) =
+ snapshot.git_repositories.get(&repo.work_directory_id)
+ {
+ return Ok(git_repo.repo_ptr.load_committed_text(&repo_path));
+ }
+ }
+ }
+ Err(anyhow!("No repository found for {path:?}"))
+ })
+ }
+ Worktree::Remote(_) => Task::ready(Err(anyhow!(
+ "remote worktrees can't yet load committed files"
+ ))),
+ }
+ }
+
pub fn load_binary_file(
&self,
path: &Path,
@@ -1295,14 +1336,7 @@ impl LocalWorktree {
let settings = self.settings.clone();
let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
let background_scanner = cx.background_executor().spawn({
- let abs_path = &snapshot.abs_path;
- #[cfg(target_os = "windows")]
- let abs_path = abs_path
- .as_path()
- .canonicalize()
- .unwrap_or_else(|_| abs_path.as_path().to_path_buf());
- #[cfg(not(target_os = "windows"))]
- let abs_path = abs_path.as_path().to_path_buf();
+ let abs_path = snapshot.abs_path.as_path().to_path_buf();
let background = cx.background_executor().clone();
async move {
let (events, watcher) = fs.watch(&abs_path, FS_WATCH_LATENCY).await;
@@ -2500,6 +2534,13 @@ impl Snapshot {
for repository in update.updated_repositories {
let work_directory_id = ProjectEntryId::from_proto(repository.work_directory_id);
if let Some(work_dir_entry) = self.entry_for_id(work_directory_id) {
+ let conflicted_paths = TreeSet::from_ordered_entries(
+ repository
+ .current_merge_conflicts
+ .into_iter()
+ .map(|path| RepoPath(Path::new(&path).into())),
+ );
+
if self
.repositories
.contains(&PathKey(work_dir_entry.path.clone()), &())
@@ -2519,6 +2560,7 @@ impl Snapshot {
.update(&PathKey(work_dir_entry.path.clone()), &(), |repo| {
repo.branch = repository.branch.map(Into::into);
repo.statuses_by_path.edit(edits, &());
+ repo.current_merge_conflicts = conflicted_paths
});
} else {
let statuses = SumTree::from_iter(
@@ -2541,6 +2583,7 @@ impl Snapshot {
},
branch: repository.branch.map(Into::into),
statuses_by_path: statuses,
+ current_merge_conflicts: conflicted_paths,
},
&(),
);
@@ -2665,21 +2708,10 @@ impl Snapshot {
/// Get the repository whose work directory contains the given path.
pub fn repository_for_path(&self, path: &Path) -> Option<&RepositoryEntry> {
- let mut cursor = self.repositories.cursor::<PathProgress>(&());
- let mut repository = None;
-
- // Git repositories may contain other git repositories. As a side effect of
- // lexicographic sorting by path, deeper repositories will be after higher repositories
- // So, let's loop through every matching repository until we can't find any more to find
- // the deepest repository that could contain this path.
- while cursor.seek_forward(&PathTarget::Contains(path), Bias::Left, &())
- && cursor.item().is_some()
- {
- repository = cursor.item();
- cursor.next(&());
- }
-
- repository
+ self.repositories
+ .iter()
+ .filter(|repo| repo.work_directory.directory_contains(path))
+ .last()
}
/// Given an ordered iterator of entries, returns an iterator of those entries,
@@ -3319,18 +3351,23 @@ impl BackgroundScannerState {
let t0 = Instant::now();
let repository = fs.open_repo(&dot_git_abs_path)?;
- let actual_repo_path = repository.dot_git_dir();
+ let repository_path = repository.path();
+ watcher.add(&repository_path).log_err()?;
- let actual_dot_git_dir_abs_path = smol::block_on(find_git_dir(&actual_repo_path, fs))?;
- watcher.add(&actual_repo_path).log_err()?;
-
- let dot_git_worktree_abs_path = if actual_dot_git_dir_abs_path.as_ref() == dot_git_abs_path
- {
+ let actual_dot_git_dir_abs_path = repository.main_repository_path();
+ let dot_git_worktree_abs_path = if actual_dot_git_dir_abs_path == dot_git_abs_path {
None
} else {
// The two paths could be different because we opened a git worktree.
- // When that happens, the .git path in the worktree (`dot_git_abs_path`) is a file that
- // points to the worktree-subdirectory in the actual .git directory (`git_dir_path`)
+ // When that happens:
+ //
+ // * `dot_git_abs_path` is a file that points to the worktree-subdirectory in the actual
+ // .git directory.
+ //
+ // * `repository_path` is the worktree-subdirectory.
+ //
+ // * `actual_dot_git_dir_abs_path` is the path to the actual .git directory. In git
+ // documentation this is called the "commondir".
watcher.add(&dot_git_abs_path).log_err()?;
Some(Arc::from(dot_git_abs_path))
};
@@ -3354,17 +3391,20 @@ impl BackgroundScannerState {
work_directory: work_directory.clone(),
branch: repository.branch_name().map(Into::into),
statuses_by_path: Default::default(),
+ current_merge_conflicts: Default::default(),
},
&(),
);
let local_repository = LocalRepositoryEntry {
+ work_directory_id: work_dir_id,
work_directory: work_directory.clone(),
git_dir_scan_id: 0,
status_scan_id: 0,
repo_ptr: repository.clone(),
- dot_git_dir_abs_path: actual_dot_git_dir_abs_path,
+ dot_git_dir_abs_path: actual_dot_git_dir_abs_path.into(),
dot_git_worktree_abs_path,
+ current_merge_head_shas: Default::default(),
};
self.snapshot
@@ -3391,15 +3431,6 @@ async fn is_git_dir(path: &Path, fs: &dyn Fs) -> bool {
matches!(config_metadata, Ok(Some(_)))
}
-async fn find_git_dir(path: &Path, fs: &dyn Fs) -> Option<Arc<Path>> {
- for ancestor in path.ancestors() {
- if is_git_dir(ancestor, fs).await {
- return Some(Arc::from(ancestor));
- }
- }
- None
-}
-
async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result<Gitignore> {
let contents = fs.load(abs_path).await?;
let parent = abs_path.parent().unwrap_or_else(|| Path::new("/"));
@@ -5126,11 +5157,11 @@ impl BackgroundScanner {
.snapshot
.git_repositories
.iter()
- .find_map(|(entry_id, repo)| {
+ .find_map(|(_, repo)| {
if repo.dot_git_dir_abs_path.as_ref() == &dot_git_dir
|| repo.dot_git_worktree_abs_path.as_deref() == Some(&dot_git_dir)
{
- Some((*entry_id, repo.clone()))
+ Some(repo.clone())
} else {
None
}
@@ -5147,13 +5178,13 @@ impl BackgroundScanner {
None => continue,
}
}
- Some((entry_id, local_repository)) => {
+ Some(local_repository) => {
if local_repository.git_dir_scan_id == scan_id {
continue;
}
let Some(work_dir) = state
.snapshot
- .entry_for_id(entry_id)
+ .entry_for_id(local_repository.work_directory_id)
.map(|entry| entry.path.clone())
else {
continue;
@@ -5162,10 +5193,13 @@ impl BackgroundScanner {
let branch = local_repository.repo_ptr.branch_name();
local_repository.repo_ptr.reload_index();
- state.snapshot.git_repositories.update(&entry_id, |entry| {
- entry.git_dir_scan_id = scan_id;
- entry.status_scan_id = scan_id;
- });
+ state.snapshot.git_repositories.update(
+ &local_repository.work_directory_id,
+ |entry| {
+ entry.git_dir_scan_id = scan_id;
+ entry.status_scan_id = scan_id;
+ },
+ );
state.snapshot.snapshot.repositories.update(
&PathKey(work_dir.clone()),
&(),
@@ -5270,6 +5304,11 @@ impl BackgroundScanner {
return;
};
+ let merge_head_shas = job.local_repository.repo().merge_head_shas();
+ if merge_head_shas != job.local_repository.current_merge_head_shas {
+ mem::take(&mut repository.current_merge_conflicts);
+ }
+
let mut new_entries_by_path = SumTree::new(&());
for (repo_path, status) in statuses.entries.iter() {
let project_path = repository.work_directory.unrelativize(repo_path);
@@ -5281,6 +5320,9 @@ impl BackgroundScanner {
},
&(),
);
+ if status.is_conflicted() {
+ repository.current_merge_conflicts.insert(repo_path.clone());
+ }
if let Some(path) = project_path {
changed_paths.push(path);
@@ -5294,6 +5336,13 @@ impl BackgroundScanner {
.repositories
.insert_or_replace(repository, &());
+ state
+ .snapshot
+ .git_repositories
+ .update(&job.local_repository.work_directory_id, |entry| {
+ entry.current_merge_head_shas = merge_head_shas;
+ });
+
util::extend_sorted(
&mut state.changed_paths,
changed_paths,
@@ -5965,7 +6014,6 @@ impl<'a> Iterator for Traversal<'a> {
enum PathTarget<'a> {
Path(&'a Path),
Successor(&'a Path),
- Contains(&'a Path),
}
impl<'a> PathTarget<'a> {
@@ -5979,13 +6027,6 @@ impl<'a> PathTarget<'a> {
Ordering::Equal
}
}
- PathTarget::Contains(path) => {
- if path.starts_with(other) {
- Ordering::Equal
- } else {
- Ordering::Greater
- }
- }
}
}
}
@@ -2156,7 +2156,13 @@ const CONFLICT: FileStatus = FileStatus::Unmerged(UnmergedStatus {
second_head: UnmergedStatusCode::Updated,
});
+// NOTE:
+// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
+// a directory that some program currently has open.
+// This is a limitation of Windows.
+// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
+#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut TestAppContext) {
init_test(cx);
cx.executor().allow_parking();
@@ -2184,7 +2190,7 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) {
let repo = git_init(&root_path.join("projects/project1"));
git_add("a", &repo);
git_commit("init", &repo);
- std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
+ std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
@@ -2209,7 +2215,7 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) {
root_path.join("projects/project1"),
root_path.join("projects/project2"),
)
- .ok();
+ .unwrap();
tree.flush_fs_events(cx).await;
cx.read(|cx| {
@@ -2335,7 +2341,13 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) {
});
}
+// NOTE:
+// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
+// a directory that some program currently has open.
+// This is a limitation of Windows.
+// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
+#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut TestAppContext) {
init_test(cx);
cx.executor().allow_parking();
@@ -2,7 +2,7 @@
description = "The fast, collaborative code editor."
edition.workspace = true
name = "zed"
-version = "0.173.0"
+version = "0.174.0"
publish.workspace = true
license = "GPL-3.0-or-later"
authors = ["Zed Team <hi@zed.dev>"]
@@ -188,9 +188,12 @@ fn main() {
let session_id = Uuid::new_v4().to_string();
let session = app.background_executor().block(Session::new());
let app_version = AppVersion::init(env!("CARGO_PKG_VERSION"));
+ let app_commit_sha =
+ option_env!("ZED_COMMIT_SHA").map(|commit_sha| AppCommitSha(commit_sha.to_string()));
reliability::init_panic_hook(
app_version,
+ app_commit_sha.clone(),
system_id.as_ref().map(|id| id.to_string()),
installation_id.as_ref().map(|id| id.to_string()),
session_id.clone(),
@@ -281,8 +284,8 @@ fn main() {
app.run(move |cx| {
release_channel::init(app_version, cx);
gpui_tokio::init(cx);
- if let Some(build_sha) = option_env!("ZED_COMMIT_SHA") {
- AppCommitSha::set_global(AppCommitSha(build_sha.into()), cx);
+ if let Some(app_commit_sha) = app_commit_sha {
+ AppCommitSha::set_global(app_commit_sha, cx);
}
settings::init(cx);
handle_settings_file_changes(user_settings_file_rx, cx, handle_settings_changed);
@@ -8,7 +8,7 @@ use gpui::{App, SemanticVersion};
use http_client::{self, HttpClient, HttpClientWithUrl, HttpRequestExt, Method};
use paths::{crashes_dir, crashes_retired_dir};
use project::Project;
-use release_channel::{ReleaseChannel, RELEASE_CHANNEL};
+use release_channel::{AppCommitSha, ReleaseChannel, RELEASE_CHANNEL};
use settings::Settings;
use smol::stream::StreamExt;
use std::{
@@ -25,6 +25,7 @@ static PANIC_COUNT: AtomicU32 = AtomicU32::new(0);
pub fn init_panic_hook(
app_version: SemanticVersion,
+ app_commit_sha: Option<AppCommitSha>,
system_id: Option<String>,
installation_id: Option<String>,
session_id: String,
@@ -54,12 +55,22 @@ pub fn init_panic_hook(
let location = info.location().unwrap();
let backtrace = Backtrace::new();
eprintln!(
- "Thread {:?} panicked with {:?} at {}:{}:{}\n{:?}",
+ "Thread {:?} panicked with {:?} at {}:{}:{}\n{}{:?}",
thread_name,
payload,
location.file(),
location.line(),
location.column(),
+ match app_commit_sha.as_ref() {
+ Some(commit_sha) => format!(
+ "https://github.com/zed-industries/zed/blob/{}/src/{}#L{} \
+ (may not be uploaded, line may be incorrect if files modified)\n",
+ commit_sha.0,
+ location.file(),
+ location.line()
+ ),
+ None => "".to_string(),
+ },
backtrace,
);
std::process::exit(-1);
@@ -103,6 +114,7 @@ pub fn init_panic_hook(
line: location.line(),
}),
app_version: app_version.to_string(),
+ app_commit_sha: app_commit_sha.as_ref().map(|sha| sha.0.clone()),
release_channel: RELEASE_CHANNEL.dev_name().into(),
target: env!("TARGET").to_owned().into(),
os_name: telemetry::os_name(),
@@ -20,12 +20,14 @@ use command_palette_hooks::CommandPaletteFilter;
use editor::ProposedChangesEditorToolbar;
use editor::{scroll::Autoscroll, Editor, MultiBuffer};
use feature_flags::{FeatureFlagAppExt, FeatureFlagViewExt, GitUiFeatureFlag};
+use fs::Fs;
use futures::{channel::mpsc, select_biased, StreamExt};
use gpui::{
actions, point, px, Action, App, AppContext as _, AsyncApp, Context, DismissEvent, Element,
Entity, Focusable, KeyBinding, MenuItem, ParentElement, PathPromptOptions, PromptLevel,
ReadGlobal, SharedString, Styled, Task, TitlebarOptions, Window, WindowKind, WindowOptions,
};
+use image_viewer::ImageInfo;
pub use open_listener::*;
use outline_panel::OutlinePanel;
use paths::{local_settings_file_relative_path, local_tasks_file_relative_path};
@@ -49,7 +51,7 @@ use std::time::Duration;
use std::{borrow::Cow, ops::Deref, path::Path, sync::Arc};
use terminal_view::terminal_panel::{self, TerminalPanel};
use theme::{ActiveTheme, ThemeSettings};
-use ui::PopoverMenuHandle;
+use ui::{prelude::*, PopoverMenuHandle};
use util::markdown::MarkdownString;
use util::{asset_str, ResultExt};
use uuid::Uuid;
@@ -201,6 +203,7 @@ pub fn initialize_workspace(
let active_toolchain_language =
cx.new(|cx| toolchain_selector::ActiveToolchain::new(workspace, window, cx));
let vim_mode_indicator = cx.new(|cx| vim::ModeIndicator::new(window, cx));
+ let image_info = cx.new(|_cx| ImageInfo::new(workspace));
let cursor_position =
cx.new(|_| go_to_line::cursor_position::CursorPosition::new(workspace));
workspace.status_bar().update(cx, |status_bar, cx| {
@@ -211,10 +214,9 @@ pub fn initialize_workspace(
status_bar.add_right_item(active_toolchain_language, window, cx);
status_bar.add_right_item(vim_mode_indicator, window, cx);
status_bar.add_right_item(cursor_position, window, cx);
+ status_bar.add_right_item(image_info, window, cx);
});
- auto_update_ui::notify_of_any_new_update(window, cx);
-
let handle = cx.entity().downgrade();
window.on_window_should_close(cx, move |window, cx| {
handle
@@ -879,7 +881,12 @@ fn about(
) {
let release_channel = ReleaseChannel::global(cx).display_name();
let version = env!("CARGO_PKG_VERSION");
- let message = format!("{release_channel} {version}");
+ let debug = if cfg!(debug_assertions) {
+ "(debug)"
+ } else {
+ ""
+ };
+ let message = format!("{release_channel} {version} {debug}");
let detail = AppCommitSha::try_global(cx).map(|sha| sha.0.clone());
let prompt = window.prompt(PromptLevel::Info, &message, detail.as_deref(), &["OK"], cx);
@@ -1143,18 +1150,34 @@ pub fn handle_keymap_file_changes(
cx.update(|cx| {
let load_result = KeymapFile::load(&user_keymap_content, cx);
match load_result {
- KeymapFileLoadResult::Success { key_bindings } => {
+ KeymapFileLoadResult::Success {
+ key_bindings,
+ keymap_file,
+ } => {
reload_keymaps(cx, key_bindings);
dismiss_app_notification(¬ification_id, cx);
+ show_keymap_migration_notification_if_needed(
+ keymap_file,
+ notification_id.clone(),
+ cx,
+ );
}
KeymapFileLoadResult::SomeFailedToLoad {
key_bindings,
+ keymap_file,
error_message,
} => {
if !key_bindings.is_empty() {
reload_keymaps(cx, key_bindings);
}
- show_keymap_file_load_error(notification_id.clone(), error_message, cx)
+ dismiss_app_notification(¬ification_id, cx);
+ if !show_keymap_migration_notification_if_needed(
+ keymap_file,
+ notification_id.clone(),
+ cx,
+ ) {
+ show_keymap_file_load_error(notification_id.clone(), error_message, cx);
+ }
}
KeymapFileLoadResult::JsonParseFailure { error } => {
show_keymap_file_json_error(notification_id.clone(), &error, cx)
@@ -1177,8 +1200,8 @@ fn show_keymap_file_json_error(
show_app_notification(notification_id, cx, move |cx| {
cx.new(|_cx| {
MessageNotification::new(message.clone())
- .with_click_message("Open keymap file")
- .on_click(|window, cx| {
+ .primary_message("Open Keymap File")
+ .primary_on_click(|window, cx| {
window.dispatch_action(zed_actions::OpenKeymap.boxed_clone(), cx);
cx.emit(DismissEvent);
})
@@ -1186,6 +1209,61 @@ fn show_keymap_file_json_error(
});
}
+fn show_keymap_migration_notification_if_needed(
+ keymap_file: KeymapFile,
+ notification_id: NotificationId,
+ cx: &mut App,
+) -> bool {
+ if !KeymapFile::should_migrate_keymap(keymap_file) {
+ return false;
+ }
+ show_app_notification(notification_id, cx, move |cx| {
+ cx.new(move |_cx| {
+ let message = "A newer version of Zed has simplified several keymaps. Your existing keymaps may be deprecated. You can migrate them by clicking below. A backup will be created in your home directory.";
+ let button_text = "Backup and Migrate Keymap";
+ MessageNotification::new_from_builder(move |_, _| {
+ gpui::div().text_xs().child(message).into_any()
+ })
+ .primary_message(button_text)
+ .primary_on_click(move |_, cx| {
+ let fs = <dyn Fs>::global(cx);
+ cx.spawn(move |weak_notification, mut cx| async move {
+ KeymapFile::migrate_keymap(fs).await.ok();
+ weak_notification.update(&mut cx, |_, cx| {
+ cx.emit(DismissEvent);
+ }).ok();
+ }).detach();
+ })
+ })
+ });
+ return true;
+}
+
+fn show_settings_migration_notification_if_needed(
+ notification_id: NotificationId,
+ settings: serde_json::Value,
+ cx: &mut App,
+) {
+ if !SettingsStore::should_migrate_settings(&settings) {
+ return;
+ }
+ show_app_notification(notification_id, cx, move |cx| {
+ cx.new(move |_cx| {
+ let message = "A newer version of Zed has updated some settings. Your existing settings may be deprecated. You can migrate them by clicking below. A backup will be created in your home directory.";
+ let button_text = "Backup and Migrate Settings";
+ MessageNotification::new_from_builder(move |_, _| {
+ gpui::div().text_xs().child(message).into_any()
+ })
+ .primary_message(button_text)
+ .primary_on_click(move |_, cx| {
+ let fs = <dyn Fs>::global(cx);
+ cx.update_global(|store: &mut SettingsStore, _| store.migrate_settings(fs));
+ cx.emit(DismissEvent);
+ })
+ })
+ });
+}
+
fn show_keymap_file_load_error(
notification_id: NotificationId,
markdown_error_message: MarkdownString,
@@ -1220,8 +1298,8 @@ fn show_keymap_file_load_error(
))
.into_any()
})
- .with_click_message("Open keymap file")
- .on_click(|window, cx| {
+ .primary_message("Open Keymap File")
+ .primary_on_click(|window, cx| {
window.dispatch_action(zed_actions::OpenKeymap.boxed_clone(), cx);
cx.emit(DismissEvent);
})
@@ -1258,12 +1336,12 @@ pub fn load_default_keymap(cx: &mut App) {
}
}
-pub fn handle_settings_changed(error: Option<anyhow::Error>, cx: &mut App) {
+pub fn handle_settings_changed(result: Result<serde_json::Value, anyhow::Error>, cx: &mut App) {
struct SettingsParseErrorNotification;
let id = NotificationId::unique::<SettingsParseErrorNotification>();
- match error {
- Some(error) => {
+ match result {
+ Err(error) => {
if let Some(InvalidSettingsError::LocalSettings { .. }) =
error.downcast_ref::<InvalidSettingsError>()
{
@@ -1273,15 +1351,19 @@ pub fn handle_settings_changed(error: Option<anyhow::Error>, cx: &mut App) {
show_app_notification(id, cx, move |cx| {
cx.new(|_cx| {
MessageNotification::new(format!("Invalid user settings file\n{error}"))
- .with_click_message("Open settings file")
- .on_click(|window, cx| {
+ .primary_message("Open Settings File")
+ .primary_icon(IconName::Settings)
+ .primary_on_click(|window, cx| {
window.dispatch_action(zed_actions::OpenSettings.boxed_clone(), cx);
cx.emit(DismissEvent);
})
})
});
}
- None => dismiss_app_notification(&id, cx),
+ Ok(settings) => {
+ dismiss_app_notification(&id, cx);
+ show_settings_migration_notification_if_needed(id, settings, cx);
+ }
}
}
@@ -1569,6 +1651,7 @@ mod tests {
time::Duration,
};
use theme::{ThemeRegistry, ThemeSettings};
+ use util::{path, separator};
use workspace::{
item::{Item, ItemHandle},
open_new, open_paths, pane, NewFile, OpenVisible, SaveIntent, SplitDirection,
@@ -1737,12 +1820,15 @@ mod tests {
app_state
.fs
.as_fake()
- .insert_tree("/root", json!({"a": "hey", "b": "", "dir": {"c": "f"}}))
+ .insert_tree(
+ path!("/root"),
+ json!({"a": "hey", "b": "", "dir": {"c": "f"}}),
+ )
.await;
cx.update(|cx| {
open_paths(
- &[PathBuf::from("/root/dir")],
+ &[PathBuf::from(path!("/root/dir"))],
app_state.clone(),
workspace::OpenOptions::default(),
cx,
@@ -1754,7 +1840,7 @@ mod tests {
cx.update(|cx| {
open_paths(
- &[PathBuf::from("/root/a")],
+ &[PathBuf::from(path!("/root/a"))],
app_state.clone(),
workspace::OpenOptions {
open_new_workspace: Some(false),
@@ -1769,7 +1855,7 @@ mod tests {
cx.update(|cx| {
open_paths(
- &[PathBuf::from("/root/dir/c")],
+ &[PathBuf::from(path!("/root/dir/c"))],
app_state.clone(),
workspace::OpenOptions {
open_new_workspace: Some(true),
@@ -1789,12 +1875,15 @@ mod tests {
app_state
.fs
.as_fake()
- .insert_tree("/root", json!({"dir1": {"a": "b"}, "dir2": {"c": "d"}}))
+ .insert_tree(
+ path!("/root"),
+ json!({"dir1": {"a": "b"}, "dir2": {"c": "d"}}),
+ )
.await;
cx.update(|cx| {
open_paths(
- &[PathBuf::from("/root/dir1/a")],
+ &[PathBuf::from(path!("/root/dir1/a"))],
app_state.clone(),
workspace::OpenOptions::default(),
cx,
@@ -1807,7 +1896,7 @@ mod tests {
cx.update(|cx| {
open_paths(
- &[PathBuf::from("/root/dir2/c")],
+ &[PathBuf::from(path!("/root/dir2/c"))],
app_state.clone(),
workspace::OpenOptions::default(),
cx,
@@ -1819,7 +1908,7 @@ mod tests {
cx.update(|cx| {
open_paths(
- &[PathBuf::from("/root/dir2")],
+ &[PathBuf::from(path!("/root/dir2"))],
app_state.clone(),
workspace::OpenOptions::default(),
cx,
@@ -1835,7 +1924,7 @@ mod tests {
cx.update(|cx| {
open_paths(
- &[PathBuf::from("/root/dir2/c")],
+ &[PathBuf::from(path!("/root/dir2/c"))],
app_state.clone(),
workspace::OpenOptions::default(),
cx,
@@ -1864,12 +1953,12 @@ mod tests {
app_state
.fs
.as_fake()
- .insert_tree("/root", json!({"a": "hey"}))
+ .insert_tree(path!("/root"), json!({"a": "hey"}))
.await;
cx.update(|cx| {
open_paths(
- &[PathBuf::from("/root/a")],
+ &[PathBuf::from(path!("/root/a"))],
app_state.clone(),
workspace::OpenOptions::default(),
cx,
@@ -1951,7 +2040,7 @@ mod tests {
// Opening the buffer again doesn't impact the window's edited state.
cx.update(|cx| {
open_paths(
- &[PathBuf::from("/root/a")],
+ &[PathBuf::from(path!("/root/a"))],
app_state,
workspace::OpenOptions::default(),
cx,
@@ -2013,12 +2102,12 @@ mod tests {
app_state
.fs
.as_fake()
- .insert_tree("/root", json!({"a": "hey"}))
+ .insert_tree(path!("/root"), json!({"a": "hey"}))
.await;
cx.update(|cx| {
open_paths(
- &[PathBuf::from("/root/a")],
+ &[PathBuf::from(path!("/root/a"))],
app_state.clone(),
workspace::OpenOptions::default(),
cx,
@@ -2070,7 +2159,7 @@ mod tests {
// When we now reopen the window, the edited state and the edited buffer are back
cx.update(|cx| {
open_paths(
- &[PathBuf::from("/root/a")],
+ &[PathBuf::from(path!("/root/a"))],
app_state.clone(),
workspace::OpenOptions::default(),
cx,
@@ -2166,7 +2255,7 @@ mod tests {
.fs
.as_fake()
.insert_tree(
- "/root",
+ path!("/root"),
json!({
"a": {
"file1": "contents 1",
@@ -2177,7 +2266,7 @@ mod tests {
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
project.update(cx, |project, _cx| {
project.languages().add(markdown_language())
});
@@ -2298,7 +2387,7 @@ mod tests {
.fs
.as_fake()
.insert_tree(
- "/",
+ path!("/"),
json!({
"dir1": {
"a.txt": ""
@@ -2316,7 +2405,7 @@ mod tests {
cx.update(|cx| {
open_paths(
- &[PathBuf::from("/dir1/")],
+ &[PathBuf::from(path!("/dir1/"))],
app_state,
workspace::OpenOptions::default(),
cx,
@@ -2363,7 +2452,7 @@ mod tests {
window
.update(cx, |workspace, window, cx| {
workspace.open_paths(
- vec!["/dir1/a.txt".into()],
+ vec![path!("/dir1/a.txt").into()],
OpenVisible::All,
None,
window,
@@ -2374,7 +2463,12 @@ mod tests {
.await;
cx.read(|cx| {
let workspace = workspace.read(cx);
- assert_project_panel_selection(workspace, Path::new("/dir1"), Path::new("a.txt"), cx);
+ assert_project_panel_selection(
+ workspace,
+ Path::new(path!("/dir1")),
+ Path::new("a.txt"),
+ cx,
+ );
assert_eq!(
workspace
.active_pane()
@@ -2393,7 +2487,7 @@ mod tests {
window
.update(cx, |workspace, window, cx| {
workspace.open_paths(
- vec!["/dir2/b.txt".into()],
+ vec![path!("/dir2/b.txt").into()],
OpenVisible::All,
None,
window,
@@ -2404,14 +2498,19 @@ mod tests {
.await;
cx.read(|cx| {
let workspace = workspace.read(cx);
- assert_project_panel_selection(workspace, Path::new("/dir2/b.txt"), Path::new(""), cx);
+ assert_project_panel_selection(
+ workspace,
+ Path::new(path!("/dir2/b.txt")),
+ Path::new(""),
+ cx,
+ );
let worktree_roots = workspace
.worktrees(cx)
.map(|w| w.read(cx).as_local().unwrap().abs_path().as_ref())
.collect::<HashSet<_>>();
assert_eq!(
worktree_roots,
- vec!["/dir1", "/dir2/b.txt"]
+ vec![path!("/dir1"), path!("/dir2/b.txt")]
.into_iter()
.map(Path::new)
.collect(),
@@ -2434,7 +2533,7 @@ mod tests {
window
.update(cx, |workspace, window, cx| {
workspace.open_paths(
- vec!["/dir3".into(), "/dir3/c.txt".into()],
+ vec![path!("/dir3").into(), path!("/dir3/c.txt").into()],
OpenVisible::All,
None,
window,
@@ -2445,14 +2544,19 @@ mod tests {
.await;
cx.read(|cx| {
let workspace = workspace.read(cx);
- assert_project_panel_selection(workspace, Path::new("/dir3"), Path::new("c.txt"), cx);
+ assert_project_panel_selection(
+ workspace,
+ Path::new(path!("/dir3")),
+ Path::new("c.txt"),
+ cx,
+ );
let worktree_roots = workspace
.worktrees(cx)
.map(|w| w.read(cx).as_local().unwrap().abs_path().as_ref())
.collect::<HashSet<_>>();
assert_eq!(
worktree_roots,
- vec!["/dir1", "/dir2/b.txt", "/dir3"]
+ vec![path!("/dir1"), path!("/dir2/b.txt"), path!("/dir3")]
.into_iter()
.map(Path::new)
.collect(),
@@ -2474,23 +2578,39 @@ mod tests {
// Ensure opening invisibly a file outside an existing worktree adds a new, invisible worktree.
window
.update(cx, |workspace, window, cx| {
- workspace.open_paths(vec!["/d.txt".into()], OpenVisible::None, None, window, cx)
+ workspace.open_paths(
+ vec![path!("/d.txt").into()],
+ OpenVisible::None,
+ None,
+ window,
+ cx,
+ )
})
.unwrap()
.await;
cx.read(|cx| {
let workspace = workspace.read(cx);
- assert_project_panel_selection(workspace, Path::new("/d.txt"), Path::new(""), cx);
+ assert_project_panel_selection(
+ workspace,
+ Path::new(path!("/d.txt")),
+ Path::new(""),
+ cx,
+ );
let worktree_roots = workspace
.worktrees(cx)
.map(|w| w.read(cx).as_local().unwrap().abs_path().as_ref())
.collect::<HashSet<_>>();
assert_eq!(
worktree_roots,
- vec!["/dir1", "/dir2/b.txt", "/dir3", "/d.txt"]
- .into_iter()
- .map(Path::new)
- .collect(),
+ vec![
+ path!("/dir1"),
+ path!("/dir2/b.txt"),
+ path!("/dir3"),
+ path!("/d.txt")
+ ]
+ .into_iter()
+ .map(Path::new)
+ .collect(),
);
let visible_worktree_roots = workspace
@@ -2499,7 +2619,7 @@ mod tests {
.collect::<HashSet<_>>();
assert_eq!(
visible_worktree_roots,
- vec!["/dir1", "/dir2/b.txt", "/dir3"]
+ vec![path!("/dir1"), path!("/dir2/b.txt"), path!("/dir3")]
.into_iter()
.map(Path::new)
.collect(),
@@ -2535,7 +2655,7 @@ mod tests {
.fs
.as_fake()
.insert_tree(
- "/root",
+ path!("/root"),
json!({
".gitignore": "ignored_dir\n",
".git": {
@@ -2560,7 +2680,7 @@ mod tests {
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
project.update(cx, |project, _cx| {
project.languages().add(markdown_language())
});
@@ -2569,9 +2689,9 @@ mod tests {
let initial_entries = cx.read(|cx| workspace.file_project_paths(cx));
let paths_to_open = [
- Path::new("/root/excluded_dir/file").to_path_buf(),
- Path::new("/root/.git/HEAD").to_path_buf(),
- Path::new("/root/excluded_dir/ignored_subdir").to_path_buf(),
+ PathBuf::from(path!("/root/excluded_dir/file")),
+ PathBuf::from(path!("/root/.git/HEAD")),
+ PathBuf::from(path!("/root/excluded_dir/ignored_subdir")),
];
let (opened_workspace, new_items) = cx
.update(|cx| {
@@ -2616,8 +2736,8 @@ mod tests {
opened_paths,
vec![
None,
- Some(".git/HEAD".to_string()),
- Some("excluded_dir/file".to_string()),
+ Some(separator!(".git/HEAD").to_string()),
+ Some(separator!("excluded_dir/file").to_string()),
],
"Excluded files should get opened, excluded dir should not get opened"
);
@@ -2643,7 +2763,7 @@ mod tests {
opened_buffer_paths.sort();
assert_eq!(
opened_buffer_paths,
- vec![".git/HEAD".to_string(), "excluded_dir/file".to_string()],
+ vec![separator!(".git/HEAD").to_string(), separator!("excluded_dir/file").to_string()],
"Despite not being present in the worktrees, buffers for excluded files are opened and added to the pane"
);
});
@@ -2655,10 +2775,10 @@ mod tests {
app_state
.fs
.as_fake()
- .insert_tree("/root", json!({ "a.txt": "" }))
+ .insert_tree(path!("/root"), json!({ "a.txt": "" }))
.await;
- let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
project.update(cx, |project, _cx| {
project.languages().add(markdown_language())
});
@@ -2669,7 +2789,7 @@ mod tests {
window
.update(cx, |workspace, window, cx| {
workspace.open_paths(
- vec![PathBuf::from("/root/a.txt")],
+ vec![PathBuf::from(path!("/root/a.txt"))],
OpenVisible::All,
None,
window,
@@ -2693,7 +2813,7 @@ mod tests {
app_state
.fs
.as_fake()
- .insert_file("/root/a.txt", b"changed".to_vec())
+ .insert_file(path!("/root/a.txt"), b"changed".to_vec())
.await;
cx.run_until_parked();
@@ -2721,9 +2841,13 @@ mod tests {
#[gpui::test]
async fn test_open_and_save_new_file(cx: &mut TestAppContext) {
let app_state = init_test(cx);
- app_state.fs.create_dir(Path::new("/root")).await.unwrap();
+ app_state
+ .fs
+ .create_dir(Path::new(path!("/root")))
+ .await
+ .unwrap();
- let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
project.update(cx, |project, _| {
project.languages().add(markdown_language());
project.languages().add(rust_lang());
@@ -2766,7 +2890,7 @@ mod tests {
.unwrap();
cx.background_executor.run_until_parked();
cx.simulate_new_path_selection(|parent_dir| {
- assert_eq!(parent_dir, Path::new("/root"));
+ assert_eq!(parent_dir, Path::new(path!("/root")));
Some(parent_dir.join("the-new-name.rs"))
});
cx.read(|cx| {
@@ -2922,7 +3046,7 @@ mod tests {
.fs
.as_fake()
.insert_tree(
- "/root",
+ path!("/root"),
json!({
"a": {
"file1": "contents 1",
@@ -2933,7 +3057,7 @@ mod tests {
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
project.update(cx, |project, _cx| {
project.languages().add(markdown_language())
});
@@ -2979,7 +3103,10 @@ mod tests {
});
cx.dispatch_action(
window.into(),
- workspace::CloseActiveItem { save_intent: None },
+ workspace::CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
);
cx.background_executor.run_until_parked();
@@ -2992,7 +3119,10 @@ mod tests {
cx.dispatch_action(
window.into(),
- workspace::CloseActiveItem { save_intent: None },
+ workspace::CloseActiveItem {
+ save_intent: None,
+ close_pinned: false,
+ },
);
cx.background_executor.run_until_parked();
cx.simulate_prompt_answer(1);
@@ -3020,7 +3150,7 @@ mod tests {
.fs
.as_fake()
.insert_tree(
- "/root",
+ path!("/root"),
json!({
"a": {
"file1": "contents 1\n".repeat(20),
@@ -3031,7 +3161,7 @@ mod tests {
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
project.update(cx, |project, _cx| {
project.languages().add(markdown_language())
});
@@ -3262,7 +3392,7 @@ mod tests {
.unwrap();
app_state
.fs
- .remove_file(Path::new("/root/a/file2"), Default::default())
+ .remove_file(Path::new(path!("/root/a/file2")), Default::default())
.await
.unwrap();
cx.background_executor.run_until_parked();
@@ -3403,7 +3533,7 @@ mod tests {
.fs
.as_fake()
.insert_tree(
- "/root",
+ path!("/root"),
json!({
"a": {
"file1": "",
@@ -3415,7 +3545,7 @@ mod tests {
)
.await;
- let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+ let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
project.update(cx, |project, _cx| {
project.languages().add(markdown_language())
});
@@ -3881,24 +4011,28 @@ mod tests {
"vim::FindCommand"
| "vim::Literal"
| "vim::ResizePane"
- | "vim::SwitchMode"
- | "vim::PushOperator"
+ | "vim::PushObject"
+ | "vim::PushFindForward"
+ | "vim::PushFindBackward"
+ | "vim::PushSneak"
+ | "vim::PushSneakBackward"
+ | "vim::PushChangeSurrounds"
+ | "vim::PushJump"
+ | "vim::PushDigraph"
+ | "vim::PushLiteral"
| "vim::Number"
| "vim::SelectRegister"
| "terminal::SendText"
| "terminal::SendKeystroke"
| "app_menu::OpenApplicationMenu"
- | "app_menu::NavigateApplicationMenuInDirection"
| "picker::ConfirmInput"
| "editor::HandleInput"
| "editor::FoldAtLevel"
| "pane::ActivateItem"
| "workspace::ActivatePane"
- | "workspace::ActivatePaneInDirection"
| "workspace::MoveItemToPane"
| "workspace::MoveItemToPaneInDirection"
| "workspace::OpenTerminal"
- | "workspace::SwapPaneInDirection"
| "workspace::SendKeystrokes"
| "zed::OpenBrowser"
| "zed::OpenZedUrl" => {}
@@ -4012,6 +4146,7 @@ mod tests {
app_state.client.http_client().clone(),
cx,
);
+ image_viewer::init(cx);
language_model::init(cx);
language_models::init(
app_state.user_store.clone(),
@@ -76,7 +76,10 @@ pub fn app_menus() -> Vec<Menu> {
MenuItem::action("Save All", workspace::SaveAll { save_intent: None }),
MenuItem::action(
"Close Editor",
- workspace::CloseActiveItem { save_intent: None },
+ workspace::CloseActiveItem {
+ save_intent: None,
+ close_pinned: true,
+ },
),
MenuItem::action("Close Window", workspace::CloseWindow),
],
@@ -4,7 +4,7 @@ use copilot::{Copilot, CopilotCompletionProvider};
use editor::{Editor, EditorMode};
use feature_flags::{FeatureFlagAppExt, PredictEditsFeatureFlag};
use gpui::{AnyWindowHandle, App, AppContext, Context, Entity, WeakEntity};
-use language::language_settings::{all_language_settings, InlineCompletionProvider};
+use language::language_settings::{all_language_settings, EditPredictionProvider};
use settings::SettingsStore;
use std::{cell::RefCell, rc::Rc, sync::Arc};
use supermaven::{Supermaven, SupermavenCompletionProvider};
@@ -41,8 +41,8 @@ pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
editors
.borrow_mut()
.insert(editor_handle, window.window_handle());
- let provider = all_language_settings(None, cx).inline_completions.provider;
- assign_inline_completion_provider(
+ let provider = all_language_settings(None, cx).edit_predictions.provider;
+ assign_edit_prediction_provider(
editor,
provider,
&client,
@@ -54,11 +54,11 @@ pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
})
.detach();
- let mut provider = all_language_settings(None, cx).inline_completions.provider;
+ let mut provider = all_language_settings(None, cx).edit_predictions.provider;
for (editor, window) in editors.borrow().iter() {
_ = window.update(cx, |_window, window, cx| {
_ = editor.update(cx, |editor, cx| {
- assign_inline_completion_provider(
+ assign_edit_prediction_provider(
editor,
provider,
&client,
@@ -79,8 +79,8 @@ pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
let client = client.clone();
let user_store = user_store.clone();
move |active, cx| {
- let provider = all_language_settings(None, cx).inline_completions.provider;
- assign_inline_completion_providers(&editors, provider, &client, user_store.clone(), cx);
+ let provider = all_language_settings(None, cx).edit_predictions.provider;
+ assign_edit_prediction_providers(&editors, provider, &client, user_store.clone(), cx);
if active && !cx.is_action_available(&zeta::ClearHistory) {
cx.on_action(clear_zeta_edit_history);
}
@@ -93,10 +93,23 @@ pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
let client = client.clone();
let user_store = user_store.clone();
move |cx| {
- let new_provider = all_language_settings(None, cx).inline_completions.provider;
+ let new_provider = all_language_settings(None, cx).edit_predictions.provider;
+
if new_provider != provider {
+ let tos_accepted = user_store
+ .read(cx)
+ .current_user_has_accepted_terms()
+ .unwrap_or(false);
+
+ telemetry::event!(
+ "Edit Prediction Provider Changed",
+ from = provider,
+ to = new_provider,
+ zed_ai_tos_accepted = tos_accepted,
+ );
+
provider = new_provider;
- assign_inline_completion_providers(
+ assign_edit_prediction_providers(
&editors,
provider,
&client,
@@ -104,13 +117,9 @@ pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
cx,
);
- if !user_store
- .read(cx)
- .current_user_has_accepted_terms()
- .unwrap_or(false)
- {
+ if !tos_accepted {
match provider {
- InlineCompletionProvider::Zed => {
+ EditPredictionProvider::Zed => {
let Some(window) = cx.active_window() else {
return;
};
@@ -124,9 +133,9 @@ pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
})
.ok();
}
- InlineCompletionProvider::None
- | InlineCompletionProvider::Copilot
- | InlineCompletionProvider::Supermaven => {}
+ EditPredictionProvider::None
+ | EditPredictionProvider::Copilot
+ | EditPredictionProvider::Supermaven => {}
}
}
}
@@ -141,9 +150,9 @@ fn clear_zeta_edit_history(_: &zeta::ClearHistory, cx: &mut App) {
}
}
-fn assign_inline_completion_providers(
+fn assign_edit_prediction_providers(
editors: &Rc<RefCell<HashMap<WeakEntity<Editor>, AnyWindowHandle>>>,
- provider: InlineCompletionProvider,
+ provider: EditPredictionProvider,
client: &Arc<Client>,
user_store: Entity<UserStore>,
cx: &mut App,
@@ -151,7 +160,7 @@ fn assign_inline_completion_providers(
for (editor, window) in editors.borrow().iter() {
_ = window.update(cx, |_window, window, cx| {
_ = editor.update(cx, |editor, cx| {
- assign_inline_completion_provider(
+ assign_edit_prediction_provider(
editor,
provider,
&client,
@@ -178,7 +187,7 @@ fn register_backward_compatible_actions(editor: &mut Editor, cx: &mut Context<Ed
editor
.register_action(cx.listener(
|editor, _: &copilot::NextSuggestion, window: &mut Window, cx: &mut Context<Editor>| {
- editor.next_inline_completion(&Default::default(), window, cx);
+ editor.next_edit_prediction(&Default::default(), window, cx);
},
))
.detach();
@@ -188,7 +197,7 @@ fn register_backward_compatible_actions(editor: &mut Editor, cx: &mut Context<Ed
_: &copilot::PreviousSuggestion,
window: &mut Window,
cx: &mut Context<Editor>| {
- editor.previous_inline_completion(&Default::default(), window, cx);
+ editor.previous_edit_prediction(&Default::default(), window, cx);
},
))
.detach();
@@ -204,9 +213,9 @@ fn register_backward_compatible_actions(editor: &mut Editor, cx: &mut Context<Ed
.detach();
}
-fn assign_inline_completion_provider(
+fn assign_edit_prediction_provider(
editor: &mut Editor,
- provider: InlineCompletionProvider,
+ provider: EditPredictionProvider,
client: &Arc<Client>,
user_store: Entity<UserStore>,
window: &mut Window,
@@ -216,8 +225,8 @@ fn assign_inline_completion_provider(
let singleton_buffer = editor.buffer().read(cx).as_singleton();
match provider {
- InlineCompletionProvider::None => {}
- InlineCompletionProvider::Copilot => {
+ EditPredictionProvider::None => {}
+ EditPredictionProvider::Copilot => {
if let Some(copilot) = Copilot::global(cx) {
if let Some(buffer) = singleton_buffer {
if buffer.read(cx).file().is_some() {
@@ -227,16 +236,16 @@ fn assign_inline_completion_provider(
}
}
let provider = cx.new(|_| CopilotCompletionProvider::new(copilot));
- editor.set_inline_completion_provider(Some(provider), window, cx);
+ editor.set_edit_prediction_provider(Some(provider), window, cx);
}
}
- InlineCompletionProvider::Supermaven => {
+ EditPredictionProvider::Supermaven => {
if let Some(supermaven) = Supermaven::global(cx) {
let provider = cx.new(|_| SupermavenCompletionProvider::new(supermaven));
- editor.set_inline_completion_provider(Some(provider), window, cx);
+ editor.set_edit_prediction_provider(Some(provider), window, cx);
}
}
- InlineCompletionProvider::Zed => {
+ EditPredictionProvider::Zed => {
if cx.has_flag::<PredictEditsFeatureFlag>()
|| (cfg!(debug_assertions) && client.status().borrow().is_connected())
{
@@ -271,7 +280,7 @@ fn assign_inline_completion_provider(
let provider =
cx.new(|_| zeta::ZetaInlineCompletionProvider::new(zeta, data_collection));
- editor.set_inline_completion_provider(Some(provider), window, cx);
+ editor.set_edit_prediction_provider(Some(provider), window, cx);
}
}
}
@@ -535,6 +535,7 @@ mod tests {
use editor::Editor;
use gpui::TestAppContext;
use serde_json::json;
+ use util::path;
use workspace::{AppState, Workspace};
use crate::zed::{open_listener::open_local_workspace, tests::init_test};
@@ -547,7 +548,7 @@ mod tests {
.fs
.as_fake()
.insert_tree(
- "/root",
+ path!("/root"),
json!({
"dir1": {
"file1.txt": "content1",
@@ -560,7 +561,7 @@ mod tests {
assert_eq!(cx.windows().len(), 0);
// First open the workspace directory
- open_workspace_file("/root/dir1", None, app_state.clone(), cx).await;
+ open_workspace_file(path!("/root/dir1"), None, app_state.clone(), cx).await;
assert_eq!(cx.windows().len(), 1);
let workspace = cx.windows()[0].downcast::<Workspace>().unwrap();
@@ -571,7 +572,7 @@ mod tests {
.unwrap();
// Now open a file inside that workspace
- open_workspace_file("/root/dir1/file1.txt", None, app_state.clone(), cx).await;
+ open_workspace_file(path!("/root/dir1/file1.txt"), None, app_state.clone(), cx).await;
assert_eq!(cx.windows().len(), 1);
workspace
@@ -581,7 +582,13 @@ mod tests {
.unwrap();
// Now open a file inside that workspace, but tell Zed to open a new window
- open_workspace_file("/root/dir1/file1.txt", Some(true), app_state.clone(), cx).await;
+ open_workspace_file(
+ path!("/root/dir1/file1.txt"),
+ Some(true),
+ app_state.clone(),
+ cx,
+ )
+ .await;
assert_eq!(cx.windows().len(), 2);
@@ -599,12 +606,16 @@ mod tests {
async fn test_open_workspace_with_nonexistent_files(cx: &mut TestAppContext) {
let app_state = init_test(cx);
- app_state.fs.as_fake().insert_tree("/root", json!({})).await;
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(path!("/root"), json!({}))
+ .await;
assert_eq!(cx.windows().len(), 0);
// Test case 1: Open a single file that does not exist yet
- open_workspace_file("/root/file5.txt", None, app_state.clone(), cx).await;
+ open_workspace_file(path!("/root/file5.txt"), None, app_state.clone(), cx).await;
assert_eq!(cx.windows().len(), 1);
let workspace_1 = cx.windows()[0].downcast::<Workspace>().unwrap();
@@ -616,7 +627,7 @@ mod tests {
// Test case 2: Open a single file that does not exist yet,
// but tell Zed to add it to the current workspace
- open_workspace_file("/root/file6.txt", Some(false), app_state.clone(), cx).await;
+ open_workspace_file(path!("/root/file6.txt"), Some(false), app_state.clone(), cx).await;
assert_eq!(cx.windows().len(), 1);
workspace_1
@@ -628,7 +639,7 @@ mod tests {
// Test case 3: Open a single file that does not exist yet,
// but tell Zed to NOT add it to the current workspace
- open_workspace_file("/root/file7.txt", Some(true), app_state.clone(), cx).await;
+ open_workspace_file(path!("/root/file7.txt"), Some(true), app_state.clone(), cx).await;
assert_eq!(cx.windows().len(), 2);
let workspace_2 = cx.windows()[1].downcast::<Workspace>().unwrap();
@@ -16,8 +16,8 @@ use gpui::{
use search::{buffer_search, BufferSearchBar};
use settings::{Settings, SettingsStore};
use ui::{
- prelude::*, ButtonStyle, ContextMenu, IconButton, IconButtonShape, IconName, IconSize,
- PopoverMenu, PopoverMenuHandle, Tooltip,
+ prelude::*, ButtonStyle, ContextMenu, ContextMenuEntry, IconButton, IconButtonShape, IconName,
+ IconSize, PopoverMenu, PopoverMenuHandle, Tooltip,
};
use vim_mode_setting::VimModeSetting;
use workspace::{
@@ -94,7 +94,8 @@ impl Render for QuickActionBar {
git_blame_inline_enabled,
show_git_blame_gutter,
auto_signature_help_enabled,
- inline_completions_enabled,
+ show_inline_completions,
+ inline_completion_enabled,
) = {
let editor = editor.read(cx);
let selection_menu_enabled = editor.selection_menu_enabled(cx);
@@ -103,7 +104,8 @@ impl Render for QuickActionBar {
let git_blame_inline_enabled = editor.git_blame_inline_enabled();
let show_git_blame_gutter = editor.show_git_blame_gutter();
let auto_signature_help_enabled = editor.auto_signature_help_enabled(cx);
- let inline_completions_enabled = editor.inline_completions_enabled(cx);
+ let show_inline_completions = editor.should_show_inline_completions(cx);
+ let inline_completion_enabled = editor.inline_completions_enabled(cx);
(
selection_menu_enabled,
@@ -112,7 +114,8 @@ impl Render for QuickActionBar {
git_blame_inline_enabled,
show_git_blame_gutter,
auto_signature_help_enabled,
- inline_completions_enabled,
+ show_inline_completions,
+ inline_completion_enabled,
)
};
@@ -294,26 +297,32 @@ impl Render for QuickActionBar {
},
);
- menu = menu.toggleable_entry(
- "Edit Predictions",
- inline_completions_enabled,
- IconPosition::Start,
- Some(editor::actions::ToggleInlineCompletions.boxed_clone()),
- {
+ let mut inline_completion_entry = ContextMenuEntry::new("Edit Predictions")
+ .toggleable(IconPosition::Start, inline_completion_enabled && show_inline_completions)
+ .disabled(!inline_completion_enabled)
+ .action(Some(
+ editor::actions::ToggleEditPrediction.boxed_clone(),
+ )).handler({
let editor = editor.clone();
move |window, cx| {
editor
.update(cx, |editor, cx| {
editor.toggle_inline_completions(
- &editor::actions::ToggleInlineCompletions,
+ &editor::actions::ToggleEditPrediction,
window,
cx,
);
})
.ok();
}
- },
- );
+ });
+ if !inline_completion_enabled {
+ inline_completion_entry = inline_completion_entry.documentation_aside(|_| {
+ Label::new("You can't toggle edit predictions for this file as it is within the excluded files list.").into_any_element()
+ });
+ }
+
+ menu = menu.item(inline_completion_entry);
menu = menu.separator();
@@ -12,11 +12,13 @@ use serde::{Deserialize, Serialize};
pub fn init() {}
#[derive(Clone, PartialEq, Deserialize, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct OpenBrowser {
pub url: String,
}
#[derive(Clone, PartialEq, Deserialize, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct OpenZedUrl {
pub url: String,
}
@@ -69,6 +71,7 @@ pub mod theme_selector {
use serde::Deserialize;
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema)]
+ #[serde(deny_unknown_fields)]
pub struct Toggle {
/// A list of theme names to filter the theme selector down to.
pub themes_filter: Option<Vec<String>>,
@@ -83,6 +86,7 @@ pub mod icon_theme_selector {
use serde::Deserialize;
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema)]
+ #[serde(deny_unknown_fields)]
pub struct Toggle {
/// A list of icon theme names to filter the theme selector down to.
pub themes_filter: Option<Vec<String>>,
@@ -99,6 +103,7 @@ pub mod assistant {
actions!(assistant, [ToggleFocus, DeployPromptLibrary]);
#[derive(Clone, Default, Deserialize, PartialEq, JsonSchema)]
+ #[serde(deny_unknown_fields)]
pub struct InlineAssist {
pub prompt: Option<String>,
}
@@ -107,6 +112,7 @@ pub mod assistant {
}
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct OpenRecent {
#[serde(default)]
pub create_new_window: bool,
@@ -154,6 +160,7 @@ impl Spawn {
/// Rerun the last task.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
+#[serde(deny_unknown_fields)]
pub struct Rerun {
/// Controls whether the task context is reevaluated prior to execution of a task.
/// If it is not, environment variables such as ZED_COLUMN, ZED_FILE are gonna be the same as in the last execution of a task
@@ -37,8 +37,8 @@ language_models.workspace = true
log.workspace = true
menu.workspace = true
postage.workspace = true
+project.workspace = true
regex.workspace = true
-rpc.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
@@ -52,6 +52,7 @@ uuid.workspace = true
workspace.workspace = true
worktree.workspace = true
zed_actions.workspace = true
+zed_llm_client.workspace = true
[dev-dependencies]
collections = { workspace = true, features = ["test-support"] }
@@ -4,11 +4,16 @@ use command_palette_hooks::CommandPaletteFilter;
use feature_flags::{
FeatureFlagAppExt as _, PredictEditsFeatureFlag, PredictEditsRateCompletionsFeatureFlag,
};
+use gpui::actions;
+use language::language_settings::{AllLanguageSettings, EditPredictionProvider};
+use settings::update_settings_file;
use ui::App;
use workspace::Workspace;
use crate::{onboarding_modal::ZedPredictModal, RateCompletionModal, RateCompletions};
+actions!(edit_predictions, [ResetOnboarding]);
+
pub fn init(cx: &mut App) {
cx.observe_new(move |workspace: &mut Workspace, _, _cx| {
workspace.register_action(|workspace, _: &RateCompletions, window, cx| {
@@ -31,6 +36,20 @@ pub fn init(cx: &mut App) {
}
},
);
+
+ workspace.register_action(|workspace, _: &ResetOnboarding, _window, cx| {
+ update_settings_file::<AllLanguageSettings>(
+ workspace.app_state().fs.clone(),
+ cx,
+ move |file, _| {
+ file.features
+ .get_or_insert(Default::default())
+ .edit_prediction_provider = Some(EditPredictionProvider::None)
+ },
+ );
+
+ crate::onboarding_banner::clear_dismissed(cx);
+ });
})
.detach();
@@ -1,5 +1,8 @@
use regex::Regex;
+/// The most common license locations, with US and UK English spelling.
+pub const LICENSE_FILES_TO_CHECK: &[&str] = &["LICENSE", "LICENCE", "LICENSE.txt", "LICENCE.txt"];
+
pub fn is_license_eligible_for_data_collection(license: &str) -> bool {
// TODO: Include more licenses later (namely, Apache)
for pattern in [MIT_LICENSE_REGEX, ISC_LICENSE_REGEX] {
@@ -1,14 +1,17 @@
use chrono::Utc;
use feature_flags::{FeatureFlagAppExt as _, PredictEditsFeatureFlag};
use gpui::Subscription;
-use language::language_settings::{all_language_settings, InlineCompletionProvider};
+use language::language_settings::{all_language_settings, EditPredictionProvider};
use settings::SettingsStore;
use ui::{prelude::*, ButtonLike, Tooltip};
use util::ResultExt;
+use crate::onboarding_event;
+
/// Prompts the user to try Zed's Edit Prediction feature
pub struct ZedPredictBanner {
dismissed: bool,
+ provider: EditPredictionProvider,
_subscription: Subscription,
}
@@ -16,43 +19,34 @@ impl ZedPredictBanner {
pub fn new(cx: &mut Context<Self>) -> Self {
Self {
dismissed: get_dismissed(),
+ provider: all_language_settings(None, cx).edit_predictions.provider,
_subscription: cx.observe_global::<SettingsStore>(Self::handle_settings_changed),
}
}
fn should_show(&self, cx: &mut App) -> bool {
- if !cx.has_flag::<PredictEditsFeatureFlag>() || self.dismissed {
- return false;
- }
-
- let provider = all_language_settings(None, cx).inline_completions.provider;
-
- match provider {
- InlineCompletionProvider::None
- | InlineCompletionProvider::Copilot
- | InlineCompletionProvider::Supermaven => true,
- InlineCompletionProvider::Zed => false,
- }
+ cx.has_flag::<PredictEditsFeatureFlag>() && !self.dismissed && !self.provider.is_zed()
}
fn handle_settings_changed(&mut self, cx: &mut Context<Self>) {
- if self.dismissed {
+ let new_provider = all_language_settings(None, cx).edit_predictions.provider;
+
+ if new_provider == self.provider {
return;
}
- let provider = all_language_settings(None, cx).inline_completions.provider;
-
- match provider {
- InlineCompletionProvider::None
- | InlineCompletionProvider::Copilot
- | InlineCompletionProvider::Supermaven => {}
- InlineCompletionProvider::Zed => {
- self.dismiss(cx);
- }
+ if new_provider.is_zed() {
+ self.dismiss(cx);
+ } else {
+ self.dismissed = get_dismissed();
}
+
+ self.provider = new_provider;
+ cx.notify();
}
fn dismiss(&mut self, cx: &mut Context<Self>) {
+ onboarding_event!("Banner Dismissed");
persist_dismissed(cx);
self.dismissed = true;
cx.notify();
@@ -61,14 +55,14 @@ impl ZedPredictBanner {
const DISMISSED_AT_KEY: &str = "zed_predict_banner_dismissed_at";
-pub(crate) fn get_dismissed() -> bool {
+fn get_dismissed() -> bool {
db::kvp::KEY_VALUE_STORE
.read_kvp(DISMISSED_AT_KEY)
.log_err()
.map_or(false, |dismissed| dismissed.is_some())
}
-pub(crate) fn persist_dismissed(cx: &mut App) {
+fn persist_dismissed(cx: &mut App) {
cx.spawn(|_| {
let time = Utc::now().to_rfc3339();
db::kvp::KEY_VALUE_STORE.write_kvp(DISMISSED_AT_KEY.into(), time)
@@ -76,6 +70,11 @@ pub(crate) fn persist_dismissed(cx: &mut App) {
.detach_and_log_err(cx);
}
+pub(crate) fn clear_dismissed(cx: &mut App) {
+ cx.spawn(|_| db::kvp::KEY_VALUE_STORE.delete_kvp(DISMISSED_AT_KEY.into()))
+ .detach_and_log_err(cx);
+}
+
impl Render for ZedPredictBanner {
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
if !self.should_show(cx) {
@@ -107,6 +106,7 @@ impl Render for ZedPredictBanner {
),
)
.on_click(|_, window, cx| {
+ onboarding_event!("Banner Clicked");
window.dispatch_action(Box::new(zed_actions::OpenZedPredictOnboarding), cx)
}),
)
@@ -1,6 +1,6 @@
use std::{sync::Arc, time::Duration};
-use crate::{Zeta, ZED_PREDICT_DATA_COLLECTION_CHOICE};
+use crate::{onboarding_event, ZED_PREDICT_DATA_COLLECTION_CHOICE};
use client::{Client, UserStore};
use db::kvp::KEY_VALUE_STORE;
use feature_flags::FeatureFlagAppExt as _;
@@ -9,12 +9,11 @@ use gpui::{
ease_in_out, svg, Animation, AnimationExt as _, ClickEvent, DismissEvent, Entity, EventEmitter,
FocusHandle, Focusable, MouseDownEvent, Render,
};
-use language::language_settings::{AllLanguageSettings, InlineCompletionProvider};
+use language::language_settings::{AllLanguageSettings, EditPredictionProvider};
use settings::{update_settings_file, Settings};
-use ui::{prelude::*, Checkbox, TintColor, Tooltip};
+use ui::{prelude::*, Checkbox, TintColor};
use util::ResultExt;
use workspace::{notifications::NotifyTaskExt, ModalView, Workspace};
-use worktree::Worktree;
/// Introduces user to Zed's Edit Prediction feature and terms of service
pub struct ZedPredictModal {
@@ -26,7 +25,6 @@ pub struct ZedPredictModal {
terms_of_service: bool,
data_collection_expanded: bool,
data_collection_opted_in: bool,
- worktrees: Vec<Entity<Worktree>>,
}
#[derive(PartialEq, Eq)]
@@ -48,8 +46,6 @@ impl ZedPredictModal {
window: &mut Window,
cx: &mut Context<Workspace>,
) {
- let worktrees = workspace.visible_worktrees(cx).collect();
-
workspace.toggle_modal(window, cx, |_window, cx| Self {
user_store,
client,
@@ -59,23 +55,28 @@ impl ZedPredictModal {
terms_of_service: false,
data_collection_expanded: false,
data_collection_opted_in: false,
- worktrees,
});
}
fn view_terms(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context<Self>) {
cx.open_url("https://zed.dev/terms-of-service");
cx.notify();
+
+ onboarding_event!("ToS Link Clicked");
}
fn view_blog(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context<Self>) {
cx.open_url("https://zed.dev/blog/"); // TODO Add the link when live
cx.notify();
+
+ onboarding_event!("Blog Link clicked");
}
fn inline_completions_doc(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context<Self>) {
cx.open_url("https://zed.dev/docs/configuring-zed#inline-completions");
cx.notify();
+
+ onboarding_event!("Docs Link Clicked");
}
fn accept_and_enable(&mut self, _: &ClickEvent, window: &mut Window, cx: &mut Context<Self>) {
@@ -104,20 +105,18 @@ impl ZedPredictModal {
update_settings_file::<AllLanguageSettings>(this.fs.clone(), cx, move |file, _| {
file.features
.get_or_insert(Default::default())
- .inline_completion_provider = Some(InlineCompletionProvider::Zed);
+ .edit_prediction_provider = Some(EditPredictionProvider::Zed);
});
- if this.worktrees.is_empty() {
- cx.emit(DismissEvent);
- return;
- }
-
- Zeta::register(None, this.client.clone(), this.user_store.clone(), cx);
-
cx.emit(DismissEvent);
})
})
.detach_and_notify_err(window, cx);
+
+ onboarding_event!(
+ "Enable Clicked",
+ data_collection_opted_in = self.data_collection_opted_in,
+ );
}
fn sign_in(&mut self, _: &ClickEvent, window: &mut Window, cx: &mut Context<Self>) {
@@ -134,12 +133,15 @@ impl ZedPredictModal {
this.update(&mut cx, |this, cx| {
this.sign_in_status = status;
+ onboarding_event!("Signed In");
cx.notify()
})?;
result
})
.detach_and_notify_err(window, cx);
+
+ onboarding_event!("Sign In Clicked");
}
fn cancel(&mut self, _: &menu::Cancel, _: &mut Window, cx: &mut Context<Self>) {
@@ -158,19 +160,30 @@ impl Focusable for ZedPredictModal {
impl ModalView for ZedPredictModal {}
impl Render for ZedPredictModal {
- fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let window_height = window.viewport_size().height;
+ let max_height = window_height - px(200.);
+
let base = v_flex()
- .id("zed predict tos")
+ .id("edit-prediction-onboarding")
.key_context("ZedPredictModal")
+ .relative()
.w(px(440.))
+ .h_full()
+ .max_h(max_height)
.p_4()
- .relative()
.gap_2()
- .overflow_hidden()
+ .when(self.data_collection_expanded, |element| {
+ element.overflow_y_scroll()
+ })
+ .when(!self.data_collection_expanded, |element| {
+ element.overflow_hidden()
+ })
.elevation_3(cx)
.track_focus(&self.focus_handle(cx))
.on_action(cx.listener(Self::cancel))
.on_action(cx.listener(|_, _: &menu::Cancel, _window, cx| {
+ onboarding_event!("Cancelled", trigger = "Action");
cx.emit(DismissEvent);
}))
.on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, _cx| {
@@ -181,7 +194,7 @@ impl Render for ZedPredictModal {
.p_1p5()
.absolute()
.top_1()
- .left_1p5()
+ .left_1()
.right_0()
.h(px(200.))
.child(
@@ -244,7 +257,7 @@ impl Render for ZedPredictModal {
v_flex()
.gap_2()
.items_center()
- .pr_4()
+ .pr_2p5()
.child(tab(0).ml_neg_20())
.child(tab(1))
.child(tab(2).ml_20())
@@ -253,6 +266,7 @@ impl Render for ZedPredictModal {
.child(h_flex().absolute().top_2().right_2().child(
IconButton::new("cancel", IconName::X).on_click(cx.listener(
|_, _: &ClickEvent, _window, cx| {
+ onboarding_event!("Cancelled", trigger = "X click");
cx.emit(DismissEvent);
},
)),
@@ -291,9 +305,14 @@ impl Render for ZedPredictModal {
fn info_item(label_text: impl Into<SharedString>) -> impl Element {
h_flex()
+ .items_start()
.gap_2()
- .child(Icon::new(IconName::Check).size(IconSize::XSmall))
- .child(label_item(label_text))
+ .child(
+ div()
+ .mt_1p5()
+ .child(Icon::new(IconName::Check).size(IconSize::XSmall)),
+ )
+ .child(div().w_full().child(label_item(label_text)))
}
fn multiline_info_item<E1: Into<SharedString>, E2: IntoElement>(
@@ -314,7 +333,7 @@ impl Render for ZedPredictModal {
.label("Read and accept the")
.on_click(cx.listener(move |this, state, _window, cx| {
this.terms_of_service = *state == ToggleState::Selected;
- cx.notify()
+ cx.notify();
})),
)
.child(
@@ -329,6 +348,7 @@ impl Render for ZedPredictModal {
v_flex()
.child(
h_flex()
+ .flex_wrap()
.child(
Checkbox::new(
"training-data-checkbox",
@@ -336,17 +356,6 @@ impl Render for ZedPredictModal {
)
.label("Optionally share training data (OSS-only).")
.fill()
- .when(self.worktrees.is_empty(), |element| {
- element.disabled(true).tooltip(move |window, cx| {
- Tooltip::with_meta(
- "No Project Open",
- None,
- "Open a project to enable this option.",
- window,
- cx,
- )
- })
- })
.on_click(cx.listener(
move |this, state, _window, cx| {
this.data_collection_opted_in =
@@ -355,7 +364,6 @@ impl Render for ZedPredictModal {
},
)),
)
- // TODO: show each worktree if more than 1
.child(
Button::new("learn-more", "Learn More")
.icon(accordion_icons.0)
@@ -364,7 +372,11 @@ impl Render for ZedPredictModal {
.on_click(cx.listener(|this, _, _, cx| {
this.data_collection_expanded =
!this.data_collection_expanded;
- cx.notify()
+ cx.notify();
+
+ if this.data_collection_expanded {
+ onboarding_event!("Data Collection Learn More Clicked");
+ }
})),
),
)
@@ -394,9 +406,11 @@ impl Render for ZedPredictModal {
))
.child(info_item("Toggle it anytime via the status bar menu."))
.child(multiline_info_item(
- "Files that can contain sensitive data, like `.env`, are",
+ "Files with sensitive data, like `.env`, are excluded",
h_flex()
- .child(label_item("excluded by default via the"))
+ .w_full()
+ .flex_wrap()
+ .child(label_item("by default via the"))
.child(
Button::new("doc-link", "disabled_globs").on_click(
cx.listener(Self::inline_completions_doc),
@@ -0,0 +1,9 @@
+#[macro_export]
+macro_rules! onboarding_event {
+ ($name:expr) => {
+ telemetry::event!($name, source = "Edit Prediction Onboarding");
+ };
+ ($name:expr, $($key:ident $(= $value:expr)?),+ $(,)?) => {
+ telemetry::event!($name, source = "Edit Prediction Onboarding", $($key $(= $value)?),+);
+ };
+}
@@ -52,6 +52,8 @@ impl RateCompletionModal {
pub fn toggle(workspace: &mut Workspace, window: &mut Window, cx: &mut Context<Workspace>) {
if let Some(zeta) = Zeta::global(cx) {
workspace.toggle_modal(window, cx, |_window, cx| RateCompletionModal::new(zeta, cx));
+
+ telemetry::event!("Rate Completion Modal Open", source = "Edit Prediction");
}
}
@@ -3,6 +3,7 @@ mod init;
mod license_detection;
mod onboarding_banner;
mod onboarding_modal;
+mod onboarding_telemetry;
mod rate_completion_modal;
pub(crate) use completion_diff_element::*;
@@ -10,6 +11,7 @@ use db::kvp::KEY_VALUE_STORE;
pub use init::*;
use inline_completion::DataCollectionState;
pub use license_detection::is_license_eligible_for_data_collection;
+use license_detection::LICENSE_FILES_TO_CHECK;
pub use onboarding_banner::*;
pub use rate_completion_modal::*;
@@ -24,12 +26,11 @@ use gpui::{
};
use http_client::{HttpClient, Method};
use language::{
- language_settings::all_language_settings, Anchor, Buffer, BufferSnapshot, EditPreview,
- OffsetRangeExt, Point, ToOffset, ToPoint,
+ Anchor, Buffer, BufferSnapshot, EditPreview, OffsetRangeExt, Point, ToOffset, ToPoint,
};
use language_models::LlmApiToken;
use postage::watch;
-use rpc::{PredictEditsParams, PredictEditsResponse, EXPIRED_LLM_TOKEN_HEADER_NAME};
+use project::Project;
use settings::WorktreeId;
use std::{
borrow::Cow,
@@ -47,6 +48,7 @@ use telemetry_events::InlineCompletionRating;
use util::ResultExt;
use uuid::Uuid;
use worktree::Worktree;
+use zed_llm_client::{PredictEditsBody, PredictEditsResponse, EXPIRED_LLM_TOKEN_HEADER_NAME};
const CURSOR_MARKER: &'static str = "<|user_cursor_is_here|>";
const START_OF_FILE_MARKER: &'static str = "<|start_of_file|>";
@@ -362,17 +364,19 @@ impl Zeta {
pub fn request_completion_impl<F, R>(
&mut self,
+ project: Option<&Entity<Project>>,
buffer: &Entity<Buffer>,
cursor: language::Anchor,
- data_collection_permission: bool,
+ can_collect_data: bool,
cx: &mut Context<Self>,
perform_predict_edits: F,
) -> Task<Result<Option<InlineCompletion>>>
where
- F: FnOnce(Arc<Client>, LlmApiToken, bool, PredictEditsParams) -> R + 'static,
+ F: FnOnce(Arc<Client>, LlmApiToken, bool, PredictEditsBody) -> R + 'static,
R: Future<Output = Result<PredictEditsResponse>> + Send + 'static,
{
let snapshot = self.report_changes_for_buffer(&buffer, cx);
+ let diagnostic_groups = snapshot.diagnostic_groups(None);
let cursor_point = cursor.to_point(&snapshot);
let cursor_offset = cursor_point.to_offset(&snapshot);
let events = self.events.clone();
@@ -386,10 +390,39 @@ impl Zeta {
let is_staff = cx.is_staff();
let buffer = buffer.clone();
+
+ let local_lsp_store =
+ project.and_then(|project| project.read(cx).lsp_store().read(cx).as_local());
+ let diagnostic_groups = if let Some(local_lsp_store) = local_lsp_store {
+ Some(
+ diagnostic_groups
+ .into_iter()
+ .filter_map(|(language_server_id, diagnostic_group)| {
+ let language_server =
+ local_lsp_store.running_language_server_for_id(language_server_id)?;
+
+ Some((
+ language_server.name(),
+ diagnostic_group.resolve::<usize>(&snapshot),
+ ))
+ })
+ .collect::<Vec<_>>(),
+ )
+ } else {
+ None
+ };
+
cx.spawn(|_, cx| async move {
let request_sent_at = Instant::now();
- let (input_events, input_excerpt, excerpt_range, input_outline) = cx
+ struct BackgroundValues {
+ input_events: String,
+ input_excerpt: String,
+ excerpt_range: Range<usize>,
+ input_outline: String,
+ }
+
+ let values = cx
.background_executor()
.spawn({
let snapshot = snapshot.clone();
@@ -418,18 +451,36 @@ impl Zeta {
// is not counted towards TOTAL_BYTE_LIMIT.
let input_outline = prompt_for_outline(&snapshot);
- anyhow::Ok((input_events, input_excerpt, excerpt_range, input_outline))
+ anyhow::Ok(BackgroundValues {
+ input_events,
+ input_excerpt,
+ excerpt_range,
+ input_outline,
+ })
}
})
.await?;
- log::debug!("Events:\n{}\nExcerpt:\n{}", input_events, input_excerpt);
+ log::debug!(
+ "Events:\n{}\nExcerpt:\n{}",
+ values.input_events,
+ values.input_excerpt
+ );
- let body = PredictEditsParams {
- input_events: input_events.clone(),
- input_excerpt: input_excerpt.clone(),
- outline: Some(input_outline.clone()),
- data_collection_permission,
+ let body = PredictEditsBody {
+ input_events: values.input_events.clone(),
+ input_excerpt: values.input_excerpt.clone(),
+ outline: Some(values.input_outline.clone()),
+ can_collect_data,
+ diagnostic_groups: diagnostic_groups.and_then(|diagnostic_groups| {
+ diagnostic_groups
+ .into_iter()
+ .map(|(name, diagnostic_group)| {
+ Ok((name.to_string(), serde_json::to_value(diagnostic_group)?))
+ })
+ .collect::<Result<Vec<_>>>()
+ .log_err()
+ }),
};
let response = perform_predict_edits(client, llm_token, is_staff, body).await?;
@@ -441,12 +492,12 @@ impl Zeta {
output_excerpt,
buffer,
&snapshot,
- excerpt_range,
+ values.excerpt_range,
cursor_offset,
path,
- input_outline,
- input_events,
- input_excerpt,
+ values.input_outline,
+ values.input_events,
+ values.input_excerpt,
request_sent_at,
&cx,
)
@@ -465,11 +516,13 @@ impl Zeta {
and then another
"#};
+ let project = None;
let buffer = cx.new(|cx| Buffer::local(test_buffer_text, cx));
let position = buffer.read(cx).anchor_before(Point::new(1, 0));
let completion_tasks = vec![
self.fake_completion(
+ project,
&buffer,
position,
PredictEditsResponse {
@@ -485,6 +538,7 @@ and then another
cx,
),
self.fake_completion(
+ project,
&buffer,
position,
PredictEditsResponse {
@@ -500,6 +554,7 @@ and then another
cx,
),
self.fake_completion(
+ project,
&buffer,
position,
PredictEditsResponse {
@@ -516,6 +571,7 @@ and then another
cx,
),
self.fake_completion(
+ project,
&buffer,
position,
PredictEditsResponse {
@@ -532,6 +588,7 @@ and then another
cx,
),
self.fake_completion(
+ project,
&buffer,
position,
PredictEditsResponse {
@@ -547,6 +604,7 @@ and then another
cx,
),
self.fake_completion(
+ project,
&buffer,
position,
PredictEditsResponse {
@@ -561,6 +619,7 @@ and then another
cx,
),
self.fake_completion(
+ project,
&buffer,
position,
PredictEditsResponse {
@@ -593,6 +652,7 @@ and then another
#[cfg(any(test, feature = "test-support"))]
pub fn fake_completion(
&mut self,
+ project: Option<&Entity<Project>>,
buffer: &Entity<Buffer>,
position: language::Anchor,
response: PredictEditsResponse,
@@ -600,22 +660,24 @@ and then another
) -> Task<Result<Option<InlineCompletion>>> {
use std::future::ready;
- self.request_completion_impl(buffer, position, false, cx, |_, _, _, _| {
+ self.request_completion_impl(project, buffer, position, false, cx, |_, _, _, _| {
ready(Ok(response))
})
}
pub fn request_completion(
&mut self,
+ project: Option<&Entity<Project>>,
buffer: &Entity<Buffer>,
position: language::Anchor,
- data_collection_permission: bool,
+ can_collect_data: bool,
cx: &mut Context<Self>,
) -> Task<Result<Option<InlineCompletion>>> {
self.request_completion_impl(
+ project,
buffer,
position,
- data_collection_permission,
+ can_collect_data,
cx,
Self::perform_predict_edits,
)
@@ -625,7 +687,7 @@ and then another
client: Arc<Client>,
llm_token: LlmApiToken,
_is_staff: bool,
- body: PredictEditsParams,
+ body: PredictEditsBody,
) -> impl Future<Output = Result<PredictEditsResponse>> {
async move {
let http_client = client.http_client();
@@ -952,21 +1014,41 @@ impl LicenseDetectionWatcher {
pub fn new(worktree: &Worktree, cx: &mut Context<Worktree>) -> Self {
let (mut is_open_source_tx, is_open_source_rx) = watch::channel_with::<bool>(false);
- let loaded_file_fut = worktree.load_file(Path::new("LICENSE"), cx);
+ // Check if the worktree is a single file; if so, we do not need to check for a LICENSE file
+ let task = if worktree.abs_path().is_file() {
+ Task::ready(())
+ } else {
+ let loaded_files = LICENSE_FILES_TO_CHECK
+ .iter()
+ .map(Path::new)
+ .map(|file| worktree.load_file(file, cx))
+ .collect::<ArrayVec<_, { LICENSE_FILES_TO_CHECK.len() }>>();
+
+ cx.background_executor().spawn(async move {
+ for loaded_file in loaded_files.into_iter() {
+ let Ok(loaded_file) = loaded_file.await else {
+ continue;
+ };
+
+ let path = &loaded_file.file.path;
+ if is_license_eligible_for_data_collection(&loaded_file.text) {
+ log::info!("detected '{path:?}' as open source license");
+ *is_open_source_tx.borrow_mut() = true;
+ } else {
+ log::info!("didn't detect '{path:?}' as open source license");
+ }
- Self {
- is_open_source_rx,
- _is_open_source_task: cx.spawn(|_, _| async move {
- // TODO: Don't display error if file not found
- let Some(loaded_file) = loaded_file_fut.await.log_err() else {
+ // stop on the first license file that was successfully read
return;
- };
+ }
- let is_loaded_file_open_source_thing: bool =
- is_license_eligible_for_data_collection(&loaded_file.text);
+ log::debug!("didn't find a license file to check, assuming closed source");
+ })
+ };
- *is_open_source_tx.borrow_mut() = is_loaded_file_open_source_thing;
- }),
+ Self {
+ is_open_source_rx,
+ _is_open_source_task: task,
}
}
@@ -1365,7 +1447,7 @@ impl ProviderDataCollection {
.map_or(false, |choice| choice.read(cx).is_enabled())
}
- pub fn data_collection_permission(&self, cx: &App) -> bool {
+ pub fn can_collect_data(&self, cx: &App) -> bool {
self.choice
.as_ref()
.is_some_and(|choice| choice.read(cx).is_enabled())
@@ -1418,7 +1500,7 @@ impl ZetaInlineCompletionProvider {
}
}
-impl inline_completion::InlineCompletionProvider for ZetaInlineCompletionProvider {
+impl inline_completion::EditPredictionProvider for ZetaInlineCompletionProvider {
fn name() -> &'static str {
"zed-predict"
}
@@ -1431,10 +1513,6 @@ impl inline_completion::InlineCompletionProvider for ZetaInlineCompletionProvide
true
}
- fn show_completions_in_normal_mode() -> bool {
- true
- }
-
fn show_tab_accept_marker() -> bool {
true
}
@@ -1456,15 +1534,11 @@ impl inline_completion::InlineCompletionProvider for ZetaInlineCompletionProvide
fn is_enabled(
&self,
- buffer: &Entity<Buffer>,
- cursor_position: language::Anchor,
- cx: &App,
+ _buffer: &Entity<Buffer>,
+ _cursor_position: language::Anchor,
+ _cx: &App,
) -> bool {
- let buffer = buffer.read(cx);
- let file = buffer.file();
- let language = buffer.language_at(cursor_position);
- let settings = all_language_settings(file, cx);
- settings.inline_completions_enabled(language.as_ref(), file.map(|f| f.path().as_ref()), cx)
+ true
}
fn needs_terms_acceptance(&self, cx: &App) -> bool {
@@ -1477,6 +1551,7 @@ impl inline_completion::InlineCompletionProvider for ZetaInlineCompletionProvide
fn refresh(
&mut self,
+ project: Option<Entity<Project>>,
buffer: Entity<Buffer>,
position: language::Anchor,
_debounce: bool,
@@ -1499,8 +1574,7 @@ impl inline_completion::InlineCompletionProvider for ZetaInlineCompletionProvide
let pending_completion_id = self.next_pending_completion_id;
self.next_pending_completion_id += 1;
- let data_collection_permission =
- self.provider_data_collection.data_collection_permission(cx);
+ let can_collect_data = self.provider_data_collection.can_collect_data(cx);
let last_request_timestamp = self.last_request_timestamp;
let task = cx.spawn(|this, mut cx| async move {
@@ -1513,7 +1587,13 @@ impl inline_completion::InlineCompletionProvider for ZetaInlineCompletionProvide
let completion_request = this.update(&mut cx, |this, cx| {
this.last_request_timestamp = Instant::now();
this.zeta.update(cx, |zeta, cx| {
- zeta.request_completion(&buffer, position, data_collection_permission, cx)
+ zeta.request_completion(
+ project.as_ref(),
+ &buffer,
+ position,
+ can_collect_data,
+ cx,
+ )
})
});
@@ -1842,7 +1922,7 @@ mod tests {
let buffer = cx.new(|cx| Buffer::local(buffer_content, cx));
let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 0)));
let completion_task = zeta.update(cx, |zeta, cx| {
- zeta.request_completion(&buffer, cursor, false, cx)
+ zeta.request_completion(None, &buffer, cursor, false, cx)
});
let token_request = server.receive::<proto::GetLlmToken>().await.unwrap();
@@ -29,7 +29,7 @@ To use GitHub Copilot (enabled by default), add the following to your `settings.
```json
{
"features": {
- "inline_completion_provider": "copilot"
+ "edit_prediction_provider": "copilot"
}
}
```
@@ -43,7 +43,7 @@ To use Supermaven, add the following to your `settings.json`:
```json
{
"features": {
- "inline_completion_provider": "supermaven"
+ "edit_prediction_provider": "supermaven"
}
}
```
@@ -56,23 +56,23 @@ Once you have configured an Edit Prediction provider, you can start using edit p
There are a number of actions/shortcuts available to interact with edit predictions:
-- `editor: accept inline completion` (`tab`): To accept the current edit prediction
-- `editor: accept partial inline completion` (`ctrl-cmd-right`): To accept the current edit prediction up to the next word boundary
-- `editor: show inline completion` (`alt-tab`): Trigger an edit prediction request manually
-- `editor: next inline completion` (`alt-tab`): To cycle to the next edit prediction
-- `editor: previous inline completion` (`alt-shift-tab`): To cycle to the previous edit prediction
+- `editor: accept edit prediction` (`tab`): To accept the current edit prediction
+- `editor: accept partial edit prediction` (`ctrl-cmd-right`): To accept the current edit prediction up to the next word boundary
+- `editor: show edit prediction` (`alt-tab`): Trigger an edit prediction request manually
+- `editor: next edit prediction` (`alt-tab`): To cycle to the next edit prediction
+- `editor: previous edit prediction` (`alt-shift-tab`): To cycle to the previous edit prediction
-### Disabling Inline-Completions
+### Disabling Edit Prediction
-To disable completions that appear automatically as you type, add the following to your `settings.json`:
+To disable predictions that appear automatically as you type, add the following to your `settings.json`:
```json
{
- "show_inline_completions": false
+ "show_edit_predictions": false
}
```
-You can trigger edit predictions manually by executing `editor: show inline completion` (`alt-tab`).
+You can trigger edit predictions manually by executing `editor: show edit prediction` (`alt-tab`).
You can also add this as a language-specific setting in your `settings.json` to disable edit predictions for a specific language:
@@ -80,7 +80,7 @@ You can also add this as a language-specific setting in your `settings.json` to
{
"language": {
"python": {
- "show_inline_completions": false
+ "show_edit_predictions": false
}
}
}
@@ -378,11 +378,11 @@ There are two options to choose from:
## Edit Predictions
- Description: Settings for edit predictions.
-- Setting: `inline_completions`
+- Setting: `edit_predictions`
- Default:
```json
- "inline_completions": {
+ "edit_predictions": {
"disabled_globs": [
"**/.env*",
"**/*.pem",
@@ -398,18 +398,18 @@ There are two options to choose from:
### Disabled Globs
-- Description: A list of globs representing files that edit predictions should be disabled for.
+- Description: A list of globs for which edit predictions should be disabled. This list adds to a pre-existing, sensible default set of globs. Any additional ones you add are combined with them.
- Setting: `disabled_globs`
-- Default: `[".env"]`
+- Default: `["**/.env*", "**/*.pem", "**/*.key", "**/*.cert", "**/*.crt", "**/secrets.yml"]`
**Options**
-List of `string` values
+List of `string` values.
## Edit Predictions Disabled in
- Description: A list of language scopes in which edit predictions should be disabled.
-- Setting: `inline_completions_disabled_in`
+- Setting: `edit_predictions_disabled_in`
- Default: `[]`
**Options**
@@ -434,7 +434,7 @@ List of `string` values
{
"languages": {
"Go": {
- "inline_completions_disabled_in": ["comment", "string"]
+ "edit_predictions_disabled_in": ["comment", "string"]
}
}
}
@@ -1478,7 +1478,7 @@ The following settings can be overridden for each specific language:
- [`hard_tabs`](#hard-tabs)
- [`preferred_line_length`](#preferred-line-length)
- [`remove_trailing_whitespace_on_save`](#remove-trailing-whitespace-on-save)
-- [`show_inline_completions`](#show-inline-completions)
+- [`show_edit_predictions`](#show-edit-predictions)
- [`show_whitespaces`](#show-whitespaces)
- [`soft_wrap`](#soft-wrap)
- [`tab_size`](#tab-size)
@@ -1654,8 +1654,8 @@ Or to set a `socks5` proxy:
## Show Edit Predictions
-- Description: Whether to show edit predictions as you type or manually by triggering `editor::ShowInlineCompletion`.
-- Setting: `show_inline_completions`
+- Description: Whether to show edit predictions as you type or manually by triggering `editor::ShowEditPrediction`.
+- Setting: `show_edit_predictions`
- Default: `true`
**Options**
@@ -14,6 +14,8 @@ Extensions can add the following capabilities to Zed:
Before starting to develop an extension for Zed, be sure to [install Rust via rustup](https://www.rust-lang.org/tools/install).
+> Rust must be installed via rustup. If you have Rust installed via Homebrew or otherwise, installing dev extensions will not work.
+
When developing an extension, you can use it in Zed without needing to publish it by installing it as a _dev extension_.
From the extensions page, click the `Install Dev Extension` button and select the directory containing your extension.
@@ -95,6 +97,12 @@ To publish an extension, open a PR to [the `zed-industries/extensions` repo](htt
In your PR, do the following:
1. Add your extension as a Git submodule within the `extensions/` directory
+
+```sh
+git submodule add https://github.com/your-username/foobar-zed.git extensions/foobar
+git add extensions/foobar
+```
+
2. Add a new entry to the top-level `extensions.toml` file containing your extension:
```toml
@@ -119,7 +119,7 @@ command palette, by looking in the default keymaps for
or
[Linux](https://github.com/zed-industries/zed/blob/main/assets/keymaps/default-linux.json), or by using Zed's autocomplete in your keymap file.
-Most actions do not require any arguments, and so you can bind them as strings: `"ctrl-a": "language_selector::Toggle"`. Some require a single argument, and must be bound as an array: `"ctrl-a": ["workspace::ActivatePaneInDirection", "down"]`. Some actions require multiple arguments, and are bound as an array of a string and an object: `"ctrl-a": ["pane::DeploySearch", { "replace_enabled": true }]`.
+Most actions do not require any arguments, and so you can bind them as strings: `"ctrl-a": "language_selector::Toggle"`. Some require a single argument, and must be bound as an array: `"cmd-1": ["workspace::ActivatePane", 0]`. Some actions require multiple arguments, and are bound as an array of a string and an object: `"ctrl-a": ["pane::DeploySearch", { "replace_enabled": true }]`.
### Precedence
@@ -159,6 +159,7 @@ Zed's vim mode includes some features that are usually provided by very popular
- You can comment and uncomment selections with `gc` in visual mode and `gcc` in normal mode.
- The project panel supports many shortcuts modeled after the Vim plugin `netrw`: navigation with `hjkl`, open file with `o`, open file in a new tab with `t`, etc.
- You can add key bindings to your keymap to navigate "camelCase" names. [Head down to the Optional key bindings](#optional-key-bindings) section to learn how.
+- You can use `gr` to do [ReplaceWithRegister](https://github.com/vim-scripts/ReplaceWithRegister).
## Command palette
@@ -367,10 +368,10 @@ But you cannot use the same shortcuts to move between all the editor docks (the
{
"context": "Dock",
"bindings": {
- "ctrl-w h": ["workspace::ActivatePaneInDirection", "Left"],
- "ctrl-w l": ["workspace::ActivatePaneInDirection", "Right"],
- "ctrl-w k": ["workspace::ActivatePaneInDirection", "Up"],
- "ctrl-w j": ["workspace::ActivatePaneInDirection", "Down"]
+ "ctrl-w h": "workspace::ActivatePaneLeft",
+ "ctrl-w l": "workspace::ActivatePaneRight",
+ "ctrl-w k": "workspace::ActivatePaneUp",
+ "ctrl-w j": "workspace::ActivatePaneDown"
// ... or other keybindings
}
}
@@ -398,12 +399,7 @@ Vim mode comes with shortcuts to surround the selection in normal mode (`ys`), b
{
"context": "vim_mode == visual",
"bindings": {
- "shift-s": [
- "vim::PushOperator",
- {
- "AddSurrounds": {}
- }
- ]
+ "shift-s": ["vim::PushAddSurrounds", {}]
}
}
```
@@ -415,8 +411,8 @@ The [Sneak motion](https://github.com/justinmk/vim-sneak) feature allows for qui
{
"context": "vim_mode == normal || vim_mode == visual",
"bindings": {
- "s": ["vim::PushOperator", { "Sneak": {} }],
- "S": ["vim::PushOperator", { "SneakBackward": {} }]
+ "s": ["vim::PushSneak", {}],
+ "S": ["vim::PushSneakBackward", {}]
}
}
]
@@ -1,6 +1,6 @@
# Extracting an extension to dedicated repo
-These are some notes of how to extract an extension from the main zed repository and generate a new repository which preserves the history as best as possible. In the this example we will be extracting the `ruby` extension, substitute as appropriate.
+These are some notes on how to extract an extension from the main zed repository and generate a new repository which preserves the history as best as possible. In this example we will be extracting the `ruby` extension, substitute as appropriate.
## Pre-requisites
@@ -23,7 +23,7 @@ regex:(?<![\[a-zA-Z0-9])(#[0-9]{3,5})==>zed-industries/zed\1
```
This file takes the form of `patern==>replacement`, where the replacement is optional.
-Note whitespace matters so `ruby: ==>` is removing the `ruby:` prefix from a commit messages and adding a space after `==> ` means the replacement begins with a space. Regex capture groups are numbered `\1`, `\2`, etc.
+Note whitespace matters so `ruby: ==>` is removing the `ruby:` prefix from commit messages, and adding a space after `==> ` means the replacement begins with a space. Regex capture groups are numbered `\1`, `\2`, etc.
See: [Git Filter Repo Docs](https://htmlpreview.github.io/?https://github.com/newren/git-filter-repo/blob/docs/html/git-filter-repo.html) for more.
@@ -34,16 +34,17 @@ See: [Git Filter Repo Docs](https://htmlpreview.github.io/?https://github.com/ne
> `setopt interactive_comments && echo "setopt interactive_comments" >> ~/.zshrc`
```sh
-rm -rf zed3
-git clone --single-branch --no-tags git@github.com:zed-industries/zed.git zed3
-cd zed3
+LANGNAME=ruby
+rm -rf $LANGNAME
+git clone --single-branch --no-tags git@github.com:zed-industries/zed.git $LANGNAME
+cd $LANGNAME
# This removes the LICENSE symlink
-git filter-repo --invert-paths --path extensions/ruby/LICENSE-APACHE
+git filter-repo --invert-paths --path extensions/$LANGNAME/LICENSE-APACHE
git filter-repo \
--use-mailmap \
- --subdirectory-filter extensions/ruby/ \
+ --subdirectory-filter extensions/$LANGNAME/ \
--path LICENSE-APACHE \
--replace-message ~/projects/expressions.txt
```
@@ -65,10 +66,11 @@ You can always add tags later, but it's a nice touch.
Show you all commits that mention a version number:
```sh
-git log --grep="(\d+\.\d+\.\d+\.)" --perl-regexp --oneline --reverse
+git log --grep="(\d+\.\d+\.\d+)" --perl-regexp --oneline --reverse
```
Then just:
+
```
git tag v0.0.2 abcd1234
git tag v0.0.3 deadbeef
@@ -76,13 +78,77 @@ git tag v0.0.3 deadbeef
Usually the initial extraction didn't mention a version number so you can just do that one manually.
-4. Push to the new repo
+4. [Optional] Add a README.md and commit.
+
+5. Push to the new repo
-Create a new empty repo on github under the [zed-extensions](https://github.com/zed-extensions) organization.
+Create a new empty repo on github under the [zed-extensions](https://github.com/organizations/zed-extensions/repositories/new) organization.
```
-git remote add origin git@github.com:zed-extensions/ruby
+git remote add origin git@github.com:zed-extensions/$LANGNAME
git push origin main --tags
+git branch --set-upstream-to=origin/main main
+```
+
+6. Setup the new repository:
+
+- Go to the repository settings:
+ - Disable Wikis
+ - Uncheck "Allow Merge Commits"
+ - Check "Allow Squash Merging"
+ - Default commit message: "Pull request title and description"
+
+7. Publish a new version of the extension.
+
+```
+OLD_VERSION=$(grep '^version = ' extension.toml | cut -d'"' -f2)
+NEW_VERSION=$(echo "$OLD_VERSION" | awk -F. '{$NF = $NF + 1;} 1' OFS=.)
+echo $OLD_VERSION $NEW_VERSION
+perl -i -pe "s/$OLD_VERSION/$NEW_VERSION/" extension.toml
+
+# if there's rust code, update this too.
+test -f Cargo.toml && perl -i -pe "s/$OLD_VERSION/$NEW_VERSION/" Cargo.toml
+test -f Cargo.toml && cargo check
+
+# commit and push
+git add -u
+git checkout -b "bump_${NEW_VERSION}"
+git commit -m "Bump to v${NEW_VERSION}"
+git push
+gh pr create --title "Bump to v${NEW_VERSION}" --web
+
+# merge PR in web interface
+git checkout main
+git pull
+git tag v${NEW_VERSION}
+git push origin v${NEW_VERSION}
+```
+
+8. In the zed repository, `rm -rf extensions/$LANGNAME` and push a PR.
+
+9. Update the extensions repository:
+
+```sh
+cd ../extensions
+git checkout main
+git pull
+git submodule init
+git submodule update
+git status
+
+git checkout -b ${LANGNAME}_v${NEW_VERSION}
+git submodule add https://github.com/zed-extensions/${LANGNAME}.git extensions/${LANGNAME}
+pnpm sort-extensions
+
+# edit extensions.toml:
+# - bump version
+# - change `submodule` from `extensions/zed` to new path
+# - remove `path` line all together
+
+git add extensions.toml .gitmodules extensions/${LANGNAME}
+git diff --cached
+git commit -m "Bump ${LANGNAME} to v${NEW_VERSION}"
+git push
```
-5. [Optional]
+Create PR and reference the Zed PR with removal from tree.
@@ -52,6 +52,8 @@ if "$rustup_installed"; then
rustup target add "$remote_server_triple"
fi
+export CC=$(which clang)
+
# Build binary in release mode
export RUSTFLAGS="${RUSTFLAGS:-} -C link-args=-Wl,--disable-new-dtags,-rpath,\$ORIGIN/../lib"
cargo build --release --target "${target_triple}" --package zed --package cli
@@ -0,0 +1,22 @@
+param (
+ [Parameter(Mandatory = $true)]
+ [int]$MAX_SIZE_IN_GB
+)
+
+$ErrorActionPreference = "Stop"
+$PSNativeCommandUseErrorActionPreference = $true
+$ProgressPreference = "SilentlyContinue"
+
+if (-Not (Test-Path -Path "target")) {
+ Write-Host "target directory does not exist yet"
+ exit 0
+}
+
+$current_size_gb = (Get-ChildItem -Recurse -Force -File -Path "target" | Measure-Object -Property Length -Sum).Sum / 1GB
+
+Write-Host "target directory size: ${current_size_gb}GB. max size: ${MAX_SIZE_IN_GB}GB"
+
+if ($current_size_gb -gt $MAX_SIZE_IN_GB) {
+ Write-Host "Dev drive is almost full, increase the size first!"
+ exit 1
+}
@@ -45,6 +45,7 @@ if [[ -n $apt ]]; then
libsqlite3-dev
musl-tools
musl-dev
+ build-essential
)
if (grep -qP 'PRETTY_NAME="(Linux Mint 22|.+24\.(04|10))' /etc/os-release); then
deps+=( mold libstdc++-14-dev )
@@ -0,0 +1,86 @@
+#!/bin/bash
+
+# Try to make sure we are in the zed repo root
+if [ ! -d "crates" ] || [ ! -d "script" ]; then
+ echo "Error: Run from the \`zed\` repo root"
+ exit 1
+fi
+
+if [ ! -f "Cargo.toml" ]; then
+ echo "Error: Run from the \`zed\` repo root"
+ exit 1
+fi
+
+if [ $# -eq 0 ]; then
+ echo "Usage: $0 <crate_name> [optional_license_flag]"
+ exit 1
+fi
+
+CRATE_NAME="$1"
+
+LICENSE_FLAG=$(echo "${2}" | tr '[:upper:]' '[:lower:]')
+if [[ "$LICENSE_FLAG" == *"apache"* ]]; then
+ LICENSE_MODE="Apache-2.0"
+ LICENSE_FILE="LICENSE-APACHE"
+elif [[ "$LICENSE_FLAG" == *"agpl"* ]]; then
+ LICENSE_MODE="AGPL-3.0-or-later"
+ LICENSE_FILE="LICENSE-AGPL"
+else
+ LICENSE_MODE="GPL-3.0-or-later"
+ LICENSE_FILE="LICENSE"
+fi
+
+if [[ ! "$CRATE_NAME" =~ ^[a-z0-9_]+$ ]]; then
+ echo "Error: Crate name must be lowercase and contain only alphanumeric characters and underscores"
+ exit 1
+fi
+
+CRATE_PATH="crates/$CRATE_NAME"
+mkdir -p "$CRATE_PATH/src"
+
+# Symlink the license
+ln -sf "../../../$LICENSE_FILE" "$CRATE_PATH/LICENSE"
+
+CARGO_TOML_TEMPLATE=$(cat << 'EOF'
+[package]
+name = "$CRATE_NAME"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "$LICENSE_MODE"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/$CRATE_NAME.rs"
+
+[features]
+default = []
+
+[dependencies]
+anyhow.workspace = true
+gpui.workspace = true
+ui.workspace = true
+util.workspace = true
+
+# Uncomment other workspace dependencies as needed
+# assistant.workspace = true
+# client.workspace = true
+# project.workspace = true
+# settings.workspace = true
+EOF
+)
+
+# Populate template
+CARGO_TOML_CONTENT=$(echo "$CARGO_TOML_TEMPLATE" | sed \
+ -e "s/\$CRATE_NAME/$CRATE_NAME/g" \
+ -e "s/\$LICENSE_MODE/$LICENSE_MODE/g")
+
+echo "$CARGO_TOML_CONTENT" > "$CRATE_PATH/Cargo.toml"
+
+echo "//! # $CRATE_NAME" > "$CRATE_PATH/src/$CRATE_NAME.rs"
+
+echo "Created new crate: $CRATE_NAME in $CRATE_PATH"
+echo "License: $LICENSE_MODE (symlinked from $LICENSE_FILE)"
+echo "Don't forget to add the new crate to the workspace!"
@@ -3,7 +3,8 @@
# The current version of the Windows runner is 10.0.20348 which does not support DevDrive option.
# Ref: https://learn.microsoft.com/en-us/windows/dev-drive/
-$Volume = New-VHD -Path C:/zed_dev_drive.vhdx -SizeBytes 30GB |
+# Currently, total CI requires almost 45GB of space, so here we are creating a 100GB drive.
+$Volume = New-VHD -Path C:/zed_dev_drive.vhdx -SizeBytes 100GB |
Mount-VHD -Passthru |
Initialize-Disk -Passthru |
New-Partition -AssignDriveLetter -UseMaximumSize |
@@ -14,7 +15,7 @@ $Drive = "$($Volume.DriveLetter):"
# Show some debug information
Write-Output $Volume
Write-Output "Using Dev Drive at $Drive"
-
+
# Move Cargo to the dev drive
New-Item -Path "$($Drive)/.cargo/bin" -ItemType Directory -Force
Copy-Item -Path "C:/Users/runneradmin/.cargo/*" -Destination "$($Drive)/.cargo/" -Recurse -Force