Detailed changes
@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
fetch-depth: 0
@@ -18,7 +18,7 @@ jobs:
- buildjet-16vcpu-ubuntu-2204
steps:
- name: Checkout code
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
ref: ${{ github.event.inputs.branch }}
ssh-key: ${{ secrets.ZED_BOT_DEPLOY_KEY }}
@@ -25,6 +25,7 @@ env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: 0
RUST_BACKTRACE: 1
+ RUSTFLAGS: "-D warnings"
jobs:
migration_checks:
@@ -36,7 +37,7 @@ jobs:
- test
steps:
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
fetch-depth: 0 # fetch full history
@@ -78,25 +79,26 @@ jobs:
- buildjet-8vcpu-ubuntu-2204
steps:
- name: Checkout repo
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Run style checks
uses: ./.github/actions/check_style
- name: Check for typos
- uses: crate-ci/typos@v1.24.6
+ uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6
with:
config: ./typos.toml
macos_tests:
timeout-minutes: 60
name: (macOS) Run Clippy and tests
+ if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
- test
steps:
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
@@ -115,17 +117,18 @@ jobs:
uses: ./.github/actions/run_tests
- name: Build collab
- run: RUSTFLAGS="-D warnings" cargo build -p collab
+ run: cargo build -p collab
- name: Build other binaries and features
run: |
- RUSTFLAGS="-D warnings" cargo build --workspace --bins --all-features
+ cargo build --workspace --bins --all-features
cargo check -p gpui --features "macos-blade"
- RUSTFLAGS="-D warnings" cargo build -p remote_server
+ cargo build -p remote_server
linux_tests:
timeout-minutes: 60
name: (Linux) Run Clippy and tests
+ if: github.repository_owner == 'zed-industries'
runs-on:
- buildjet-16vcpu-ubuntu-2204
steps:
@@ -133,7 +136,7 @@ jobs:
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
@@ -153,11 +156,12 @@ jobs:
uses: ./.github/actions/run_tests
- name: Build Zed
- run: RUSTFLAGS="-D warnings" cargo build -p zed
+ run: cargo build -p zed
build_remote_server:
timeout-minutes: 60
name: (Linux) Build Remote Server
+ if: github.repository_owner == 'zed-industries'
runs-on:
- buildjet-16vcpu-ubuntu-2204
steps:
@@ -165,7 +169,7 @@ jobs:
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
@@ -179,16 +183,20 @@ jobs:
run: ./script/remote-server && ./script/install-mold 2.34.0
- name: Build Remote Server
- run: RUSTFLAGS="-D warnings" cargo build -p remote_server
+ run: cargo build -p remote_server
# todo(windows): Actually run the tests
windows_tests:
timeout-minutes: 60
name: (Windows) Run Clippy and tests
+ if: github.repository_owner == 'zed-industries'
runs-on: hosted-windows-1
steps:
+ # more info here:- https://github.com/rust-lang/cargo/issues/13020
+ - name: Enable longer pathnames for git
+ run: git config --system core.longpaths true
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
@@ -203,7 +211,7 @@ jobs:
run: cargo xtask clippy
- name: Build Zed
- run: $env:RUSTFLAGS="-D warnings"; cargo build
+ run: cargo build
bundle-mac:
timeout-minutes: 60
@@ -229,7 +237,7 @@ jobs:
node-version: "18"
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
# We need to fetch more than one commit so that `script/draft-release-notes`
# is able to diff between the current and previous tag.
@@ -314,7 +322,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
@@ -361,7 +369,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
if: github.repository_owner == 'zed-industries'
steps:
- - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Set up uv
uses: astral-sh/setup-uv@f3bcaebff5eace81a1c062af9f9011aae482ca9d # v3
with:
@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
if: github.repository_owner == 'zed-industries'
steps:
- - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Set up uv
uses: astral-sh/setup-uv@f3bcaebff5eace81a1c062af9f9011aae482ca9d # v3
with:
@@ -14,7 +14,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
with:
@@ -13,7 +13,7 @@ jobs:
steps:
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
@@ -17,7 +17,7 @@ jobs:
- test
steps:
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
fetch-depth: 0
@@ -36,7 +36,7 @@ jobs:
needs: style
steps:
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
fetch-depth: 0
@@ -71,7 +71,7 @@ jobs:
run: doctl registry login
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
@@ -97,7 +97,7 @@ jobs:
steps:
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
@@ -15,7 +15,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
with:
@@ -31,7 +31,7 @@ jobs:
}
- name: Check for Typos with Typos-CLI
- uses: crate-ci/typos@v1.24.6
+ uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6
with:
config: ./typos.toml
files: ./docs/
@@ -16,7 +16,7 @@ jobs:
- ubuntu-latest
steps:
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
@@ -27,7 +27,7 @@ jobs:
node-version: "18"
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
@@ -23,7 +23,7 @@ jobs:
- test
steps:
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
fetch-depth: 0
@@ -44,7 +44,7 @@ jobs:
needs: style
steps:
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
@@ -75,7 +75,7 @@ jobs:
node-version: "18"
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
@@ -109,7 +109,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
@@ -149,7 +149,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
@@ -182,7 +182,7 @@ jobs:
- bundle-linux-arm
steps:
- name: Checkout repo
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
fetch-depth: 0
@@ -261,9 +261,9 @@ checksum = "34cd60c5e3152cef0a592f1b296f1cc93715d89d2551d85315828c3a09575ff4"
[[package]]
name = "anyhow"
-version = "1.0.89"
+version = "1.0.91"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6"
+checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8"
[[package]]
name = "approx"
@@ -291,6 +291,12 @@ dependencies = [
"syn 2.0.76",
]
+[[package]]
+name = "arraydeque"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7d902e3d592a523def97af8f317b08ce16b7ab854c1985a0c671e6f15cebc236"
+
[[package]]
name = "arrayref"
version = "0.3.8"
@@ -385,7 +391,7 @@ dependencies = [
"ctor",
"db",
"editor",
- "env_logger",
+ "env_logger 0.11.5",
"feature_flags",
"fs",
"futures 0.3.30",
@@ -453,9 +459,11 @@ dependencies = [
"anyhow",
"collections",
"derive_more",
+ "futures 0.3.30",
"gpui",
"language",
"parking_lot",
+ "pretty_assertions",
"serde",
"serde_json",
"workspace",
@@ -2548,9 +2556,8 @@ dependencies = [
"ctor",
"dashmap 6.0.1",
"derive_more",
- "dev_server_projects",
"editor",
- "env_logger",
+ "env_logger 0.11.5",
"envy",
"file_finder",
"fs",
@@ -2559,7 +2566,6 @@ dependencies = [
"git_hosting_providers",
"google_ai",
"gpui",
- "headless",
"hex",
"http_client",
"hyper 0.14.30",
@@ -2706,7 +2712,7 @@ dependencies = [
"command_palette_hooks",
"ctor",
"editor",
- "env_logger",
+ "env_logger 0.11.5",
"fuzzy",
"go_to_line",
"gpui",
@@ -3474,18 +3480,6 @@ dependencies = [
"syn 1.0.109",
]
-[[package]]
-name = "dev_server_projects"
-version = "0.1.0"
-dependencies = [
- "anyhow",
- "client",
- "gpui",
- "rpc",
- "serde",
- "serde_json",
-]
-
[[package]]
name = "diagnostics"
version = "0.1.0"
@@ -3495,7 +3489,7 @@ dependencies = [
"collections",
"ctor",
"editor",
- "env_logger",
+ "env_logger 0.11.5",
"futures 0.3.30",
"gpui",
"language",
@@ -3683,7 +3677,7 @@ dependencies = [
"ctor",
"db",
"emojis",
- "env_logger",
+ "env_logger 0.11.5",
"file_icons",
"futures 0.3.30",
"fuzzy",
@@ -3889,6 +3883,19 @@ dependencies = [
"regex",
]
+[[package]]
+name = "env_logger"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580"
+dependencies = [
+ "humantime",
+ "is-terminal",
+ "log",
+ "regex",
+ "termcolor",
+]
+
[[package]]
name = "env_logger"
version = "0.11.5"
@@ -3997,7 +4004,7 @@ dependencies = [
"client",
"clock",
"collections",
- "env_logger",
+ "env_logger 0.11.5",
"feature_flags",
"fs",
"git",
@@ -4092,7 +4099,7 @@ dependencies = [
"client",
"collections",
"ctor",
- "env_logger",
+ "env_logger 0.11.5",
"fs",
"futures 0.3.30",
"gpui",
@@ -4134,7 +4141,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"clap",
- "env_logger",
+ "env_logger 0.11.5",
"extension",
"fs",
"language",
@@ -4293,7 +4300,7 @@ dependencies = [
"collections",
"ctor",
"editor",
- "env_logger",
+ "env_logger 0.11.5",
"file_icons",
"futures 0.3.30",
"fuzzy",
@@ -5048,7 +5055,7 @@ dependencies = [
"ctor",
"derive_more",
"embed-resource",
- "env_logger",
+ "env_logger 0.11.5",
"etagere",
"filedescriptor",
"flume",
@@ -5238,6 +5245,15 @@ dependencies = [
"serde",
]
+[[package]]
+name = "hashlink"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7"
+dependencies = [
+ "hashbrown 0.14.5",
+]
+
[[package]]
name = "hashlink"
version = "0.9.1"
@@ -5271,28 +5287,6 @@ dependencies = [
"http 0.2.12",
]
-[[package]]
-name = "headless"
-version = "0.1.0"
-dependencies = [
- "anyhow",
- "client",
- "extension",
- "fs",
- "futures 0.3.30",
- "gpui",
- "language",
- "log",
- "node_runtime",
- "postage",
- "project",
- "proto",
- "settings",
- "shellexpand 2.1.2",
- "signal-hook",
- "util",
-]
-
[[package]]
name = "heck"
version = "0.3.3"
@@ -6218,7 +6212,7 @@ dependencies = [
"collections",
"ctor",
"ec4rs",
- "env_logger",
+ "env_logger 0.11.5",
"futures 0.3.30",
"fuzzy",
"git",
@@ -6275,7 +6269,7 @@ dependencies = [
"copilot",
"ctor",
"editor",
- "env_logger",
+ "env_logger 0.11.5",
"feature_flags",
"futures 0.3.30",
"google_ai",
@@ -6332,7 +6326,7 @@ dependencies = [
"collections",
"copilot",
"editor",
- "env_logger",
+ "env_logger 0.11.5",
"futures 0.3.30",
"gpui",
"language",
@@ -6366,6 +6360,11 @@ dependencies = [
"lsp",
"node_runtime",
"paths",
+ "pet",
+ "pet-conda",
+ "pet-core",
+ "pet-poetry",
+ "pet-reporter",
"project",
"regex",
"rope",
@@ -6662,7 +6661,7 @@ dependencies = [
"async-pipe",
"collections",
"ctor",
- "env_logger",
+ "env_logger 0.11.5",
"futures 0.3.30",
"gpui",
"log",
@@ -6745,7 +6744,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"assets",
- "env_logger",
+ "env_logger 0.11.5",
"futures 0.3.30",
"gpui",
"language",
@@ -6858,7 +6857,7 @@ dependencies = [
"clap",
"clap_complete",
"elasticlunr-rs",
- "env_logger",
+ "env_logger 0.11.5",
"futures-util",
"handlebars 5.1.2",
"ignore",
@@ -7040,6 +7039,15 @@ dependencies = [
"windows-sys 0.48.0",
]
+[[package]]
+name = "msvc_spectre_libs"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8661ace213a0a130c7c5b9542df5023aedf092a02008ccf477b39ff108990305"
+dependencies = [
+ "cc",
+]
+
[[package]]
name = "multi_buffer"
version = "0.1.0"
@@ -7048,7 +7056,7 @@ dependencies = [
"clock",
"collections",
"ctor",
- "env_logger",
+ "env_logger 0.11.5",
"futures 0.3.30",
"gpui",
"itertools 0.13.0",
@@ -7762,8 +7770,10 @@ dependencies = [
"serde",
"serde_json",
"settings",
+ "smallvec",
"smol",
"theme",
+ "ui",
"util",
"workspace",
"worktree",
@@ -8006,6 +8016,366 @@ dependencies = [
"sha2",
]
+[[package]]
+name = "pet"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "clap",
+ "env_logger 0.10.2",
+ "lazy_static",
+ "log",
+ "msvc_spectre_libs",
+ "pet-conda",
+ "pet-core",
+ "pet-env-var-path",
+ "pet-fs",
+ "pet-global-virtualenvs",
+ "pet-homebrew",
+ "pet-jsonrpc",
+ "pet-linux-global-python",
+ "pet-mac-commandlinetools",
+ "pet-mac-python-org",
+ "pet-mac-xcode",
+ "pet-pipenv",
+ "pet-poetry",
+ "pet-pyenv",
+ "pet-python-utils",
+ "pet-reporter",
+ "pet-telemetry",
+ "pet-venv",
+ "pet-virtualenv",
+ "pet-virtualenvwrapper",
+ "pet-windows-registry",
+ "pet-windows-store",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "pet-conda"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "env_logger 0.10.2",
+ "lazy_static",
+ "log",
+ "msvc_spectre_libs",
+ "pet-core",
+ "pet-fs",
+ "pet-python-utils",
+ "pet-reporter",
+ "regex",
+ "serde",
+ "serde_json",
+ "yaml-rust2",
+]
+
+[[package]]
+name = "pet-core"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "clap",
+ "lazy_static",
+ "log",
+ "msvc_spectre_libs",
+ "pet-fs",
+ "regex",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "pet-env-var-path"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "lazy_static",
+ "log",
+ "msvc_spectre_libs",
+ "pet-conda",
+ "pet-core",
+ "pet-fs",
+ "pet-python-utils",
+ "pet-virtualenv",
+ "regex",
+]
+
+[[package]]
+name = "pet-fs"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "log",
+ "msvc_spectre_libs",
+]
+
+[[package]]
+name = "pet-global-virtualenvs"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "log",
+ "msvc_spectre_libs",
+ "pet-conda",
+ "pet-core",
+ "pet-fs",
+ "pet-virtualenv",
+]
+
+[[package]]
+name = "pet-homebrew"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "lazy_static",
+ "log",
+ "msvc_spectre_libs",
+ "pet-conda",
+ "pet-core",
+ "pet-fs",
+ "pet-python-utils",
+ "pet-virtualenv",
+ "regex",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "pet-jsonrpc"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "env_logger 0.10.2",
+ "log",
+ "msvc_spectre_libs",
+ "pet-core",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "pet-linux-global-python"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "log",
+ "msvc_spectre_libs",
+ "pet-core",
+ "pet-fs",
+ "pet-python-utils",
+ "pet-virtualenv",
+]
+
+[[package]]
+name = "pet-mac-commandlinetools"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "log",
+ "msvc_spectre_libs",
+ "pet-core",
+ "pet-fs",
+ "pet-python-utils",
+ "pet-virtualenv",
+]
+
+[[package]]
+name = "pet-mac-python-org"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "log",
+ "msvc_spectre_libs",
+ "pet-core",
+ "pet-fs",
+ "pet-python-utils",
+ "pet-virtualenv",
+]
+
+[[package]]
+name = "pet-mac-xcode"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "log",
+ "msvc_spectre_libs",
+ "pet-core",
+ "pet-fs",
+ "pet-python-utils",
+ "pet-virtualenv",
+]
+
+[[package]]
+name = "pet-pipenv"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "log",
+ "msvc_spectre_libs",
+ "pet-core",
+ "pet-fs",
+ "pet-python-utils",
+ "pet-virtualenv",
+]
+
+[[package]]
+name = "pet-poetry"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "base64 0.22.1",
+ "lazy_static",
+ "log",
+ "msvc_spectre_libs",
+ "pet-core",
+ "pet-fs",
+ "pet-python-utils",
+ "pet-reporter",
+ "pet-virtualenv",
+ "regex",
+ "serde",
+ "serde_json",
+ "sha2",
+ "toml 0.8.19",
+]
+
+[[package]]
+name = "pet-pyenv"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "lazy_static",
+ "log",
+ "msvc_spectre_libs",
+ "pet-conda",
+ "pet-core",
+ "pet-fs",
+ "pet-python-utils",
+ "pet-reporter",
+ "regex",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "pet-python-utils"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "env_logger 0.10.2",
+ "lazy_static",
+ "log",
+ "msvc_spectre_libs",
+ "pet-core",
+ "pet-fs",
+ "regex",
+ "serde",
+ "serde_json",
+ "sha2",
+]
+
+[[package]]
+name = "pet-reporter"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "env_logger 0.10.2",
+ "log",
+ "msvc_spectre_libs",
+ "pet-core",
+ "pet-jsonrpc",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "pet-telemetry"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "env_logger 0.10.2",
+ "lazy_static",
+ "log",
+ "msvc_spectre_libs",
+ "pet-core",
+ "pet-fs",
+ "pet-python-utils",
+ "regex",
+]
+
+[[package]]
+name = "pet-venv"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "log",
+ "msvc_spectre_libs",
+ "pet-core",
+ "pet-python-utils",
+ "pet-virtualenv",
+]
+
+[[package]]
+name = "pet-virtualenv"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "log",
+ "msvc_spectre_libs",
+ "pet-core",
+ "pet-fs",
+ "pet-python-utils",
+]
+
+[[package]]
+name = "pet-virtualenvwrapper"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "log",
+ "msvc_spectre_libs",
+ "pet-core",
+ "pet-fs",
+ "pet-python-utils",
+ "pet-virtualenv",
+]
+
+[[package]]
+name = "pet-windows-registry"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "lazy_static",
+ "log",
+ "msvc_spectre_libs",
+ "pet-conda",
+ "pet-core",
+ "pet-fs",
+ "pet-python-utils",
+ "pet-virtualenv",
+ "pet-windows-store",
+ "regex",
+ "winreg 0.52.0",
+]
+
+[[package]]
+name = "pet-windows-store"
+version = "0.1.0"
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c"
+dependencies = [
+ "lazy_static",
+ "log",
+ "msvc_spectre_libs",
+ "pet-core",
+ "pet-fs",
+ "pet-python-utils",
+ "pet-virtualenv",
+ "regex",
+ "winreg 0.52.0",
+]
+
[[package]]
name = "petgraph"
version = "0.6.5"
@@ -8094,7 +8464,7 @@ dependencies = [
"anyhow",
"ctor",
"editor",
- "env_logger",
+ "env_logger 0.11.5",
"gpui",
"menu",
"serde",
@@ -8440,8 +8810,7 @@ dependencies = [
"client",
"clock",
"collections",
- "dev_server_projects",
- "env_logger",
+ "env_logger 0.11.5",
"fs",
"futures 0.3.30",
"fuzzy",
@@ -8512,6 +8881,7 @@ dependencies = [
"serde_derive",
"serde_json",
"settings",
+ "smallvec",
"theme",
"ui",
"util",
@@ -8977,8 +9347,6 @@ version = "0.1.0"
dependencies = [
"anyhow",
"auto_update",
- "client",
- "dev_server_projects",
"editor",
"file_finder",
"futures 0.3.30",
@@ -8995,14 +9363,12 @@ dependencies = [
"project",
"release_channel",
"remote",
- "rpc",
"schemars",
"serde",
"serde_json",
"settings",
"smol",
"task",
- "terminal_view",
"theme",
"ui",
"util",
@@ -9159,7 +9525,7 @@ dependencies = [
"clap",
"client",
"clock",
- "env_logger",
+ "env_logger 0.11.5",
"fork",
"fs",
"futures 0.3.30",
@@ -9210,7 +9576,7 @@ dependencies = [
"collections",
"command_palette_hooks",
"editor",
- "env_logger",
+ "env_logger 0.11.5",
"futures 0.3.30",
"gpui",
"http_client",
@@ -9490,7 +9856,7 @@ dependencies = [
"arrayvec",
"criterion",
"ctor",
- "env_logger",
+ "env_logger 0.11.5",
"gpui",
"log",
"rand 0.8.5",
@@ -9521,7 +9887,7 @@ dependencies = [
"base64 0.22.1",
"chrono",
"collections",
- "env_logger",
+ "env_logger 0.11.5",
"futures 0.3.30",
"gpui",
"parking_lot",
@@ -10110,7 +10476,7 @@ dependencies = [
"client",
"clock",
"collections",
- "env_logger",
+ "env_logger 0.11.5",
"feature_flags",
"fs",
"futures 0.3.30",
@@ -10803,7 +11169,7 @@ dependencies = [
"futures-io",
"futures-util",
"hashbrown 0.14.5",
- "hashlink",
+ "hashlink 0.9.1",
"hex",
"indexmap 2.4.0",
"log",
@@ -11127,7 +11493,7 @@ version = "0.1.0"
dependencies = [
"arrayvec",
"ctor",
- "env_logger",
+ "env_logger 0.11.5",
"log",
"rand 0.8.5",
"rayon",
@@ -11141,7 +11507,7 @@ dependencies = [
"client",
"collections",
"editor",
- "env_logger",
+ "env_logger 0.11.5",
"futures 0.3.30",
"gpui",
"http_client",
@@ -11440,7 +11806,7 @@ dependencies = [
"collections",
"ctor",
"editor",
- "env_logger",
+ "env_logger 0.11.5",
"gpui",
"language",
"menu",
@@ -11647,7 +12013,7 @@ dependencies = [
"clock",
"collections",
"ctor",
- "env_logger",
+ "env_logger 0.11.5",
"gpui",
"http_client",
"log",
@@ -11908,7 +12274,6 @@ dependencies = [
"client",
"collections",
"command_palette",
- "dev_server_projects",
"editor",
"extensions_ui",
"feature_flags",
@@ -12137,6 +12502,21 @@ dependencies = [
"winnow 0.6.18",
]
+[[package]]
+name = "toolchain_selector"
+version = "0.1.0"
+dependencies = [
+ "editor",
+ "fuzzy",
+ "gpui",
+ "language",
+ "picker",
+ "project",
+ "ui",
+ "util",
+ "workspace",
+]
+
[[package]]
name = "topological-sort"
version = "0.2.2"
@@ -12882,6 +13262,7 @@ dependencies = [
"git",
"gpui",
"picker",
+ "project",
"ui",
"util",
"workspace",
@@ -14305,8 +14686,7 @@ dependencies = [
"collections",
"db",
"derive_more",
- "dev_server_projects",
- "env_logger",
+ "env_logger 0.11.5",
"fs",
"futures 0.3.30",
"git",
@@ -14343,7 +14723,7 @@ dependencies = [
"anyhow",
"clock",
"collections",
- "env_logger",
+ "env_logger 0.11.5",
"fs",
"futures 0.3.30",
"fuzzy",
@@ -14513,6 +14893,17 @@ dependencies = [
"clap",
]
+[[package]]
+name = "yaml-rust2"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8902160c4e6f2fb145dbe9d6760a75e3c9522d8bf796ed7047c85919ac7115f8"
+dependencies = [
+ "arraydeque",
+ "encoding_rs",
+ "hashlink 0.8.4",
+]
+
[[package]]
name = "yansi"
version = "1.0.1"
@@ -14624,10 +15015,9 @@ dependencies = [
"command_palette_hooks",
"copilot",
"db",
- "dev_server_projects",
"diagnostics",
"editor",
- "env_logger",
+ "env_logger 0.11.5",
"extension",
"extensions_ui",
"feature_flags",
@@ -14640,7 +15030,6 @@ dependencies = [
"git_hosting_providers",
"go_to_line",
"gpui",
- "headless",
"http_client",
"image_viewer",
"inline_completion_button",
@@ -14695,6 +15084,7 @@ dependencies = [
"theme",
"theme_selector",
"time",
+ "toolchain_selector",
"tree-sitter-md",
"tree-sitter-rust",
"ui",
@@ -14742,7 +15132,7 @@ dependencies = [
[[package]]
name = "zed_dart"
-version = "0.1.1"
+version = "0.1.2"
dependencies = [
"zed_extension_api 0.1.0",
]
@@ -23,7 +23,6 @@ members = [
"crates/context_servers",
"crates/copilot",
"crates/db",
- "crates/dev_server_projects",
"crates/diagnostics",
"crates/docs_preprocessor",
"crates/editor",
@@ -45,7 +44,6 @@ members = [
"crates/google_ai",
"crates/gpui",
"crates/gpui_macros",
- "crates/headless",
"crates/html_to_markdown",
"crates/http_client",
"crates/image_viewer",
@@ -119,6 +117,7 @@ members = [
"crates/theme_selector",
"crates/time_format",
"crates/title_bar",
+ "crates/toolchain_selector",
"crates/ui",
"crates/ui_input",
"crates/ui_macros",
@@ -201,7 +200,6 @@ command_palette_hooks = { path = "crates/command_palette_hooks" }
context_servers = { path = "crates/context_servers" }
copilot = { path = "crates/copilot" }
db = { path = "crates/db" }
-dev_server_projects = { path = "crates/dev_server_projects" }
diagnostics = { path = "crates/diagnostics" }
editor = { path = "crates/editor" }
extension = { path = "crates/extension" }
@@ -219,7 +217,6 @@ go_to_line = { path = "crates/go_to_line" }
google_ai = { path = "crates/google_ai" }
gpui = { path = "crates/gpui", default-features = false, features = ["http_client"]}
gpui_macros = { path = "crates/gpui_macros" }
-headless = { path = "crates/headless" }
html_to_markdown = { path = "crates/html_to_markdown" }
http_client = { path = "crates/http_client" }
image_viewer = { path = "crates/image_viewer" }
@@ -294,6 +291,7 @@ theme_importer = { path = "crates/theme_importer" }
theme_selector = { path = "crates/theme_selector" }
time_format = { path = "crates/time_format" }
title_bar = { path = "crates/title_bar" }
+toolchain_selector = { path = "crates/toolchain_selector" }
ui = { path = "crates/ui" }
ui_input = { path = "crates/ui_input" }
ui_macros = { path = "crates/ui_macros" }
@@ -380,6 +378,11 @@ ordered-float = "2.1.1"
palette = { version = "0.7.5", default-features = false, features = ["std"] }
parking_lot = "0.12.1"
pathdiff = "0.2"
+pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
+pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
+pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
+pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
+pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
postage = { version = "0.5", features = ["futures-traits"] }
pretty_assertions = "1.3.0"
profiling = "1"
@@ -313,6 +313,15 @@
"ctrl-k ctrl-l": "editor::ToggleFold",
"ctrl-k ctrl-[": "editor::FoldRecursive",
"ctrl-k ctrl-]": "editor::UnfoldRecursive",
+ "ctrl-k ctrl-1": ["editor::FoldAtLevel", { "level": 1 }],
+ "ctrl-k ctrl-2": ["editor::FoldAtLevel", { "level": 2 }],
+ "ctrl-k ctrl-3": ["editor::FoldAtLevel", { "level": 3 }],
+ "ctrl-k ctrl-4": ["editor::FoldAtLevel", { "level": 4 }],
+ "ctrl-k ctrl-5": ["editor::FoldAtLevel", { "level": 5 }],
+ "ctrl-k ctrl-6": ["editor::FoldAtLevel", { "level": 6 }],
+ "ctrl-k ctrl-7": ["editor::FoldAtLevel", { "level": 7 }],
+ "ctrl-k ctrl-8": ["editor::FoldAtLevel", { "level": 8 }],
+ "ctrl-k ctrl-9": ["editor::FoldAtLevel", { "level": 9 }],
"ctrl-k ctrl-0": "editor::FoldAll",
"ctrl-k ctrl-j": "editor::UnfoldAll",
"ctrl-space": "editor::ShowCompletions",
@@ -505,6 +514,13 @@
"ctrl-enter": "assistant::InlineAssist"
}
},
+ {
+ "context": "ProposedChangesEditor",
+ "bindings": {
+ "ctrl-shift-y": "editor::ApplyDiffHunk",
+ "ctrl-alt-a": "editor::ApplyAllDiffHunks"
+ }
+ },
{
"context": "Editor && jupyter && !ContextEditor",
"bindings": {
@@ -349,7 +349,15 @@
"alt-cmd-]": "editor::UnfoldLines",
"cmd-k cmd-l": "editor::ToggleFold",
"cmd-k cmd-[": "editor::FoldRecursive",
- "cmd-k cmd-]": "editor::UnfoldRecursive",
+ "cmd-k cmd-1": ["editor::FoldAtLevel", { "level": 1 }],
+ "cmd-k cmd-2": ["editor::FoldAtLevel", { "level": 2 }],
+ "cmd-k cmd-3": ["editor::FoldAtLevel", { "level": 3 }],
+ "cmd-k cmd-4": ["editor::FoldAtLevel", { "level": 4 }],
+ "cmd-k cmd-5": ["editor::FoldAtLevel", { "level": 5 }],
+ "cmd-k cmd-6": ["editor::FoldAtLevel", { "level": 6 }],
+ "cmd-k cmd-7": ["editor::FoldAtLevel", { "level": 7 }],
+ "cmd-k cmd-8": ["editor::FoldAtLevel", { "level": 8 }],
+ "cmd-k cmd-9": ["editor::FoldAtLevel", { "level": 9 }],
"cmd-k cmd-0": "editor::FoldAll",
"cmd-k cmd-j": "editor::UnfoldAll",
"ctrl-space": "editor::ShowCompletions",
@@ -538,6 +546,13 @@
"ctrl-enter": "assistant::InlineAssist"
}
},
+ {
+ "context": "ProposedChangesEditor",
+ "bindings": {
+ "cmd-shift-y": "editor::ApplyDiffHunk",
+ "cmd-shift-a": "editor::ApplyAllDiffHunks"
+ }
+ },
{
"context": "PromptEditor",
"bindings": {
@@ -88,7 +88,6 @@ origin: (f64, f64),
<edit>
<path>src/shapes/rectangle.rs</path>
-<description>Update the Rectangle's new function to take an origin parameter</description>
<operation>update</operation>
<old_text>
fn new(width: f64, height: f64) -> Self {
@@ -117,7 +116,6 @@ pub struct Circle {
<edit>
<path>src/shapes/circle.rs</path>
-<description>Update the Circle's new function to take an origin parameter</description>
<operation>update</operation>
<old_text>
fn new(radius: f64) -> Self {
@@ -134,7 +132,6 @@ fn new(origin: (f64, f64), radius: f64) -> Self {
<edit>
<path>src/shapes/rectangle.rs</path>
-<description>Add an import for the std::fmt module</description>
<operation>insert_before</operation>
<old_text>
struct Rectangle {
@@ -147,7 +144,10 @@ use std::fmt;
<edit>
<path>src/shapes/rectangle.rs</path>
-<description>Add a Display implementation for Rectangle</description>
+<description>
+Add a manual Display implementation for Rectangle.
+Currently, this is the same as a derived Display implementation.
+</description>
<operation>insert_after</operation>
<old_text>
Rectangle { width, height }
@@ -169,7 +169,6 @@ impl fmt::Display for Rectangle {
<edit>
<path>src/shapes/circle.rs</path>
-<description>Add an import for the `std::fmt` module</description>
<operation>insert_before</operation>
<old_text>
struct Circle {
@@ -181,7 +180,6 @@ use std::fmt;
<edit>
<path>src/shapes/circle.rs</path>
-<description>Add a Display implementation for Circle</description>
<operation>insert_after</operation>
<old_text>
Circle { radius }
@@ -346,6 +346,8 @@
"git_status": true,
// Amount of indentation for nested items.
"indent_size": 20,
+ // Whether to show indent guides in the project panel.
+ "indent_guides": true,
// Whether to reveal it in the project panel automatically,
// when a corresponding project entry becomes active.
// Gitignored entries are never auto revealed.
@@ -386,6 +388,8 @@
"git_status": true,
// Amount of indentation for nested items.
"indent_size": 20,
+ // Whether to show indent guides in the outline panel.
+ "indent_guides": true,
// Whether to reveal it in the outline panel automatically,
// when a corresponding outline entry becomes active.
// Gitignored entries are never auto revealed.
@@ -775,6 +779,7 @@
"tasks": {
"variables": {}
},
+ "toolchain": { "name": "default", "path": "default" },
// An object whose keys are language names, and whose values
// are arrays of filenames or extensions of files that should
// use those languages.
@@ -1099,13 +1104,13 @@
// }
"command_aliases": {},
// ssh_connections is an array of ssh connections.
- // By default this setting is null, which disables the direct ssh connection support.
// You can configure these from `project: Open Remote` in the command palette.
// Zed's ssh support will pull configuration from your ~/.ssh too.
// Examples:
// [
// {
// "host": "example-box",
+ // // "port": 22, "username": "test", "args": ["-i", "/home/user/.ssh/id_rsa"]
// "projects": [
// {
// "paths": ["/home/user/code/zed"]
@@ -1113,7 +1118,7 @@
// ]
// }
// ]
- "ssh_connections": null,
+ "ssh_connections": [],
// Configures the Context Server Protocol binaries
//
// Examples:
@@ -298,25 +298,64 @@ fn register_context_server_handlers(cx: &mut AppContext) {
return;
};
- if let Some(prompts) = protocol.list_prompts().await.log_err() {
- for prompt in prompts
- .into_iter()
- .filter(context_server_command::acceptable_prompt)
- {
- log::info!(
- "registering context server command: {:?}",
- prompt.name
- );
- context_server_registry.register_command(
- server.id.clone(),
- prompt.name.as_str(),
- );
- slash_command_registry.register_command(
- context_server_command::ContextServerSlashCommand::new(
- &server, prompt,
- ),
- true,
- );
+ if protocol.capable(context_servers::protocol::ServerCapability::Prompts) {
+ if let Some(prompts) = protocol.list_prompts().await.log_err() {
+ for prompt in prompts
+ .into_iter()
+ .filter(context_server_command::acceptable_prompt)
+ {
+ log::info!(
+ "registering context server command: {:?}",
+ prompt.name
+ );
+ context_server_registry.register_command(
+ server.id.clone(),
+ prompt.name.as_str(),
+ );
+ slash_command_registry.register_command(
+ context_server_command::ContextServerSlashCommand::new(
+ &server, prompt,
+ ),
+ true,
+ );
+ }
+ }
+ }
+ })
+ .detach();
+ }
+ },
+ );
+
+ cx.update_model(
+ &manager,
+ |manager: &mut context_servers::manager::ContextServerManager, cx| {
+ let tool_registry = ToolRegistry::global(cx);
+ let context_server_registry = ContextServerRegistry::global(cx);
+ if let Some(server) = manager.get_server(server_id) {
+ cx.spawn(|_, _| async move {
+ let Some(protocol) = server.client.read().clone() else {
+ return;
+ };
+
+ if protocol.capable(context_servers::protocol::ServerCapability::Tools) {
+ if let Some(tools) = protocol.list_tools().await.log_err() {
+ for tool in tools.tools {
+ log::info!(
+ "registering context server tool: {:?}",
+ tool.name
+ );
+ context_server_registry.register_tool(
+ server.id.clone(),
+ tool.name.as_str(),
+ );
+ tool_registry.register_tool(
+ tools::context_server_tool::ContextServerTool::new(
+ server.id.clone(),
+ tool
+ ),
+ );
+ }
}
}
})
@@ -334,6 +373,14 @@ fn register_context_server_handlers(cx: &mut AppContext) {
context_server_registry.unregister_command(&server_id, &command_name);
}
}
+
+ if let Some(tools) = context_server_registry.get_tools(server_id) {
+ let tool_registry = ToolRegistry::global(cx);
+ for tool_name in tools {
+ tool_registry.unregister_tool_by_name(&tool_name);
+ context_server_registry.unregister_tool(&server_id, &tool_name);
+ }
+ }
}
},
)
@@ -26,8 +26,8 @@ use collections::{BTreeSet, HashMap, HashSet};
use editor::{
actions::{FoldAt, MoveToEndOfLine, Newline, ShowCompletions, UnfoldAt},
display_map::{
- BlockContext, BlockDisposition, BlockId, BlockProperties, BlockStyle, Crease,
- CreaseMetadata, CustomBlockId, FoldId, RenderBlock, ToDisplayPoint,
+ BlockContext, BlockId, BlockPlacement, BlockProperties, BlockStyle, Crease, CreaseMetadata,
+ CustomBlockId, FoldId, RenderBlock, ToDisplayPoint,
},
scroll::{Autoscroll, AutoscrollStrategy},
Anchor, Editor, EditorEvent, ProposedChangeLocation, ProposedChangesEditor, RowExt,
@@ -963,7 +963,7 @@ impl AssistantPanel {
fn new_context(&mut self, cx: &mut ViewContext<Self>) -> Option<View<ContextEditor>> {
let project = self.project.read(cx);
- if project.is_via_collab() && project.dev_server_project_id().is_none() {
+ if project.is_via_collab() {
let task = self
.context_store
.update(cx, |store, cx| store.create_remote_context(cx));
@@ -2009,13 +2009,12 @@ impl ContextEditor {
})
.map(|(command, error_message)| BlockProperties {
style: BlockStyle::Fixed,
- position: Anchor {
+ height: 1,
+ placement: BlockPlacement::Below(Anchor {
buffer_id: Some(buffer_id),
excerpt_id,
text_anchor: command.source_range.start,
- },
- height: 1,
- disposition: BlockDisposition::Below,
+ }),
render: slash_command_error_block_renderer(error_message),
priority: 0,
}),
@@ -2242,11 +2241,10 @@ impl ContextEditor {
} else {
let block_ids = editor.insert_blocks(
[BlockProperties {
- position: patch_start,
height: path_count as u32 + 1,
style: BlockStyle::Flex,
render: render_block,
- disposition: BlockDisposition::Below,
+ placement: BlockPlacement::Below(patch_start),
priority: 0,
}],
None,
@@ -2731,12 +2729,13 @@ impl ContextEditor {
})
};
let create_block_properties = |message: &Message| BlockProperties {
- position: buffer
- .anchor_in_excerpt(excerpt_id, message.anchor_range.start)
- .unwrap(),
height: 2,
style: BlockStyle::Sticky,
- disposition: BlockDisposition::Above,
+ placement: BlockPlacement::Above(
+ buffer
+ .anchor_in_excerpt(excerpt_id, message.anchor_range.start)
+ .unwrap(),
+ ),
priority: usize::MAX,
render: render_block(MessageMetadata::from(message)),
};
@@ -3372,7 +3371,7 @@ impl ContextEditor {
let anchor = buffer.anchor_in_excerpt(excerpt_id, anchor).unwrap();
let image = render_image.clone();
anchor.is_valid(&buffer).then(|| BlockProperties {
- position: anchor,
+ placement: BlockPlacement::Above(anchor),
height: MAX_HEIGHT_IN_LINES,
style: BlockStyle::Sticky,
render: Box::new(move |cx| {
@@ -3393,8 +3392,6 @@ impl ContextEditor {
)
.into_any_element()
}),
-
- disposition: BlockDisposition::Above,
priority: 0,
})
})
@@ -3949,7 +3946,7 @@ impl Render for ContextEditor {
.bg(cx.theme().colors().editor_background)
.child(
h_flex()
- .gap_2()
+ .gap_1()
.child(render_inject_context_menu(cx.view().downgrade(), cx))
.child(
IconButton::new("quote-button", IconName::Quote)
@@ -4249,11 +4246,11 @@ fn render_inject_context_menu(
slash_command_picker::SlashCommandSelector::new(
commands.clone(),
active_context_editor,
- IconButton::new("trigger", IconName::SlashSquare)
+ Button::new("trigger", "Add Context")
+ .icon(IconName::Plus)
.icon_size(IconSize::Small)
- .tooltip(|cx| {
- Tooltip::with_meta("Insert Context", None, "Type / to insert via keyboard", cx)
- }),
+ .icon_position(IconPosition::Start)
+ .tooltip(|cx| Tooltip::text("Type / to insert via keyboard", cx)),
)
}
@@ -7,7 +7,7 @@ use crate::{
};
use anyhow::{anyhow, Context as _, Result};
use assistant_slash_command::{
- SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry,
+ SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry, SlashCommandResult,
};
use assistant_tool::ToolRegistry;
use client::{self, proto, telemetry::Telemetry};
@@ -1677,7 +1677,7 @@ impl Context {
pub fn insert_command_output(
&mut self,
command_range: Range<language::Anchor>,
- output: Task<Result<SlashCommandOutput>>,
+ output: Task<SlashCommandResult>,
ensure_trailing_newline: bool,
expand_result: bool,
cx: &mut ModelContext<Self>,
@@ -1688,19 +1688,13 @@ impl Context {
let command_range = command_range.clone();
async move {
let output = output.await;
+ let output = match output {
+ Ok(output) => SlashCommandOutput::from_event_stream(output).await,
+ Err(err) => Err(err),
+ };
this.update(&mut cx, |this, cx| match output {
Ok(mut output) => {
- // Ensure section ranges are valid.
- for section in &mut output.sections {
- section.range.start = section.range.start.min(output.text.len());
- section.range.end = section.range.end.min(output.text.len());
- while !output.text.is_char_boundary(section.range.start) {
- section.range.start -= 1;
- }
- while !output.text.is_char_boundary(section.range.end) {
- section.range.end += 1;
- }
- }
+ output.ensure_valid_section_ranges();
// Ensure there is a newline after the last section.
if ensure_trailing_newline {
@@ -6,7 +6,7 @@ use crate::{
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
- SlashCommandRegistry,
+ SlashCommandRegistry, SlashCommandResult,
};
use collections::HashSet;
use fs::FakeFs;
@@ -636,7 +636,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
kind: AssistantEditKind::InsertAfter {
old_text: "fn one".into(),
new_text: "fn two() {}".into(),
- description: "add a `two` function".into(),
+ description: Some("add a `two` function".into()),
},
}]],
cx,
@@ -690,7 +690,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
kind: AssistantEditKind::InsertAfter {
old_text: "fn zero".into(),
new_text: "fn two() {}".into(),
- description: "add a `two` function".into(),
+ description: Some("add a `two` function".into()),
},
}]],
cx,
@@ -754,7 +754,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
kind: AssistantEditKind::InsertAfter {
old_text: "fn zero".into(),
new_text: "fn two() {}".into(),
- description: "add a `two` function".into(),
+ description: Some("add a `two` function".into()),
},
}]],
cx,
@@ -798,7 +798,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
kind: AssistantEditKind::InsertAfter {
old_text: "fn zero".into(),
new_text: "fn two() {}".into(),
- description: "add a `two` function".into(),
+ description: Some("add a `two` function".into()),
},
}]],
cx,
@@ -1097,7 +1097,8 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std
text: output_text,
sections,
run_commands_in_text: false,
- })),
+ }
+ .to_event_stream())),
true,
false,
cx,
@@ -1416,11 +1417,12 @@ impl SlashCommand for FakeSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
_cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
Task::ready(Ok(SlashCommandOutput {
text: format!("Executed fake command: {}", self.0),
sections: vec![],
run_commands_in_text: false,
- }))
+ }
+ .to_event_stream()))
}
}
@@ -9,7 +9,7 @@ use collections::{hash_map, HashMap, HashSet, VecDeque};
use editor::{
actions::{MoveDown, MoveUp, SelectAll},
display_map::{
- BlockContext, BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
+ BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
ToDisplayPoint,
},
Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorElement, EditorEvent, EditorMode,
@@ -54,7 +54,7 @@ use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
use terminal_view::terminal_panel::TerminalPanel;
use text::{OffsetRangeExt, ToPoint as _};
use theme::ThemeSettings;
-use ui::{prelude::*, CheckboxWithLabel, IconButtonShape, Popover, Tooltip};
+use ui::{prelude::*, text_for_action, CheckboxWithLabel, IconButtonShape, Popover, Tooltip};
use util::{RangeExt, ResultExt};
use workspace::{notifications::NotificationId, ItemHandle, Toast, Workspace};
@@ -446,15 +446,14 @@ impl InlineAssistant {
let assist_blocks = vec![
BlockProperties {
style: BlockStyle::Sticky,
- position: range.start,
+ placement: BlockPlacement::Above(range.start),
height: prompt_editor_height,
render: build_assist_editor_renderer(prompt_editor),
- disposition: BlockDisposition::Above,
priority: 0,
},
BlockProperties {
style: BlockStyle::Sticky,
- position: range.end,
+ placement: BlockPlacement::Below(range.end),
height: 0,
render: Box::new(|cx| {
v_flex()
@@ -464,7 +463,6 @@ impl InlineAssistant {
.border_color(cx.theme().status().info_border)
.into_any_element()
}),
- disposition: BlockDisposition::Below,
priority: 0,
},
];
@@ -1179,7 +1177,7 @@ impl InlineAssistant {
let height =
deleted_lines_editor.update(cx, |editor, cx| editor.max_point(cx).row().0 + 1);
new_blocks.push(BlockProperties {
- position: new_row,
+ placement: BlockPlacement::Above(new_row),
height,
style: BlockStyle::Flex,
render: Box::new(move |cx| {
@@ -1191,7 +1189,6 @@ impl InlineAssistant {
.child(deleted_lines_editor.clone())
.into_any_element()
}),
- disposition: BlockDisposition::Above,
priority: 0,
});
}
@@ -1599,7 +1596,7 @@ impl PromptEditor {
// always show the cursor (even when it isn't focused) because
// typing in one will make what you typed appear in all of them.
editor.set_show_cursor_when_unfocused(true, cx);
- editor.set_placeholder_text("Add a prompt…", cx);
+ editor.set_placeholder_text(Self::placeholder_text(codegen.read(cx), cx), cx);
editor
});
@@ -1656,6 +1653,7 @@ impl PromptEditor {
self.editor = cx.new_view(|cx| {
let mut editor = Editor::auto_height(Self::MAX_LINES as usize, cx);
editor.set_soft_wrap_mode(language::language_settings::SoftWrap::EditorWidth, cx);
+ editor.set_placeholder_text(Self::placeholder_text(self.codegen.read(cx), cx), cx);
editor.set_placeholder_text("Add a prompt…", cx);
editor.set_text(prompt, cx);
if focus {
@@ -1666,6 +1664,20 @@ impl PromptEditor {
self.subscribe_to_editor(cx);
}
+ fn placeholder_text(codegen: &Codegen, cx: &WindowContext) -> String {
+ let context_keybinding = text_for_action(&crate::ToggleFocus, cx)
+ .map(|keybinding| format!(" • {keybinding} for context"))
+ .unwrap_or_default();
+
+ let action = if codegen.is_insertion {
+ "Generate"
+ } else {
+ "Transform"
+ };
+
+ format!("{action}…{context_keybinding} • ↓↑ for history")
+ }
+
fn prompt(&self, cx: &AppContext) -> String {
self.editor.read(cx).text(cx)
}
@@ -2263,6 +2275,7 @@ pub struct Codegen {
initial_transaction_id: Option<TransactionId>,
telemetry: Option<Arc<Telemetry>>,
builder: Arc<PromptBuilder>,
+ is_insertion: bool,
}
impl Codegen {
@@ -2285,6 +2298,7 @@ impl Codegen {
)
});
let mut this = Self {
+ is_insertion: range.to_offset(&buffer.read(cx).snapshot(cx)).is_empty(),
alternatives: vec![codegen],
active_alternative: 0,
seen_alternatives: HashSet::default(),
@@ -2686,7 +2700,7 @@ impl CodegenAlternative {
let prompt = self
.builder
- .generate_content_prompt(user_prompt, language_name, buffer, range)
+ .generate_inline_transformation_prompt(user_prompt, language_name, buffer, range)
.map_err(|e| anyhow::anyhow!("Failed to generate content prompt: {}", e))?;
let mut messages = Vec::new();
@@ -158,39 +158,34 @@ impl PickerDelegate for ModelPickerDelegate {
.spacing(ListItemSpacing::Sparse)
.selected(selected)
.start_slot(
- div().pr_1().child(
+ div().pr_0p5().child(
Icon::new(model_info.icon)
.color(Color::Muted)
.size(IconSize::Medium),
),
)
.child(
- h_flex()
- .w_full()
- .justify_between()
- .font_buffer(cx)
- .min_w(px(240.))
- .child(
- h_flex()
- .gap_2()
- .child(Label::new(model_info.model.name().0.clone()))
- .child(
- Label::new(provider_name)
- .size(LabelSize::XSmall)
- .color(Color::Muted),
- )
- .children(match model_info.availability {
- LanguageModelAvailability::Public => None,
- LanguageModelAvailability::RequiresPlan(Plan::Free) => None,
- LanguageModelAvailability::RequiresPlan(Plan::ZedPro) => {
- show_badges.then(|| {
- Label::new("Pro")
- .size(LabelSize::XSmall)
- .color(Color::Muted)
- })
- }
- }),
- ),
+ h_flex().w_full().justify_between().min_w(px(200.)).child(
+ h_flex()
+ .gap_1p5()
+ .child(Label::new(model_info.model.name().0.clone()))
+ .child(
+ Label::new(provider_name)
+ .size(LabelSize::XSmall)
+ .color(Color::Muted),
+ )
+ .children(match model_info.availability {
+ LanguageModelAvailability::Public => None,
+ LanguageModelAvailability::RequiresPlan(Plan::Free) => None,
+ LanguageModelAvailability::RequiresPlan(Plan::ZedPro) => {
+ show_badges.then(|| {
+ Label::new("Pro")
+ .size(LabelSize::XSmall)
+ .color(Color::Muted)
+ })
+ }
+ }),
+ ),
)
.end_slot(div().when(model_info.is_selected, |this| {
this.child(
@@ -212,7 +207,7 @@ impl PickerDelegate for ModelPickerDelegate {
h_flex()
.w_full()
.border_t_1()
- .border_color(cx.theme().colors().border)
+ .border_color(cx.theme().colors().border_variant)
.p_1()
.gap_4()
.justify_between()
@@ -33,21 +33,21 @@ pub enum AssistantEditKind {
Update {
old_text: String,
new_text: String,
- description: String,
+ description: Option<String>,
},
Create {
new_text: String,
- description: String,
+ description: Option<String>,
},
InsertBefore {
old_text: String,
new_text: String,
- description: String,
+ description: Option<String>,
},
InsertAfter {
old_text: String,
new_text: String,
- description: String,
+ description: Option<String>,
},
Delete {
old_text: String,
@@ -86,19 +86,37 @@ enum SearchDirection {
Diagonal,
}
-// A measure of the currently quality of an in-progress fuzzy search.
-//
-// Uses 60 bits to store a numeric cost, and 4 bits to store the preceding
-// operation in the search.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
struct SearchState {
- score: u32,
+ cost: u32,
direction: SearchDirection,
}
impl SearchState {
- fn new(score: u32, direction: SearchDirection) -> Self {
- Self { score, direction }
+ fn new(cost: u32, direction: SearchDirection) -> Self {
+ Self { cost, direction }
+ }
+}
+
+struct SearchMatrix {
+ cols: usize,
+ data: Vec<SearchState>,
+}
+
+impl SearchMatrix {
+ fn new(rows: usize, cols: usize) -> Self {
+ SearchMatrix {
+ cols,
+ data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
+ }
+ }
+
+ fn get(&self, row: usize, col: usize) -> SearchState {
+ self.data[row * self.cols + col]
+ }
+
+ fn set(&mut self, row: usize, col: usize, cost: SearchState) {
+ self.data[row * self.cols + col] = cost;
}
}
@@ -187,23 +205,23 @@ impl AssistantEdit {
"update" => AssistantEditKind::Update {
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
- description: description.ok_or_else(|| anyhow!("missing description"))?,
+ description,
},
"insert_before" => AssistantEditKind::InsertBefore {
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
- description: description.ok_or_else(|| anyhow!("missing description"))?,
+ description,
},
"insert_after" => AssistantEditKind::InsertAfter {
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
- description: description.ok_or_else(|| anyhow!("missing description"))?,
+ description,
},
"delete" => AssistantEditKind::Delete {
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
},
"create" => AssistantEditKind::Create {
- description: description.ok_or_else(|| anyhow!("missing description"))?,
+ description,
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
},
_ => Err(anyhow!("unknown operation {operation:?}"))?,
@@ -264,7 +282,7 @@ impl AssistantEditKind {
ResolvedEdit {
range,
new_text,
- description: Some(description),
+ description,
}
}
Self::Create {
@@ -272,7 +290,7 @@ impl AssistantEditKind {
description,
} => ResolvedEdit {
range: text::Anchor::MIN..text::Anchor::MAX,
- description: Some(description),
+ description,
new_text,
},
Self::InsertBefore {
@@ -285,7 +303,7 @@ impl AssistantEditKind {
ResolvedEdit {
range: range.start..range.start,
new_text,
- description: Some(description),
+ description,
}
}
Self::InsertAfter {
@@ -298,7 +316,7 @@ impl AssistantEditKind {
ResolvedEdit {
range: range.end..range.end,
new_text,
- description: Some(description),
+ description,
}
}
Self::Delete { old_text } => {
@@ -314,44 +332,29 @@ impl AssistantEditKind {
fn resolve_location(buffer: &text::BufferSnapshot, search_query: &str) -> Range<text::Anchor> {
const INSERTION_COST: u32 = 3;
+ const DELETION_COST: u32 = 10;
const WHITESPACE_INSERTION_COST: u32 = 1;
- const DELETION_COST: u32 = 3;
const WHITESPACE_DELETION_COST: u32 = 1;
- const EQUALITY_BONUS: u32 = 5;
-
- struct Matrix {
- cols: usize,
- data: Vec<SearchState>,
- }
-
- impl Matrix {
- fn new(rows: usize, cols: usize) -> Self {
- Matrix {
- cols,
- data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
- }
- }
-
- fn get(&self, row: usize, col: usize) -> SearchState {
- self.data[row * self.cols + col]
- }
-
- fn set(&mut self, row: usize, col: usize, cost: SearchState) {
- self.data[row * self.cols + col] = cost;
- }
- }
let buffer_len = buffer.len();
let query_len = search_query.len();
- let mut matrix = Matrix::new(query_len + 1, buffer_len + 1);
-
+ let mut matrix = SearchMatrix::new(query_len + 1, buffer_len + 1);
+ let mut leading_deletion_cost = 0_u32;
for (row, query_byte) in search_query.bytes().enumerate() {
+ let deletion_cost = if query_byte.is_ascii_whitespace() {
+ WHITESPACE_DELETION_COST
+ } else {
+ DELETION_COST
+ };
+
+ leading_deletion_cost = leading_deletion_cost.saturating_add(deletion_cost);
+ matrix.set(
+ row + 1,
+ 0,
+ SearchState::new(leading_deletion_cost, SearchDirection::Diagonal),
+ );
+
for (col, buffer_byte) in buffer.bytes_in_range(0..buffer.len()).flatten().enumerate() {
- let deletion_cost = if query_byte.is_ascii_whitespace() {
- WHITESPACE_DELETION_COST
- } else {
- DELETION_COST
- };
let insertion_cost = if buffer_byte.is_ascii_whitespace() {
WHITESPACE_INSERTION_COST
} else {
@@ -359,38 +362,35 @@ impl AssistantEditKind {
};
let up = SearchState::new(
- matrix.get(row, col + 1).score.saturating_sub(deletion_cost),
+ matrix.get(row, col + 1).cost.saturating_add(deletion_cost),
SearchDirection::Up,
);
let left = SearchState::new(
- matrix
- .get(row + 1, col)
- .score
- .saturating_sub(insertion_cost),
+ matrix.get(row + 1, col).cost.saturating_add(insertion_cost),
SearchDirection::Left,
);
let diagonal = SearchState::new(
if query_byte == *buffer_byte {
- matrix.get(row, col).score.saturating_add(EQUALITY_BONUS)
+ matrix.get(row, col).cost
} else {
matrix
.get(row, col)
- .score
- .saturating_sub(deletion_cost + insertion_cost)
+ .cost
+ .saturating_add(deletion_cost + insertion_cost)
},
SearchDirection::Diagonal,
);
- matrix.set(row + 1, col + 1, up.max(left).max(diagonal));
+ matrix.set(row + 1, col + 1, up.min(left).min(diagonal));
}
}
// Traceback to find the best match
let mut best_buffer_end = buffer_len;
- let mut best_score = 0;
+ let mut best_cost = u32::MAX;
for col in 1..=buffer_len {
- let score = matrix.get(query_len, col).score;
- if score > best_score {
- best_score = score;
+ let cost = matrix.get(query_len, col).cost;
+ if cost < best_cost {
+ best_cost = cost;
best_buffer_end = col;
}
}
@@ -560,89 +560,84 @@ mod tests {
language_settings::AllLanguageSettings, Language, LanguageConfig, LanguageMatcher,
};
use settings::SettingsStore;
- use text::{OffsetRangeExt, Point};
use ui::BorrowAppContext;
use unindent::Unindent as _;
+ use util::test::{generate_marked_text, marked_text_ranges};
#[gpui::test]
fn test_resolve_location(cx: &mut AppContext) {
- {
- let buffer = cx.new_model(|cx| {
- Buffer::local(
- concat!(
- " Lorem\n",
- " ipsum\n",
- " dolor sit amet\n",
- " consecteur",
- ),
- cx,
- )
- });
- let snapshot = buffer.read(cx).snapshot();
- assert_eq!(
- AssistantEditKind::resolve_location(&snapshot, "ipsum\ndolor").to_point(&snapshot),
- Point::new(1, 0)..Point::new(2, 18)
- );
- }
+ assert_location_resolution(
+ concat!(
+ " Lorem\n",
+ "« ipsum\n",
+ " dolor sit amet»\n",
+ " consecteur",
+ ),
+ "ipsum\ndolor",
+ cx,
+ );
- {
- let buffer = cx.new_model(|cx| {
- Buffer::local(
- concat!(
- "fn foo1(a: usize) -> usize {\n",
- " 40\n",
- "}\n",
- "\n",
- "fn foo2(b: usize) -> usize {\n",
- " 42\n",
- "}\n",
- ),
- cx,
- )
- });
- let snapshot = buffer.read(cx).snapshot();
- assert_eq!(
- AssistantEditKind::resolve_location(&snapshot, "fn foo1(b: usize) {\n40\n}")
- .to_point(&snapshot),
- Point::new(0, 0)..Point::new(2, 1)
- );
- }
+ assert_location_resolution(
+ &"
+ «fn foo1(a: usize) -> usize {
+ 40
+ }»
- {
- let buffer = cx.new_model(|cx| {
- Buffer::local(
- concat!(
- "fn main() {\n",
- " Foo\n",
- " .bar()\n",
- " .baz()\n",
- " .qux()\n",
- "}\n",
- "\n",
- "fn foo2(b: usize) -> usize {\n",
- " 42\n",
- "}\n",
- ),
- cx,
- )
- });
- let snapshot = buffer.read(cx).snapshot();
- assert_eq!(
- AssistantEditKind::resolve_location(&snapshot, "Foo.bar.baz.qux()")
- .to_point(&snapshot),
- Point::new(1, 0)..Point::new(4, 14)
- );
- }
+ fn foo2(b: usize) -> usize {
+ 42
+ }
+ "
+ .unindent(),
+ "fn foo1(b: usize) {\n40\n}",
+ cx,
+ );
+
+ assert_location_resolution(
+ &"
+ fn main() {
+ « Foo
+ .bar()
+ .baz()
+ .qux()»
+ }
+
+ fn foo2(b: usize) -> usize {
+ 42
+ }
+ "
+ .unindent(),
+ "Foo.bar.baz.qux()",
+ cx,
+ );
+
+ assert_location_resolution(
+ &"
+ class Something {
+ one() { return 1; }
+ « two() { return 2222; }
+ three() { return 333; }
+ four() { return 4444; }
+ five() { return 5555; }
+ six() { return 6666; }
+ » seven() { return 7; }
+ eight() { return 8; }
+ }
+ "
+ .unindent(),
+ &"
+ two() { return 2222; }
+ four() { return 4444; }
+ five() { return 5555; }
+ six() { return 6666; }
+ "
+ .unindent(),
+ cx,
+ );
}
#[gpui::test]
fn test_resolve_edits(cx: &mut AppContext) {
- let settings_store = SettingsStore::test(cx);
- cx.set_global(settings_store);
- language::init(cx);
- cx.update_global::<SettingsStore, _>(|settings, cx| {
- settings.update_user_settings::<AllLanguageSettings>(cx, |_| {});
- });
+ init_test(cx);
assert_edits(
"
@@ -675,7 +670,7 @@ mod tests {
last_name: String,
"
.unindent(),
- description: "".into(),
+ description: None,
},
AssistantEditKind::Update {
old_text: "
@@ -690,7 +685,7 @@ mod tests {
}
"
.unindent(),
- description: "".into(),
+ description: None,
},
],
"
@@ -734,7 +729,7 @@ mod tests {
qux();
}"
.unindent(),
- description: "implement bar".into(),
+ description: Some("implement bar".into()),
},
AssistantEditKind::Update {
old_text: "
@@ -747,7 +742,7 @@ mod tests {
bar();
}"
.unindent(),
- description: "call bar in foo".into(),
+ description: Some("call bar in foo".into()),
},
AssistantEditKind::InsertAfter {
old_text: "
@@ -762,7 +757,7 @@ mod tests {
}
"
.unindent(),
- description: "implement qux".into(),
+ description: Some("implement qux".into()),
},
],
"
@@ -814,7 +809,7 @@ mod tests {
}
"
.unindent(),
- description: "pick better number".into(),
+ description: None,
},
AssistantEditKind::Update {
old_text: "
@@ -829,7 +824,7 @@ mod tests {
}
"
.unindent(),
- description: "pick better number".into(),
+ description: None,
},
AssistantEditKind::Update {
old_text: "
@@ -844,7 +839,7 @@ mod tests {
}
"
.unindent(),
- description: "pick better number".into(),
+ description: None,
},
],
"
@@ -865,6 +860,69 @@ mod tests {
.unindent(),
cx,
);
+
+ assert_edits(
+ "
+ impl Person {
+ fn set_name(&mut self, name: String) {
+ self.name = name;
+ }
+
+ fn name(&self) -> String {
+ return self.name;
+ }
+ }
+ "
+ .unindent(),
+ vec![
+ AssistantEditKind::Update {
+ old_text: "self.name = name;".unindent(),
+ new_text: "self._name = name;".unindent(),
+ description: None,
+ },
+ AssistantEditKind::Update {
+ old_text: "return self.name;\n".unindent(),
+ new_text: "return self._name;\n".unindent(),
+ description: None,
+ },
+ ],
+ "
+ impl Person {
+ fn set_name(&mut self, name: String) {
+ self._name = name;
+ }
+
+ fn name(&self) -> String {
+ return self._name;
+ }
+ }
+ "
+ .unindent(),
+ cx,
+ );
+ }
+
+ fn init_test(cx: &mut AppContext) {
+ let settings_store = SettingsStore::test(cx);
+ cx.set_global(settings_store);
+ language::init(cx);
+ cx.update_global::<SettingsStore, _>(|settings, cx| {
+ settings.update_user_settings::<AllLanguageSettings>(cx, |_| {});
+ });
+ }
+
+ #[track_caller]
+ fn assert_location_resolution(
+ text_with_expected_range: &str,
+ query: &str,
+ cx: &mut AppContext,
+ ) {
+ let (text, _) = marked_text_ranges(text_with_expected_range, false);
+ let buffer = cx.new_model(|cx| Buffer::local(text.clone(), cx));
+ let snapshot = buffer.read(cx).snapshot();
+ let range = AssistantEditKind::resolve_location(&snapshot, query).to_offset(&snapshot);
+ let text_with_actual_range = generate_marked_text(&text, &[range], false);
+ pretty_assertions::assert_eq!(text_with_actual_range, text_with_expected_range);
}
#[track_caller]
@@ -204,7 +204,7 @@ impl PromptBuilder {
Ok(())
}
- pub fn generate_content_prompt(
+ pub fn generate_inline_transformation_prompt(
&self,
user_prompt: String,
language_name: Option<&LanguageName>,
@@ -1,7 +1,8 @@
-use super::create_label_for_command;
-use super::{SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Result};
-use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
+use assistant_slash_command::{
+ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
+ SlashCommandResult,
+};
use feature_flags::FeatureFlag;
use futures::StreamExt;
use gpui::{AppContext, AsyncAppContext, Task, WeakView};
@@ -17,6 +18,8 @@ use ui::{BorrowAppContext, WindowContext};
use util::ResultExt;
use workspace::Workspace;
+use crate::slash_command::create_label_for_command;
+
pub struct AutoSlashCommandFeatureFlag;
impl FeatureFlag for AutoSlashCommandFeatureFlag {
@@ -92,7 +95,7 @@ impl SlashCommand for AutoCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
};
@@ -144,7 +147,8 @@ impl SlashCommand for AutoCommand {
text: prompt,
sections: Vec::new(),
run_commands_in_text: true,
- })
+ }
+ .to_event_stream())
})
}
}
@@ -1,6 +1,8 @@
-use super::{SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Context, Result};
-use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
+use assistant_slash_command::{
+ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
+ SlashCommandResult,
+};
use fs::Fs;
use gpui::{AppContext, Model, Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
@@ -123,7 +125,7 @@ impl SlashCommand for CargoWorkspaceSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
let output = workspace.update(cx, |workspace, cx| {
let project = workspace.project().clone();
let fs = workspace.project().read(cx).fs().clone();
@@ -145,7 +147,8 @@ impl SlashCommand for CargoWorkspaceSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
- })
+ }
+ .to_event_stream())
})
});
output.unwrap_or_else(|error| Task::ready(Err(error)))
@@ -1,8 +1,7 @@
-use super::create_label_for_command;
use anyhow::{anyhow, Result};
use assistant_slash_command::{
AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandOutput,
- SlashCommandOutputSection,
+ SlashCommandOutputSection, SlashCommandResult,
};
use collections::HashMap;
use context_servers::{
@@ -17,6 +16,8 @@ use text::LineEnding;
use ui::{IconName, SharedString};
use workspace::Workspace;
+use crate::slash_command::create_label_for_command;
+
pub struct ContextServerSlashCommand {
server_id: String,
prompt: Prompt,
@@ -128,7 +129,7 @@ impl SlashCommand for ContextServerSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
let server_id = self.server_id.clone();
let prompt_name = self.prompt.name.clone();
@@ -184,7 +185,8 @@ impl SlashCommand for ContextServerSlashCommand {
}],
text: prompt,
run_commands_in_text: false,
- })
+ }
+ .to_event_stream())
})
} else {
Task::ready(Err(anyhow!("Context server not found")))
@@ -1,7 +1,9 @@
-use super::{SlashCommand, SlashCommandOutput};
use crate::prompt_library::PromptStore;
use anyhow::{anyhow, Result};
-use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
+use assistant_slash_command::{
+ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
+ SlashCommandResult,
+};
use gpui::{Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
use std::{
@@ -48,7 +50,7 @@ impl SlashCommand for DefaultSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
let store = PromptStore::global(cx);
cx.background_executor().spawn(async move {
let store = store.await?;
@@ -76,7 +78,8 @@ impl SlashCommand for DefaultSlashCommand {
}],
text,
run_commands_in_text: true,
- })
+ }
+ .to_event_stream())
})
}
}
@@ -2,6 +2,7 @@ use crate::slash_command::file_command::{FileCommandMetadata, FileSlashCommand};
use anyhow::{anyhow, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
+ SlashCommandResult,
};
use collections::HashSet;
use futures::future;
@@ -48,7 +49,7 @@ impl SlashCommand for DeltaSlashCommand {
workspace: WeakView<Workspace>,
delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
let mut paths = HashSet::default();
let mut file_command_old_outputs = Vec::new();
let mut file_command_new_outputs = Vec::new();
@@ -85,25 +86,28 @@ impl SlashCommand for DeltaSlashCommand {
.zip(file_command_new_outputs)
{
if let Ok(new_output) = new_output {
- if let Some(file_command_range) = new_output.sections.first() {
- let new_text = &new_output.text[file_command_range.range.clone()];
- if old_text.chars().ne(new_text.chars()) {
- output.sections.extend(new_output.sections.into_iter().map(
- |section| SlashCommandOutputSection {
- range: output.text.len() + section.range.start
- ..output.text.len() + section.range.end,
- icon: section.icon,
- label: section.label,
- metadata: section.metadata,
- },
- ));
- output.text.push_str(&new_output.text);
+ if let Ok(new_output) = SlashCommandOutput::from_event_stream(new_output).await
+ {
+ if let Some(file_command_range) = new_output.sections.first() {
+ let new_text = &new_output.text[file_command_range.range.clone()];
+ if old_text.chars().ne(new_text.chars()) {
+ output.sections.extend(new_output.sections.into_iter().map(
+ |section| SlashCommandOutputSection {
+ range: output.text.len() + section.range.start
+ ..output.text.len() + section.range.end,
+ icon: section.icon,
+ label: section.label,
+ metadata: section.metadata,
+ },
+ ));
+ output.text.push_str(&new_output.text);
+ }
}
}
}
}
- Ok(output)
+ Ok(output.to_event_stream())
})
}
}
@@ -1,6 +1,8 @@
-use super::{create_label_for_command, SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Result};
-use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
+use assistant_slash_command::{
+ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
+ SlashCommandResult,
+};
use fuzzy::{PathMatch, StringMatchCandidate};
use gpui::{AppContext, Model, Task, View, WeakView};
use language::{
@@ -19,6 +21,8 @@ use util::paths::PathMatcher;
use util::ResultExt;
use workspace::Workspace;
+use crate::slash_command::create_label_for_command;
+
pub(crate) struct DiagnosticsSlashCommand;
impl DiagnosticsSlashCommand {
@@ -167,7 +171,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow!("workspace was dropped")));
};
@@ -176,7 +180,11 @@ impl SlashCommand for DiagnosticsSlashCommand {
let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx);
- cx.spawn(move |_| async move { task.await?.ok_or_else(|| anyhow!("No diagnostics found")) })
+ cx.spawn(move |_| async move {
+ task.await?
+ .map(|output| output.to_event_stream())
+ .ok_or_else(|| anyhow!("No diagnostics found"))
+ })
}
}
@@ -6,6 +6,7 @@ use std::time::Duration;
use anyhow::{anyhow, bail, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
+ SlashCommandResult,
};
use gpui::{AppContext, BackgroundExecutor, Model, Task, WeakView};
use indexed_docs::{
@@ -274,7 +275,7 @@ impl SlashCommand for DocsSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
if arguments.is_empty() {
return Task::ready(Err(anyhow!("missing an argument")));
};
@@ -355,7 +356,8 @@ impl SlashCommand for DocsSlashCommand {
})
.collect(),
run_commands_in_text: false,
- })
+ }
+ .to_event_stream())
})
}
}
@@ -6,6 +6,7 @@ use std::sync::Arc;
use anyhow::{anyhow, bail, Context, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
+ SlashCommandResult,
};
use futures::AsyncReadExt;
use gpui::{Task, WeakView};
@@ -133,7 +134,7 @@ impl SlashCommand for FetchSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
let Some(argument) = arguments.first() else {
return Task::ready(Err(anyhow!("missing URL")));
};
@@ -166,7 +167,8 @@ impl SlashCommand for FetchSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
- })
+ }
+ .to_event_stream())
})
}
}
@@ -1,11 +1,16 @@
-use super::{diagnostics_command::collect_buffer_diagnostics, SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Context as _, Result};
-use assistant_slash_command::{AfterCompletion, ArgumentCompletion, SlashCommandOutputSection};
+use assistant_slash_command::{
+ AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandContent, SlashCommandEvent,
+ SlashCommandOutput, SlashCommandOutputSection, SlashCommandResult,
+};
+use futures::channel::mpsc;
+use futures::Stream;
use fuzzy::PathMatch;
use gpui::{AppContext, Model, Task, View, WeakView};
use language::{BufferSnapshot, CodeLabel, HighlightId, LineEnding, LspAdapterDelegate};
use project::{PathMatchCandidateSet, Project};
use serde::{Deserialize, Serialize};
+use smol::stream::StreamExt;
use std::{
fmt::Write,
ops::{Range, RangeInclusive},
@@ -16,6 +21,8 @@ use ui::prelude::*;
use util::ResultExt;
use workspace::Workspace;
+use crate::slash_command::diagnostics_command::collect_buffer_diagnostics;
+
pub(crate) struct FileSlashCommand;
impl FileSlashCommand {
@@ -181,7 +188,7 @@ impl SlashCommand for FileSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow!("workspace was dropped")));
};
@@ -190,7 +197,12 @@ impl SlashCommand for FileSlashCommand {
return Task::ready(Err(anyhow!("missing path")));
};
- collect_files(workspace.read(cx).project().clone(), arguments, cx)
+ Task::ready(Ok(collect_files(
+ workspace.read(cx).project().clone(),
+ arguments,
+ cx,
+ )
+ .boxed()))
}
}
@@ -198,7 +210,7 @@ fn collect_files(
project: Model<Project>,
glob_inputs: &[String],
cx: &mut AppContext,
-) -> Task<Result<SlashCommandOutput>> {
+) -> impl Stream<Item = Result<SlashCommandEvent>> {
let Ok(matchers) = glob_inputs
.into_iter()
.map(|glob_input| {
@@ -207,7 +219,7 @@ fn collect_files(
})
.collect::<anyhow::Result<Vec<custom_path_matcher::PathMatcher>>>()
else {
- return Task::ready(Err(anyhow!("invalid path")));
+ return futures::stream::once(async { Err(anyhow!("invalid path")) }).boxed();
};
let project_handle = project.downgrade();
@@ -217,11 +229,11 @@ fn collect_files(
.map(|worktree| worktree.read(cx).snapshot())
.collect::<Vec<_>>();
+ let (events_tx, events_rx) = mpsc::unbounded();
cx.spawn(|mut cx| async move {
- let mut output = SlashCommandOutput::default();
for snapshot in snapshots {
let worktree_id = snapshot.id();
- let mut directory_stack: Vec<(Arc<Path>, String, usize)> = Vec::new();
+ let mut directory_stack: Vec<Arc<Path>> = Vec::new();
let mut folded_directory_names_stack = Vec::new();
let mut is_top_level_directory = true;
@@ -237,17 +249,19 @@ fn collect_files(
continue;
}
- while let Some((dir, _, _)) = directory_stack.last() {
+ while let Some(dir) = directory_stack.last() {
if entry.path.starts_with(dir) {
break;
}
- let (_, entry_name, start) = directory_stack.pop().unwrap();
- output.sections.push(build_entry_output_section(
- start..output.text.len().saturating_sub(1),
- Some(&PathBuf::from(entry_name)),
- true,
- None,
- ));
+ directory_stack.pop().unwrap();
+ events_tx
+ .unbounded_send(Ok(SlashCommandEvent::EndSection { metadata: None }))?;
+ events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
+ SlashCommandContent::Text {
+ text: "\n".into(),
+ run_commands_in_text: false,
+ },
+ )))?;
}
let filename = entry
@@ -279,23 +293,46 @@ fn collect_files(
continue;
}
let prefix_paths = folded_directory_names_stack.drain(..).as_slice().join("/");
- let entry_start = output.text.len();
if prefix_paths.is_empty() {
- if is_top_level_directory {
- output
- .text
- .push_str(&path_including_worktree_name.to_string_lossy());
+ let label = if is_top_level_directory {
is_top_level_directory = false;
+ path_including_worktree_name.to_string_lossy().to_string()
} else {
- output.text.push_str(&filename);
- }
- directory_stack.push((entry.path.clone(), filename, entry_start));
+ filename
+ };
+ events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection {
+ icon: IconName::Folder,
+ label: label.clone().into(),
+ metadata: None,
+ }))?;
+ events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
+ SlashCommandContent::Text {
+ text: label,
+ run_commands_in_text: false,
+ },
+ )))?;
+ directory_stack.push(entry.path.clone());
} else {
let entry_name = format!("{}/{}", prefix_paths, &filename);
- output.text.push_str(&entry_name);
- directory_stack.push((entry.path.clone(), entry_name, entry_start));
+ events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection {
+ icon: IconName::Folder,
+ label: entry_name.clone().into(),
+ metadata: None,
+ }))?;
+ events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
+ SlashCommandContent::Text {
+ text: entry_name,
+ run_commands_in_text: false,
+ },
+ )))?;
+ directory_stack.push(entry.path.clone());
}
- output.text.push('\n');
+ events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
+ SlashCommandContent::Text {
+ text: "\n".into(),
+ run_commands_in_text: false,
+ },
+ )))?;
} else if entry.is_file() {
let Some(open_buffer_task) = project_handle
.update(&mut cx, |project, cx| {
@@ -306,6 +343,7 @@ fn collect_files(
continue;
};
if let Some(buffer) = open_buffer_task.await.log_err() {
+ let mut output = SlashCommandOutput::default();
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
append_buffer_to_output(
&snapshot,
@@ -313,33 +351,24 @@ fn collect_files(
&mut output,
)
.log_err();
+ let mut buffer_events = output.to_event_stream();
+ while let Some(event) = buffer_events.next().await {
+ events_tx.unbounded_send(event)?;
+ }
}
}
}
- while let Some((dir, entry, start)) = directory_stack.pop() {
- if directory_stack.is_empty() {
- let mut root_path = PathBuf::new();
- root_path.push(snapshot.root_name());
- root_path.push(&dir);
- output.sections.push(build_entry_output_section(
- start..output.text.len(),
- Some(&root_path),
- true,
- None,
- ));
- } else {
- output.sections.push(build_entry_output_section(
- start..output.text.len(),
- Some(&PathBuf::from(entry.as_str())),
- true,
- None,
- ));
- }
+ while let Some(_) = directory_stack.pop() {
+ events_tx.unbounded_send(Ok(SlashCommandEvent::EndSection { metadata: None }))?;
}
}
- Ok(output)
+
+ anyhow::Ok(())
})
+ .detach_and_log_err(cx);
+
+ events_rx.boxed()
}
pub fn codeblock_fence_for_path(
@@ -524,11 +553,14 @@ pub fn append_buffer_to_output(
#[cfg(test)]
mod test {
+ use assistant_slash_command::SlashCommandOutput;
use fs::FakeFs;
use gpui::TestAppContext;
+ use pretty_assertions::assert_eq;
use project::Project;
use serde_json::json;
use settings::SettingsStore;
+ use smol::stream::StreamExt;
use crate::slash_command::file_command::collect_files;
@@ -569,8 +601,9 @@ mod test {
let project = Project::test(fs, ["/root".as_ref()], cx).await;
- let result_1 = cx
- .update(|cx| collect_files(project.clone(), &["root/dir".to_string()], cx))
+ let result_1 =
+ cx.update(|cx| collect_files(project.clone(), &["root/dir".to_string()], cx));
+ let result_1 = SlashCommandOutput::from_event_stream(result_1.boxed())
.await
.unwrap();
@@ -578,17 +611,17 @@ mod test {
// 4 files + 2 directories
assert_eq!(result_1.sections.len(), 6);
- let result_2 = cx
- .update(|cx| collect_files(project.clone(), &["root/dir/".to_string()], cx))
+ let result_2 =
+ cx.update(|cx| collect_files(project.clone(), &["root/dir/".to_string()], cx));
+ let result_2 = SlashCommandOutput::from_event_stream(result_2.boxed())
.await
.unwrap();
assert_eq!(result_1, result_2);
- let result = cx
- .update(|cx| collect_files(project.clone(), &["root/dir*".to_string()], cx))
- .await
- .unwrap();
+ let result =
+ cx.update(|cx| collect_files(project.clone(), &["root/dir*".to_string()], cx).boxed());
+ let result = SlashCommandOutput::from_event_stream(result).await.unwrap();
assert!(result.text.starts_with("root/dir"));
// 5 files + 2 directories
@@ -631,8 +664,9 @@ mod test {
let project = Project::test(fs, ["/zed".as_ref()], cx).await;
- let result = cx
- .update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx))
+ let result =
+ cx.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx));
+ let result = SlashCommandOutput::from_event_stream(result.boxed())
.await
.unwrap();
@@ -692,8 +726,9 @@ mod test {
let project = Project::test(fs, ["/zed".as_ref()], cx).await;
- let result = cx
- .update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx))
+ let result =
+ cx.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx));
+ let result = SlashCommandOutput::from_event_stream(result.boxed())
.await
.unwrap();
@@ -716,6 +751,8 @@ mod test {
assert_eq!(result.sections[6].label, "summercamp");
assert_eq!(result.sections[7].label, "zed/assets/themes");
+ assert_eq!(result.text, "zed/assets/themes\n```zed/assets/themes/LICENSE\n1\n```\n\nsummercamp\n```zed/assets/themes/summercamp/LICENSE\n1\n```\n\nsubdir\n```zed/assets/themes/summercamp/subdir/LICENSE\n1\n```\n\nsubsubdir\n```zed/assets/themes/summercamp/subdir/subsubdir/LICENSE\n3\n```\n\n");
+
// Ensure that the project lasts until after the last await
drop(project);
}
@@ -4,6 +4,7 @@ use std::sync::Arc;
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
+ SlashCommandResult,
};
use chrono::Local;
use gpui::{Task, WeakView};
@@ -48,7 +49,7 @@ impl SlashCommand for NowSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
_cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
let now = Local::now();
let text = format!("Today is {now}.", now = now.to_rfc2822());
let range = 0..text.len();
@@ -62,6 +63,7 @@ impl SlashCommand for NowSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
- }))
+ }
+ .to_event_stream()))
}
}
@@ -4,7 +4,7 @@ use super::{
};
use crate::PromptBuilder;
use anyhow::{anyhow, Result};
-use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
+use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection, SlashCommandResult};
use feature_flags::FeatureFlag;
use gpui::{AppContext, Task, WeakView, WindowContext};
use language::{Anchor, CodeLabel, LspAdapterDelegate};
@@ -76,7 +76,7 @@ impl SlashCommand for ProjectSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
let model_registry = LanguageModelRegistry::read_global(cx);
let current_model = model_registry.active_model();
let prompt_builder = self.prompt_builder.clone();
@@ -162,7 +162,8 @@ impl SlashCommand for ProjectSlashCommand {
text: output,
sections,
run_commands_in_text: true,
- })
+ }
+ .to_event_stream())
})
.await
})
@@ -1,7 +1,9 @@
-use super::{SlashCommand, SlashCommandOutput};
use crate::prompt_library::PromptStore;
use anyhow::{anyhow, Context, Result};
-use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
+use assistant_slash_command::{
+ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
+ SlashCommandResult,
+};
use gpui::{Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
use std::sync::{atomic::AtomicBool, Arc};
@@ -61,7 +63,7 @@ impl SlashCommand for PromptSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
let title = arguments.to_owned().join(" ");
if title.trim().is_empty() {
return Task::ready(Err(anyhow!("missing prompt name")));
@@ -100,7 +102,8 @@ impl SlashCommand for PromptSlashCommand {
metadata: None,
}],
run_commands_in_text: true,
- })
+ }
+ .to_event_stream())
})
}
}
@@ -1,10 +1,8 @@
-use super::{
- create_label_for_command,
- file_command::{build_entry_output_section, codeblock_fence_for_path},
- SlashCommand, SlashCommandOutput,
-};
use anyhow::Result;
-use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
+use assistant_slash_command::{
+ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
+ SlashCommandResult,
+};
use feature_flags::FeatureFlag;
use gpui::{AppContext, Task, WeakView};
use language::{CodeLabel, LspAdapterDelegate};
@@ -16,6 +14,9 @@ use std::{
use ui::{prelude::*, IconName};
use workspace::Workspace;
+use crate::slash_command::create_label_for_command;
+use crate::slash_command::file_command::{build_entry_output_section, codeblock_fence_for_path};
+
pub(crate) struct SearchSlashCommandFeatureFlag;
impl FeatureFlag for SearchSlashCommandFeatureFlag {
@@ -63,7 +64,7 @@ impl SlashCommand for SearchSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
};
@@ -129,6 +130,7 @@ impl SlashCommand for SearchSlashCommand {
sections,
run_commands_in_text: false,
}
+ .to_event_stream()
})
.await;
@@ -1,6 +1,8 @@
-use super::{SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Context as _, Result};
-use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
+use assistant_slash_command::{
+ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
+ SlashCommandResult,
+};
use editor::Editor;
use gpui::{Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
@@ -46,7 +48,7 @@ impl SlashCommand for OutlineSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
let output = workspace.update(cx, |workspace, cx| {
let Some(active_item) = workspace.active_item(cx) else {
return Task::ready(Err(anyhow!("no active tab")));
@@ -83,7 +85,8 @@ impl SlashCommand for OutlineSlashCommand {
}],
text: outline_text,
run_commands_in_text: false,
- })
+ }
+ .to_event_stream())
})
});
@@ -1,6 +1,8 @@
-use super::{file_command::append_buffer_to_output, SlashCommand, SlashCommandOutput};
use anyhow::{Context, Result};
-use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
+use assistant_slash_command::{
+ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
+ SlashCommandResult,
+};
use collections::{HashMap, HashSet};
use editor::Editor;
use futures::future::join_all;
@@ -14,6 +16,8 @@ use ui::{ActiveTheme, WindowContext};
use util::ResultExt;
use workspace::Workspace;
+use crate::slash_command::file_command::append_buffer_to_output;
+
pub(crate) struct TabSlashCommand;
const ALL_TABS_COMPLETION_ITEM: &str = "all";
@@ -132,7 +136,7 @@ impl SlashCommand for TabSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
let tab_items_search = tab_items_for_queries(
Some(workspace),
arguments,
@@ -146,7 +150,7 @@ impl SlashCommand for TabSlashCommand {
for (full_path, buffer, _) in tab_items_search.await? {
append_buffer_to_output(&buffer, full_path.as_deref(), &mut output).log_err();
}
- Ok(output)
+ Ok(output.to_event_stream())
})
}
}
@@ -4,6 +4,7 @@ use std::sync::Arc;
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
+ SlashCommandResult,
};
use gpui::{AppContext, Task, View, WeakView};
use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate};
@@ -62,7 +63,7 @@ impl SlashCommand for TerminalSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
};
@@ -96,7 +97,8 @@ impl SlashCommand for TerminalSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
- }))
+ }
+ .to_event_stream()))
}
}
@@ -1,18 +1,18 @@
-use crate::prompts::PromptBuilder;
-use std::sync::Arc;
-
use std::sync::atomic::AtomicBool;
+use std::sync::Arc;
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
+ SlashCommandResult,
};
use gpui::{Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
use ui::prelude::*;
-
use workspace::Workspace;
+use crate::prompts::PromptBuilder;
+
pub(crate) struct WorkflowSlashCommand {
prompt_builder: Arc<PromptBuilder>,
}
@@ -60,7 +60,7 @@ impl SlashCommand for WorkflowSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
let prompt_builder = self.prompt_builder.clone();
cx.spawn(|_cx| async move {
let text = prompt_builder.generate_workflow_prompt()?;
@@ -75,7 +75,8 @@ impl SlashCommand for WorkflowSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
- })
+ }
+ .to_event_stream())
})
}
}
@@ -178,7 +178,7 @@ impl PickerDelegate for SlashCommandDelegate {
SlashCommandEntry::Info(info) => Some(
ListItem::new(ix)
.inset(true)
- .spacing(ListItemSpacing::Sparse)
+ .spacing(ListItemSpacing::Dense)
.selected(selected)
.child(
h_flex()
@@ -224,7 +224,7 @@ impl PickerDelegate for SlashCommandDelegate {
SlashCommandEntry::Advert { renderer, .. } => Some(
ListItem::new(ix)
.inset(true)
- .spacing(ListItemSpacing::Sparse)
+ .spacing(ListItemSpacing::Dense)
.selected(selected)
.child(renderer(cx)),
),
@@ -1 +1,2 @@
+pub mod context_server_tool;
pub mod now_tool;
@@ -0,0 +1,82 @@
+use anyhow::{anyhow, bail};
+use assistant_tool::Tool;
+use context_servers::manager::ContextServerManager;
+use context_servers::types;
+use gpui::Task;
+
+pub struct ContextServerTool {
+ server_id: String,
+ tool: types::Tool,
+}
+
+impl ContextServerTool {
+ pub fn new(server_id: impl Into<String>, tool: types::Tool) -> Self {
+ Self {
+ server_id: server_id.into(),
+ tool,
+ }
+ }
+}
+
+impl Tool for ContextServerTool {
+ fn name(&self) -> String {
+ self.tool.name.clone()
+ }
+
+ fn description(&self) -> String {
+ self.tool.description.clone().unwrap_or_default()
+ }
+
+ fn input_schema(&self) -> serde_json::Value {
+ match &self.tool.input_schema {
+ serde_json::Value::Null => {
+ serde_json::json!({ "type": "object", "properties": [] })
+ }
+ serde_json::Value::Object(map) if map.is_empty() => {
+ serde_json::json!({ "type": "object", "properties": [] })
+ }
+ _ => self.tool.input_schema.clone(),
+ }
+ }
+
+ fn run(
+ self: std::sync::Arc<Self>,
+ input: serde_json::Value,
+ _workspace: gpui::WeakView<workspace::Workspace>,
+ cx: &mut ui::WindowContext,
+ ) -> gpui::Task<gpui::Result<String>> {
+ let manager = ContextServerManager::global(cx);
+ let manager = manager.read(cx);
+ if let Some(server) = manager.get_server(&self.server_id) {
+ cx.foreground_executor().spawn({
+ let tool_name = self.tool.name.clone();
+ async move {
+ let Some(protocol) = server.client.read().clone() else {
+ bail!("Context server not initialized");
+ };
+
+ let arguments = if let serde_json::Value::Object(map) = input {
+ Some(map.into_iter().collect())
+ } else {
+ None
+ };
+
+ log::trace!(
+ "Running tool: {} with arguments: {:?}",
+ tool_name,
+ arguments
+ );
+ let response = protocol.run_tool(tool_name, arguments).await?;
+
+ let tool_result = match response.tool_result {
+ serde_json::Value::String(s) => s,
+ _ => serde_json::to_string(&response.tool_result)?,
+ };
+ Ok(tool_result)
+ }
+ })
+ } else {
+ Task::ready(Err(anyhow!("Context server not found")))
+ }
+ }
+}
@@ -15,9 +15,15 @@ path = "src/assistant_slash_command.rs"
anyhow.workspace = true
collections.workspace = true
derive_more.workspace = true
+futures.workspace = true
gpui.workspace = true
language.workspace = true
parking_lot.workspace = true
serde.workspace = true
serde_json.workspace = true
workspace.workspace = true
+
+[dev-dependencies]
+gpui = { workspace = true, features = ["test-support"] }
+pretty_assertions.workspace = true
+workspace = { workspace = true, features = ["test-support"] }
@@ -1,6 +1,8 @@
mod slash_command_registry;
use anyhow::Result;
+use futures::stream::{self, BoxStream};
+use futures::StreamExt;
use gpui::{AnyElement, AppContext, ElementId, SharedString, Task, WeakView, WindowContext};
use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate, OffsetRangeExt};
use serde::{Deserialize, Serialize};
@@ -56,6 +58,8 @@ pub struct ArgumentCompletion {
pub replace_previous_arguments: bool,
}
+pub type SlashCommandResult = Result<BoxStream<'static, Result<SlashCommandEvent>>>;
+
pub trait SlashCommand: 'static + Send + Sync {
fn name(&self) -> String;
fn label(&self, _cx: &AppContext) -> CodeLabel {
@@ -87,7 +91,7 @@ pub trait SlashCommand: 'static + Send + Sync {
// perhaps another kind of delegate is needed here.
delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>>;
+ ) -> Task<SlashCommandResult>;
}
pub type RenderFoldPlaceholder = Arc<
@@ -96,13 +100,146 @@ pub type RenderFoldPlaceholder = Arc<
+ Fn(ElementId, Arc<dyn Fn(&mut WindowContext)>, &mut WindowContext) -> AnyElement,
>;
-#[derive(Debug, Default, PartialEq)]
+#[derive(Debug, PartialEq, Eq)]
+pub enum SlashCommandContent {
+ Text {
+ text: String,
+ run_commands_in_text: bool,
+ },
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum SlashCommandEvent {
+ StartSection {
+ icon: IconName,
+ label: SharedString,
+ metadata: Option<serde_json::Value>,
+ },
+ Content(SlashCommandContent),
+ EndSection {
+ metadata: Option<serde_json::Value>,
+ },
+}
+
+#[derive(Debug, Default, PartialEq, Clone)]
pub struct SlashCommandOutput {
pub text: String,
pub sections: Vec<SlashCommandOutputSection<usize>>,
pub run_commands_in_text: bool,
}
+impl SlashCommandOutput {
+ pub fn ensure_valid_section_ranges(&mut self) {
+ for section in &mut self.sections {
+ section.range.start = section.range.start.min(self.text.len());
+ section.range.end = section.range.end.min(self.text.len());
+ while !self.text.is_char_boundary(section.range.start) {
+ section.range.start -= 1;
+ }
+ while !self.text.is_char_boundary(section.range.end) {
+ section.range.end += 1;
+ }
+ }
+ }
+
+ /// Returns this [`SlashCommandOutput`] as a stream of [`SlashCommandEvent`]s.
+ pub fn to_event_stream(mut self) -> BoxStream<'static, Result<SlashCommandEvent>> {
+ self.ensure_valid_section_ranges();
+
+ let mut events = Vec::new();
+ let mut last_section_end = 0;
+
+ for section in self.sections {
+ if last_section_end < section.range.start {
+ events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text {
+ text: self
+ .text
+ .get(last_section_end..section.range.start)
+ .unwrap_or_default()
+ .to_string(),
+ run_commands_in_text: self.run_commands_in_text,
+ })));
+ }
+
+ events.push(Ok(SlashCommandEvent::StartSection {
+ icon: section.icon,
+ label: section.label,
+ metadata: section.metadata.clone(),
+ }));
+ events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text {
+ text: self
+ .text
+ .get(section.range.start..section.range.end)
+ .unwrap_or_default()
+ .to_string(),
+ run_commands_in_text: self.run_commands_in_text,
+ })));
+ events.push(Ok(SlashCommandEvent::EndSection {
+ metadata: section.metadata,
+ }));
+
+ last_section_end = section.range.end;
+ }
+
+ if last_section_end < self.text.len() {
+ events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text {
+ text: self.text[last_section_end..].to_string(),
+ run_commands_in_text: self.run_commands_in_text,
+ })));
+ }
+
+ stream::iter(events).boxed()
+ }
+
+ pub async fn from_event_stream(
+ mut events: BoxStream<'static, Result<SlashCommandEvent>>,
+ ) -> Result<SlashCommandOutput> {
+ let mut output = SlashCommandOutput::default();
+ let mut section_stack = Vec::new();
+
+ while let Some(event) = events.next().await {
+ match event? {
+ SlashCommandEvent::StartSection {
+ icon,
+ label,
+ metadata,
+ } => {
+ let start = output.text.len();
+ section_stack.push(SlashCommandOutputSection {
+ range: start..start,
+ icon,
+ label,
+ metadata,
+ });
+ }
+ SlashCommandEvent::Content(SlashCommandContent::Text {
+ text,
+ run_commands_in_text,
+ }) => {
+ output.text.push_str(&text);
+ output.run_commands_in_text = run_commands_in_text;
+
+ if let Some(section) = section_stack.last_mut() {
+ section.range.end = output.text.len();
+ }
+ }
+ SlashCommandEvent::EndSection { metadata } => {
+ if let Some(mut section) = section_stack.pop() {
+ section.metadata = metadata;
+ output.sections.push(section);
+ }
+ }
+ }
+ }
+
+ while let Some(section) = section_stack.pop() {
+ output.sections.push(section);
+ }
+
+ Ok(output)
+ }
+}
+
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct SlashCommandOutputSection<T> {
pub range: Range<T>,
@@ -116,3 +253,243 @@ impl SlashCommandOutputSection<language::Anchor> {
self.range.start.is_valid(buffer) && !self.range.to_offset(buffer).is_empty()
}
}
+
+#[cfg(test)]
+mod tests {
+ use pretty_assertions::assert_eq;
+ use serde_json::json;
+
+ use super::*;
+
+ #[gpui::test]
+ async fn test_slash_command_output_to_events_round_trip() {
+ // Test basic output consisting of a single section.
+ {
+ let text = "Hello, world!".to_string();
+ let range = 0..text.len();
+ let output = SlashCommandOutput {
+ text,
+ sections: vec![SlashCommandOutputSection {
+ range,
+ icon: IconName::Code,
+ label: "Section 1".into(),
+ metadata: None,
+ }],
+ run_commands_in_text: false,
+ };
+
+ let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
+ let events = events
+ .into_iter()
+ .filter_map(|event| event.ok())
+ .collect::<Vec<_>>();
+
+ assert_eq!(
+ events,
+ vec![
+ SlashCommandEvent::StartSection {
+ icon: IconName::Code,
+ label: "Section 1".into(),
+ metadata: None
+ },
+ SlashCommandEvent::Content(SlashCommandContent::Text {
+ text: "Hello, world!".into(),
+ run_commands_in_text: false
+ }),
+ SlashCommandEvent::EndSection { metadata: None }
+ ]
+ );
+
+ let new_output =
+ SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
+ .await
+ .unwrap();
+
+ assert_eq!(new_output, output);
+ }
+
+ // Test output where the sections do not comprise all of the text.
+ {
+ let text = "Apple\nCucumber\nBanana\n".to_string();
+ let output = SlashCommandOutput {
+ text,
+ sections: vec![
+ SlashCommandOutputSection {
+ range: 0..6,
+ icon: IconName::Check,
+ label: "Fruit".into(),
+ metadata: None,
+ },
+ SlashCommandOutputSection {
+ range: 15..22,
+ icon: IconName::Check,
+ label: "Fruit".into(),
+ metadata: None,
+ },
+ ],
+ run_commands_in_text: false,
+ };
+
+ let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
+ let events = events
+ .into_iter()
+ .filter_map(|event| event.ok())
+ .collect::<Vec<_>>();
+
+ assert_eq!(
+ events,
+ vec![
+ SlashCommandEvent::StartSection {
+ icon: IconName::Check,
+ label: "Fruit".into(),
+ metadata: None
+ },
+ SlashCommandEvent::Content(SlashCommandContent::Text {
+ text: "Apple\n".into(),
+ run_commands_in_text: false
+ }),
+ SlashCommandEvent::EndSection { metadata: None },
+ SlashCommandEvent::Content(SlashCommandContent::Text {
+ text: "Cucumber\n".into(),
+ run_commands_in_text: false
+ }),
+ SlashCommandEvent::StartSection {
+ icon: IconName::Check,
+ label: "Fruit".into(),
+ metadata: None
+ },
+ SlashCommandEvent::Content(SlashCommandContent::Text {
+ text: "Banana\n".into(),
+ run_commands_in_text: false
+ }),
+ SlashCommandEvent::EndSection { metadata: None }
+ ]
+ );
+
+ let new_output =
+ SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
+ .await
+ .unwrap();
+
+ assert_eq!(new_output, output);
+ }
+
+ // Test output consisting of multiple sections.
+ {
+ let text = "Line 1\nLine 2\nLine 3\nLine 4\n".to_string();
+ let output = SlashCommandOutput {
+ text,
+ sections: vec![
+ SlashCommandOutputSection {
+ range: 0..6,
+ icon: IconName::FileCode,
+ label: "Section 1".into(),
+ metadata: Some(json!({ "a": true })),
+ },
+ SlashCommandOutputSection {
+ range: 7..13,
+ icon: IconName::FileDoc,
+ label: "Section 2".into(),
+ metadata: Some(json!({ "b": true })),
+ },
+ SlashCommandOutputSection {
+ range: 14..20,
+ icon: IconName::FileGit,
+ label: "Section 3".into(),
+ metadata: Some(json!({ "c": true })),
+ },
+ SlashCommandOutputSection {
+ range: 21..27,
+ icon: IconName::FileToml,
+ label: "Section 4".into(),
+ metadata: Some(json!({ "d": true })),
+ },
+ ],
+ run_commands_in_text: false,
+ };
+
+ let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
+ let events = events
+ .into_iter()
+ .filter_map(|event| event.ok())
+ .collect::<Vec<_>>();
+
+ assert_eq!(
+ events,
+ vec![
+ SlashCommandEvent::StartSection {
+ icon: IconName::FileCode,
+ label: "Section 1".into(),
+ metadata: Some(json!({ "a": true }))
+ },
+ SlashCommandEvent::Content(SlashCommandContent::Text {
+ text: "Line 1".into(),
+ run_commands_in_text: false
+ }),
+ SlashCommandEvent::EndSection {
+ metadata: Some(json!({ "a": true }))
+ },
+ SlashCommandEvent::Content(SlashCommandContent::Text {
+ text: "\n".into(),
+ run_commands_in_text: false
+ }),
+ SlashCommandEvent::StartSection {
+ icon: IconName::FileDoc,
+ label: "Section 2".into(),
+ metadata: Some(json!({ "b": true }))
+ },
+ SlashCommandEvent::Content(SlashCommandContent::Text {
+ text: "Line 2".into(),
+ run_commands_in_text: false
+ }),
+ SlashCommandEvent::EndSection {
+ metadata: Some(json!({ "b": true }))
+ },
+ SlashCommandEvent::Content(SlashCommandContent::Text {
+ text: "\n".into(),
+ run_commands_in_text: false
+ }),
+ SlashCommandEvent::StartSection {
+ icon: IconName::FileGit,
+ label: "Section 3".into(),
+ metadata: Some(json!({ "c": true }))
+ },
+ SlashCommandEvent::Content(SlashCommandContent::Text {
+ text: "Line 3".into(),
+ run_commands_in_text: false
+ }),
+ SlashCommandEvent::EndSection {
+ metadata: Some(json!({ "c": true }))
+ },
+ SlashCommandEvent::Content(SlashCommandContent::Text {
+ text: "\n".into(),
+ run_commands_in_text: false
+ }),
+ SlashCommandEvent::StartSection {
+ icon: IconName::FileToml,
+ label: "Section 4".into(),
+ metadata: Some(json!({ "d": true }))
+ },
+ SlashCommandEvent::Content(SlashCommandContent::Text {
+ text: "Line 4".into(),
+ run_commands_in_text: false
+ }),
+ SlashCommandEvent::EndSection {
+ metadata: Some(json!({ "d": true }))
+ },
+ SlashCommandEvent::Content(SlashCommandContent::Text {
+ text: "\n".into(),
+ run_commands_in_text: false
+ }),
+ ]
+ );
+
+ let new_output =
+ SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
+ .await
+ .unwrap();
+
+ assert_eq!(new_output, output);
+ }
+ }
+}
@@ -432,10 +432,11 @@ impl AutoUpdater {
cx.notify();
}
- pub async fn get_latest_remote_server_release(
+ pub async fn download_remote_server_release(
os: &str,
arch: &str,
- mut release_channel: ReleaseChannel,
+ release_channel: ReleaseChannel,
+ version: Option<SemanticVersion>,
cx: &mut AsyncAppContext,
) -> Result<PathBuf> {
let this = cx.update(|cx| {
@@ -445,15 +446,12 @@ impl AutoUpdater {
.ok_or_else(|| anyhow!("auto-update not initialized"))
})??;
- if release_channel == ReleaseChannel::Dev {
- release_channel = ReleaseChannel::Nightly;
- }
-
- let release = Self::get_latest_release(
+ let release = Self::get_release(
&this,
"zed-remote-server",
os,
arch,
+ version,
Some(release_channel),
cx,
)
@@ -468,17 +466,21 @@ impl AutoUpdater {
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
if smol::fs::metadata(&version_path).await.is_err() {
- log::info!("downloading zed-remote-server {os} {arch}");
+ log::info!(
+ "downloading zed-remote-server {os} {arch} version {}",
+ release.version
+ );
download_remote_server_binary(&version_path, release, client, cx).await?;
}
Ok(version_path)
}
- pub async fn get_latest_remote_server_release_url(
+ pub async fn get_remote_server_release_url(
os: &str,
arch: &str,
- mut release_channel: ReleaseChannel,
+ release_channel: ReleaseChannel,
+ version: Option<SemanticVersion>,
cx: &mut AsyncAppContext,
) -> Result<(String, String)> {
let this = cx.update(|cx| {
@@ -488,15 +490,12 @@ impl AutoUpdater {
.ok_or_else(|| anyhow!("auto-update not initialized"))
})??;
- if release_channel == ReleaseChannel::Dev {
- release_channel = ReleaseChannel::Nightly;
- }
-
- let release = Self::get_latest_release(
+ let release = Self::get_release(
&this,
"zed-remote-server",
os,
arch,
+ version,
Some(release_channel),
cx,
)
@@ -508,46 +507,65 @@ impl AutoUpdater {
Ok((release.url, body))
}
- async fn get_latest_release(
+ async fn get_release(
this: &Model<Self>,
asset: &str,
os: &str,
arch: &str,
+ version: Option<SemanticVersion>,
release_channel: Option<ReleaseChannel>,
cx: &mut AsyncAppContext,
) -> Result<JsonRelease> {
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
- let mut url_string = client.build_url(&format!(
- "/api/releases/latest?asset={}&os={}&arch={}",
- asset, os, arch
- ));
- if let Some(param) = release_channel.and_then(|c| c.release_query_param()) {
- url_string += "&";
- url_string += param;
- }
- let mut response = client.get(&url_string, Default::default(), true).await?;
+ if let Some(version) = version {
+ let channel = release_channel.map(|c| c.dev_name()).unwrap_or("stable");
- let mut body = Vec::new();
- response
- .body_mut()
- .read_to_end(&mut body)
- .await
- .context("error reading release")?;
+ let url = format!("/api/releases/{channel}/{version}/{asset}-{os}-{arch}.gz?update=1",);
+
+ Ok(JsonRelease {
+ version: version.to_string(),
+ url: client.build_url(&url),
+ })
+ } else {
+ let mut url_string = client.build_url(&format!(
+ "/api/releases/latest?asset={}&os={}&arch={}",
+ asset, os, arch
+ ));
+ if let Some(param) = release_channel.and_then(|c| c.release_query_param()) {
+ url_string += "&";
+ url_string += param;
+ }
+
+ let mut response = client.get(&url_string, Default::default(), true).await?;
+ let mut body = Vec::new();
+ response.body_mut().read_to_end(&mut body).await?;
+
+ if !response.status().is_success() {
+ return Err(anyhow!(
+ "failed to fetch release: {:?}",
+ String::from_utf8_lossy(&body),
+ ));
+ }
- if !response.status().is_success() {
- Err(anyhow!(
- "failed to fetch release: {:?}",
- String::from_utf8_lossy(&body),
- ))?;
+ serde_json::from_slice(body.as_slice()).with_context(|| {
+ format!(
+ "error deserializing release {:?}",
+ String::from_utf8_lossy(&body),
+ )
+ })
}
+ }
- serde_json::from_slice(body.as_slice()).with_context(|| {
- format!(
- "error deserializing release {:?}",
- String::from_utf8_lossy(&body),
- )
- })
+ async fn get_latest_release(
+ this: &Model<Self>,
+ asset: &str,
+ os: &str,
+ arch: &str,
+ release_channel: Option<ReleaseChannel>,
+ cx: &mut AsyncAppContext,
+ ) -> Result<JsonRelease> {
+ Self::get_release(this, asset, os, arch, None, release_channel, cx).await
}
async fn update(this: Model<Self>, mut cx: AsyncAppContext) -> Result<()> {
@@ -1194,26 +1194,15 @@ impl Room {
project: Model<Project>,
cx: &mut ModelContext<Self>,
) -> Task<Result<u64>> {
- let request = if let Some(dev_server_project_id) = project.read(cx).dev_server_project_id()
- {
- self.client.request(proto::ShareProject {
- room_id: self.id(),
- worktrees: vec![],
- dev_server_project_id: Some(dev_server_project_id.0),
- is_ssh_project: false,
- })
- } else {
- if let Some(project_id) = project.read(cx).remote_id() {
- return Task::ready(Ok(project_id));
- }
+ if let Some(project_id) = project.read(cx).remote_id() {
+ return Task::ready(Ok(project_id));
+ }
- self.client.request(proto::ShareProject {
- room_id: self.id(),
- worktrees: project.read(cx).worktree_metadata_protos(cx),
- dev_server_project_id: None,
- is_ssh_project: project.read(cx).is_via_ssh(),
- })
- };
+ let request = self.client.request(proto::ShareProject {
+ room_id: self.id(),
+ worktrees: project.read(cx).worktree_metadata_protos(cx),
+ is_ssh_project: project.read(cx).is_via_ssh(),
+ });
cx.spawn(|this, mut cx| async move {
let response = request.await?;
@@ -3,7 +3,7 @@ mod channel_index;
use crate::{channel_buffer::ChannelBuffer, channel_chat::ChannelChat, ChannelMessage};
use anyhow::{anyhow, Result};
use channel_index::ChannelIndex;
-use client::{ChannelId, Client, ClientSettings, ProjectId, Subscription, User, UserId, UserStore};
+use client::{ChannelId, Client, ClientSettings, Subscription, User, UserId, UserStore};
use collections::{hash_map, HashMap, HashSet};
use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
use gpui::{
@@ -33,30 +33,11 @@ struct NotesVersion {
version: clock::Global,
}
-#[derive(Debug, Clone)]
-pub struct HostedProject {
- project_id: ProjectId,
- channel_id: ChannelId,
- name: SharedString,
- _visibility: proto::ChannelVisibility,
-}
-impl From<proto::HostedProject> for HostedProject {
- fn from(project: proto::HostedProject) -> Self {
- Self {
- project_id: ProjectId(project.project_id),
- channel_id: ChannelId(project.channel_id),
- _visibility: project.visibility(),
- name: project.name.into(),
- }
- }
-}
pub struct ChannelStore {
pub channel_index: ChannelIndex,
channel_invitations: Vec<Arc<Channel>>,
channel_participants: HashMap<ChannelId, Vec<Arc<User>>>,
channel_states: HashMap<ChannelId, ChannelState>,
- hosted_projects: HashMap<ProjectId, HostedProject>,
-
outgoing_invites: HashSet<(ChannelId, UserId)>,
update_channels_tx: mpsc::UnboundedSender<proto::UpdateChannels>,
opened_buffers: HashMap<ChannelId, OpenedModelHandle<ChannelBuffer>>,
@@ -85,7 +66,6 @@ pub struct ChannelState {
observed_notes_version: NotesVersion,
observed_chat_message: Option<u64>,
role: Option<ChannelRole>,
- projects: HashSet<ProjectId>,
}
impl Channel {
@@ -216,7 +196,6 @@ impl ChannelStore {
channel_invitations: Vec::default(),
channel_index: ChannelIndex::default(),
channel_participants: Default::default(),
- hosted_projects: Default::default(),
outgoing_invites: Default::default(),
opened_buffers: Default::default(),
opened_chats: Default::default(),
@@ -316,19 +295,6 @@ impl ChannelStore {
self.channel_index.by_id().get(&channel_id)
}
- pub fn projects_for_id(&self, channel_id: ChannelId) -> Vec<(SharedString, ProjectId)> {
- let mut projects: Vec<(SharedString, ProjectId)> = self
- .channel_states
- .get(&channel_id)
- .map(|state| state.projects.clone())
- .unwrap_or_default()
- .into_iter()
- .flat_map(|id| Some((self.hosted_projects.get(&id)?.name.clone(), id)))
- .collect();
- projects.sort();
- projects
- }
-
pub fn has_open_channel_buffer(&self, channel_id: ChannelId, _cx: &AppContext) -> bool {
if let Some(buffer) = self.opened_buffers.get(&channel_id) {
if let OpenedModelHandle::Open(buffer) = buffer {
@@ -1102,9 +1068,7 @@ impl ChannelStore {
let channels_changed = !payload.channels.is_empty()
|| !payload.delete_channels.is_empty()
|| !payload.latest_channel_message_ids.is_empty()
- || !payload.latest_channel_buffer_versions.is_empty()
- || !payload.hosted_projects.is_empty()
- || !payload.deleted_hosted_projects.is_empty();
+ || !payload.latest_channel_buffer_versions.is_empty();
if channels_changed {
if !payload.delete_channels.is_empty() {
@@ -1161,34 +1125,6 @@ impl ChannelStore {
.or_default()
.update_latest_message_id(latest_channel_message.message_id);
}
-
- for hosted_project in payload.hosted_projects {
- let hosted_project: HostedProject = hosted_project.into();
- if let Some(old_project) = self
- .hosted_projects
- .insert(hosted_project.project_id, hosted_project.clone())
- {
- self.channel_states
- .entry(old_project.channel_id)
- .or_default()
- .remove_hosted_project(old_project.project_id);
- }
- self.channel_states
- .entry(hosted_project.channel_id)
- .or_default()
- .add_hosted_project(hosted_project.project_id);
- }
-
- for hosted_project_id in payload.deleted_hosted_projects {
- let hosted_project_id = ProjectId(hosted_project_id);
-
- if let Some(old_project) = self.hosted_projects.remove(&hosted_project_id) {
- self.channel_states
- .entry(old_project.channel_id)
- .or_default()
- .remove_hosted_project(old_project.project_id);
- }
- }
}
cx.notify();
@@ -1295,12 +1231,4 @@ impl ChannelState {
};
}
}
-
- fn add_hosted_project(&mut self, project_id: ProjectId) {
- self.projects.insert(project_id);
- }
-
- fn remove_hosted_project(&mut self, project_id: ProjectId) {
- self.projects.remove(&project_id);
- }
}
@@ -15,7 +15,6 @@ pub enum CliRequest {
urls: Vec<String>,
wait: bool,
open_new_workspace: Option<bool>,
- dev_server_token: Option<String>,
env: Option<HashMap<String, String>>,
},
}
@@ -151,6 +151,12 @@ fn main() -> Result<()> {
}
}
+ if let Some(_) = args.dev_server_token {
+ return Err(anyhow::anyhow!(
+ "Dev servers were removed in v0.157.x please upgrade to SSH remoting: https://zed.dev/docs/remote-development"
+ ))?;
+ }
+
let sender: JoinHandle<anyhow::Result<()>> = thread::spawn({
let exit_status = exit_status.clone();
move || {
@@ -162,7 +168,6 @@ fn main() -> Result<()> {
urls,
wait: args.wait,
open_new_workspace,
- dev_server_token: args.dev_server_token,
env,
})?;
@@ -30,7 +30,6 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
use socks::connect_socks_proxy_stream;
-use std::fmt;
use std::pin::Pin;
use std::{
any::TypeId,
@@ -54,15 +53,6 @@ pub use rpc::*;
pub use telemetry_events::Event;
pub use user::*;
-#[derive(Debug, Clone, Eq, PartialEq)]
-pub struct DevServerToken(pub String);
-
-impl fmt::Display for DevServerToken {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- write!(f, "{}", self.0)
- }
-}
-
static ZED_SERVER_URL: LazyLock<Option<String>> =
LazyLock::new(|| std::env::var("ZED_SERVER_URL").ok());
static ZED_RPC_URL: LazyLock<Option<String>> = LazyLock::new(|| std::env::var("ZED_RPC_URL").ok());
@@ -304,20 +294,14 @@ struct ClientState {
}
#[derive(Clone, Debug, Eq, PartialEq)]
-pub enum Credentials {
- DevServer { token: DevServerToken },
- User { user_id: u64, access_token: String },
+pub struct Credentials {
+ pub user_id: u64,
+ pub access_token: String,
}
impl Credentials {
pub fn authorization_header(&self) -> String {
- match self {
- Credentials::DevServer { token } => format!("dev-server-token {}", token),
- Credentials::User {
- user_id,
- access_token,
- } => format!("{} {}", user_id, access_token),
- }
+ format!("{} {}", self.user_id, self.access_token)
}
}
@@ -600,11 +584,11 @@ impl Client {
}
pub fn user_id(&self) -> Option<u64> {
- if let Some(Credentials::User { user_id, .. }) = self.state.read().credentials.as_ref() {
- Some(*user_id)
- } else {
- None
- }
+ self.state
+ .read()
+ .credentials
+ .as_ref()
+ .map(|credentials| credentials.user_id)
}
pub fn peer_id(&self) -> Option<PeerId> {
@@ -793,11 +777,6 @@ impl Client {
.is_some()
}
- pub fn set_dev_server_token(&self, token: DevServerToken) -> &Self {
- self.state.write().credentials = Some(Credentials::DevServer { token });
- self
- }
-
#[async_recursion(?Send)]
pub async fn authenticate_and_connect(
self: &Arc<Self>,
@@ -848,9 +827,7 @@ impl Client {
}
}
let credentials = credentials.unwrap();
- if let Credentials::User { user_id, .. } = &credentials {
- self.set_id(*user_id);
- }
+ self.set_id(credentials.user_id);
if was_disconnected {
self.set_status(Status::Connecting, cx);
@@ -866,9 +843,8 @@ impl Client {
Ok(conn) => {
self.state.write().credentials = Some(credentials.clone());
if !read_from_provider && IMPERSONATE_LOGIN.is_none() {
- if let Credentials::User{user_id, access_token} = credentials {
- self.credentials_provider.write_credentials(user_id, access_token, cx).await.log_err();
- }
+ self.credentials_provider.write_credentials(credentials.user_id, credentials.access_token, cx).await.log_err();
+
}
futures::select_biased! {
@@ -1301,7 +1277,7 @@ impl Client {
.decrypt_string(&access_token)
.context("failed to decrypt access token")?;
- Ok(Credentials::User {
+ Ok(Credentials {
user_id: user_id.parse()?,
access_token,
})
@@ -1422,7 +1398,7 @@ impl Client {
// Use the admin API token to authenticate as the impersonated user.
api_token.insert_str(0, "ADMIN_TOKEN:");
- Ok(Credentials::User {
+ Ok(Credentials {
user_id: response.user.id,
access_token: api_token,
})
@@ -1667,7 +1643,7 @@ impl CredentialsProvider for DevelopmentCredentialsProvider {
let credentials: DevelopmentCredentials = serde_json::from_slice(&json).log_err()?;
- Some(Credentials::User {
+ Some(Credentials {
user_id: credentials.user_id,
access_token: credentials.access_token,
})
@@ -1721,7 +1697,7 @@ impl CredentialsProvider for KeychainCredentialsProvider {
.await
.log_err()??;
- Some(Credentials::User {
+ Some(Credentials {
user_id: user_id.parse().ok()?,
access_token: String::from_utf8(access_token).ok()?,
})
@@ -1855,7 +1831,7 @@ mod tests {
// Time out when client tries to connect.
client.override_authenticate(move |cx| {
cx.background_executor().spawn(async move {
- Ok(Credentials::User {
+ Ok(Credentials {
user_id,
access_token: "token".into(),
})
@@ -49,7 +49,7 @@ impl FakeServer {
let mut state = state.lock();
state.auth_count += 1;
let access_token = state.access_token.to_string();
- Ok(Credentials::User {
+ Ok(Credentials {
user_id: client_user_id,
access_token,
})
@@ -73,7 +73,7 @@ impl FakeServer {
}
if credentials
- != (Credentials::User {
+ != (Credentials {
user_id: client_user_id,
access_token: state.lock().access_token.to_string(),
})
@@ -28,9 +28,6 @@ impl std::fmt::Display for ChannelId {
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct ProjectId(pub u64);
-#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
-pub struct DevServerId(pub u64);
-
#[derive(
Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize,
)]
@@ -86,7 +86,6 @@ client = { workspace = true, features = ["test-support"] }
collab_ui = { workspace = true, features = ["test-support"] }
collections = { workspace = true, features = ["test-support"] }
ctor.workspace = true
-dev_server_projects.workspace = true
editor = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
file_finder.workspace = true
@@ -94,7 +93,6 @@ fs = { workspace = true, features = ["test-support"] }
git = { workspace = true, features = ["test-support"] }
git_hosting_providers.workspace = true
gpui = { workspace = true, features = ["test-support"] }
-headless.workspace = true
hyper.workspace = true
indoc.workspace = true
language = { workspace = true, features = ["test-support"] }
@@ -252,7 +252,10 @@ async fn create_billing_subscription(
let default_model = llm_db.model(rpc::LanguageModelProvider::Anthropic, "claude-3-5-sonnet")?;
let stripe_model = stripe_billing.register_model(default_model).await?;
- let success_url = format!("{}/account", app.config.zed_dot_dev_url());
+ let success_url = format!(
+ "{}/account?checkout_complete=1",
+ app.config.zed_dot_dev_url()
+ );
let checkout_session_url = stripe_billing
.checkout(customer_id, &user.github_login, &stripe_model, &success_url)
.await?;
@@ -1,5 +1,5 @@
use crate::{
- db::{self, dev_server, AccessTokenId, Database, DevServerId, UserId},
+ db::{self, AccessTokenId, Database, UserId},
rpc::Principal,
AppState, Error, Result,
};
@@ -44,19 +44,10 @@ pub async fn validate_header<B>(mut req: Request<B>, next: Next<B>) -> impl Into
let first = auth_header.next().unwrap_or("");
if first == "dev-server-token" {
- let dev_server_token = auth_header.next().ok_or_else(|| {
- Error::http(
- StatusCode::BAD_REQUEST,
- "missing dev-server-token token in authorization header".to_string(),
- )
- })?;
- let dev_server = verify_dev_server_token(dev_server_token, &state.db)
- .await
- .map_err(|e| Error::http(StatusCode::UNAUTHORIZED, format!("{}", e)))?;
-
- req.extensions_mut()
- .insert(Principal::DevServer(dev_server));
- return Ok::<_, Error>(next.run(req).await);
+ Err(Error::http(
+ StatusCode::UNAUTHORIZED,
+ "Dev servers were removed in Zed 0.157 please upgrade to SSH remoting".to_string(),
+ ))?;
}
let user_id = UserId(first.parse().map_err(|_| {
@@ -240,41 +231,6 @@ pub async fn verify_access_token(
})
}
-pub fn generate_dev_server_token(id: usize, access_token: String) -> String {
- format!("{}.{}", id, access_token)
-}
-
-pub async fn verify_dev_server_token(
- dev_server_token: &str,
- db: &Arc<Database>,
-) -> anyhow::Result<dev_server::Model> {
- let (id, token) = split_dev_server_token(dev_server_token)?;
- let token_hash = hash_access_token(token);
- let server = db.get_dev_server(id).await?;
-
- if server
- .hashed_token
- .as_bytes()
- .ct_eq(token_hash.as_ref())
- .into()
- {
- Ok(server)
- } else {
- Err(anyhow!("wrong token for dev server"))
- }
-}
-
-// a dev_server_token has the format <id>.<base64>. This is to make them
-// relatively easy to copy/paste around.
-pub fn split_dev_server_token(dev_server_token: &str) -> anyhow::Result<(DevServerId, &str)> {
- let mut parts = dev_server_token.splitn(2, '.');
- let id = DevServerId(parts.next().unwrap_or_default().parse()?);
- let token = parts
- .next()
- .ok_or_else(|| anyhow!("invalid dev server token format"))?;
- Ok((id, token))
-}
-
#[cfg(test)]
mod test {
use rand::thread_rng;
@@ -617,7 +617,6 @@ pub struct ChannelsForUser {
pub channels: Vec<Channel>,
pub channel_memberships: Vec<channel_member::Model>,
pub channel_participants: HashMap<ChannelId, Vec<UserId>>,
- pub hosted_projects: Vec<proto::HostedProject>,
pub invited_channels: Vec<Channel>,
pub observed_buffer_versions: Vec<proto::ChannelBufferVersion>,
@@ -726,7 +725,6 @@ pub struct Project {
pub collaborators: Vec<ProjectCollaborator>,
pub worktrees: BTreeMap<u64, Worktree>,
pub language_servers: Vec<proto::LanguageServer>,
- pub dev_server_project_id: Option<DevServerProjectId>,
}
pub struct ProjectCollaborator {
@@ -79,7 +79,6 @@ id_type!(ChannelChatParticipantId);
id_type!(ChannelId);
id_type!(ChannelMemberId);
id_type!(ContactId);
-id_type!(DevServerId);
id_type!(ExtensionId);
id_type!(FlagId);
id_type!(FollowerId);
@@ -89,7 +88,6 @@ id_type!(NotificationId);
id_type!(NotificationKindId);
id_type!(ProjectCollaboratorId);
id_type!(ProjectId);
-id_type!(DevServerProjectId);
id_type!(ReplicaId);
id_type!(RoomId);
id_type!(RoomParticipantId);
@@ -277,12 +275,6 @@ impl From<ChannelVisibility> for i32 {
}
}
-#[derive(Copy, Clone, Debug, Serialize, PartialEq)]
-pub enum PrincipalId {
- UserId(UserId),
- DevServerId(DevServerId),
-}
-
/// Indicate whether a [Buffer] has permissions to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {
@@ -8,11 +8,8 @@ pub mod buffers;
pub mod channels;
pub mod contacts;
pub mod contributors;
-pub mod dev_server_projects;
-pub mod dev_servers;
pub mod embeddings;
pub mod extensions;
-pub mod hosted_projects;
pub mod messages;
pub mod notifications;
pub mod processed_stripe_events;
@@ -615,15 +615,10 @@ impl Database {
.observed_channel_messages(&channel_ids, user_id, tx)
.await?;
- let hosted_projects = self
- .get_hosted_projects(&channel_ids, &roles_by_channel_id, tx)
- .await?;
-
Ok(ChannelsForUser {
channel_memberships,
channels,
invited_channels,
- hosted_projects,
channel_participants,
latest_buffer_versions,
latest_channel_messages,
@@ -1,365 +1 @@
-use anyhow::anyhow;
-use rpc::{
- proto::{self},
- ConnectionId,
-};
-use sea_orm::{
- ActiveModelTrait, ActiveValue, ColumnTrait, Condition, DatabaseTransaction, EntityTrait,
- IntoActiveModel, ModelTrait, QueryFilter,
-};
-use crate::db::ProjectId;
-
-use super::{
- dev_server, dev_server_project, project, project_collaborator, worktree, Database, DevServerId,
- DevServerProjectId, RejoinedProject, ResharedProject, ServerId, UserId,
-};
-
-impl Database {
- pub async fn get_dev_server_project(
- &self,
- dev_server_project_id: DevServerProjectId,
- ) -> crate::Result<dev_server_project::Model> {
- self.transaction(|tx| async move {
- Ok(
- dev_server_project::Entity::find_by_id(dev_server_project_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| {
- anyhow!("no dev server project with id {}", dev_server_project_id)
- })?,
- )
- })
- .await
- }
-
- pub async fn get_projects_for_dev_server(
- &self,
- dev_server_id: DevServerId,
- ) -> crate::Result<Vec<proto::DevServerProject>> {
- self.transaction(|tx| async move {
- self.get_projects_for_dev_server_internal(dev_server_id, &tx)
- .await
- })
- .await
- }
-
- pub async fn get_projects_for_dev_server_internal(
- &self,
- dev_server_id: DevServerId,
- tx: &DatabaseTransaction,
- ) -> crate::Result<Vec<proto::DevServerProject>> {
- let servers = dev_server_project::Entity::find()
- .filter(dev_server_project::Column::DevServerId.eq(dev_server_id))
- .find_also_related(project::Entity)
- .all(tx)
- .await?;
- Ok(servers
- .into_iter()
- .map(|(dev_server_project, project)| dev_server_project.to_proto(project))
- .collect())
- }
-
- pub async fn dev_server_project_ids_for_user(
- &self,
- user_id: UserId,
- tx: &DatabaseTransaction,
- ) -> crate::Result<Vec<DevServerProjectId>> {
- let dev_servers = dev_server::Entity::find()
- .filter(dev_server::Column::UserId.eq(user_id))
- .find_with_related(dev_server_project::Entity)
- .all(tx)
- .await?;
-
- Ok(dev_servers
- .into_iter()
- .flat_map(|(_, projects)| projects.into_iter().map(|p| p.id))
- .collect())
- }
-
- pub async fn owner_for_dev_server_project(
- &self,
- dev_server_project_id: DevServerProjectId,
- tx: &DatabaseTransaction,
- ) -> crate::Result<UserId> {
- let dev_server = dev_server_project::Entity::find_by_id(dev_server_project_id)
- .find_also_related(dev_server::Entity)
- .one(tx)
- .await?
- .and_then(|(_, dev_server)| dev_server)
- .ok_or_else(|| anyhow!("no dev server project"))?;
-
- Ok(dev_server.user_id)
- }
-
- pub async fn get_stale_dev_server_projects(
- &self,
- connection: ConnectionId,
- ) -> crate::Result<Vec<ProjectId>> {
- self.transaction(|tx| async move {
- let projects = project::Entity::find()
- .filter(
- Condition::all()
- .add(project::Column::HostConnectionId.eq(connection.id))
- .add(project::Column::HostConnectionServerId.eq(connection.owner_id)),
- )
- .all(&*tx)
- .await?;
-
- Ok(projects.into_iter().map(|p| p.id).collect())
- })
- .await
- }
-
- pub async fn create_dev_server_project(
- &self,
- dev_server_id: DevServerId,
- path: &str,
- user_id: UserId,
- ) -> crate::Result<(dev_server_project::Model, proto::DevServerProjectsUpdate)> {
- self.transaction(|tx| async move {
- let dev_server = dev_server::Entity::find_by_id(dev_server_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?;
- if dev_server.user_id != user_id {
- return Err(anyhow!("not your dev server"))?;
- }
-
- let project = dev_server_project::Entity::insert(dev_server_project::ActiveModel {
- id: ActiveValue::NotSet,
- dev_server_id: ActiveValue::Set(dev_server_id),
- paths: ActiveValue::Set(dev_server_project::JSONPaths(vec![path.to_string()])),
- })
- .exec_with_returning(&*tx)
- .await?;
-
- let status = self
- .dev_server_projects_update_internal(user_id, &tx)
- .await?;
-
- Ok((project, status))
- })
- .await
- }
-
- pub async fn update_dev_server_project(
- &self,
- id: DevServerProjectId,
- paths: &[String],
- user_id: UserId,
- ) -> crate::Result<(dev_server_project::Model, proto::DevServerProjectsUpdate)> {
- self.transaction(move |tx| async move {
- let paths = paths.to_owned();
- let Some((project, Some(dev_server))) = dev_server_project::Entity::find_by_id(id)
- .find_also_related(dev_server::Entity)
- .one(&*tx)
- .await?
- else {
- return Err(anyhow!("no such dev server project"))?;
- };
-
- if dev_server.user_id != user_id {
- return Err(anyhow!("not your dev server"))?;
- }
- let mut project = project.into_active_model();
- project.paths = ActiveValue::Set(dev_server_project::JSONPaths(paths));
- let project = project.update(&*tx).await?;
-
- let status = self
- .dev_server_projects_update_internal(user_id, &tx)
- .await?;
-
- Ok((project, status))
- })
- .await
- }
-
- pub async fn delete_dev_server_project(
- &self,
- dev_server_project_id: DevServerProjectId,
- dev_server_id: DevServerId,
- user_id: UserId,
- ) -> crate::Result<(Vec<proto::DevServerProject>, proto::DevServerProjectsUpdate)> {
- self.transaction(|tx| async move {
- project::Entity::delete_many()
- .filter(project::Column::DevServerProjectId.eq(dev_server_project_id))
- .exec(&*tx)
- .await?;
- let result = dev_server_project::Entity::delete_by_id(dev_server_project_id)
- .exec(&*tx)
- .await?;
- if result.rows_affected != 1 {
- return Err(anyhow!(
- "no dev server project with id {}",
- dev_server_project_id
- ))?;
- }
-
- let status = self
- .dev_server_projects_update_internal(user_id, &tx)
- .await?;
-
- let projects = self
- .get_projects_for_dev_server_internal(dev_server_id, &tx)
- .await?;
- Ok((projects, status))
- })
- .await
- }
-
- pub async fn share_dev_server_project(
- &self,
- dev_server_project_id: DevServerProjectId,
- dev_server_id: DevServerId,
- connection: ConnectionId,
- worktrees: &[proto::WorktreeMetadata],
- ) -> crate::Result<(
- proto::DevServerProject,
- UserId,
- proto::DevServerProjectsUpdate,
- )> {
- self.transaction(|tx| async move {
- let dev_server = dev_server::Entity::find_by_id(dev_server_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?;
-
- let dev_server_project = dev_server_project::Entity::find_by_id(dev_server_project_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| {
- anyhow!("no dev server project with id {}", dev_server_project_id)
- })?;
-
- if dev_server_project.dev_server_id != dev_server_id {
- return Err(anyhow!("dev server project shared from wrong server"))?;
- }
-
- let project = project::ActiveModel {
- room_id: ActiveValue::Set(None),
- host_user_id: ActiveValue::Set(None),
- host_connection_id: ActiveValue::set(Some(connection.id as i32)),
- host_connection_server_id: ActiveValue::set(Some(ServerId(
- connection.owner_id as i32,
- ))),
- id: ActiveValue::NotSet,
- hosted_project_id: ActiveValue::Set(None),
- dev_server_project_id: ActiveValue::Set(Some(dev_server_project_id)),
- }
- .insert(&*tx)
- .await?;
-
- if !worktrees.is_empty() {
- worktree::Entity::insert_many(worktrees.iter().map(|worktree| {
- worktree::ActiveModel {
- id: ActiveValue::set(worktree.id as i64),
- project_id: ActiveValue::set(project.id),
- abs_path: ActiveValue::set(worktree.abs_path.clone()),
- root_name: ActiveValue::set(worktree.root_name.clone()),
- visible: ActiveValue::set(worktree.visible),
- scan_id: ActiveValue::set(0),
- completed_scan_id: ActiveValue::set(0),
- }
- }))
- .exec(&*tx)
- .await?;
- }
-
- let status = self
- .dev_server_projects_update_internal(dev_server.user_id, &tx)
- .await?;
-
- Ok((
- dev_server_project.to_proto(Some(project)),
- dev_server.user_id,
- status,
- ))
- })
- .await
- }
-
- pub async fn reshare_dev_server_projects(
- &self,
- reshared_projects: &Vec<proto::UpdateProject>,
- dev_server_id: DevServerId,
- connection: ConnectionId,
- ) -> crate::Result<Vec<ResharedProject>> {
- self.transaction(|tx| async move {
- let mut ret = Vec::new();
- for reshared_project in reshared_projects {
- let project_id = ProjectId::from_proto(reshared_project.project_id);
- let (project, dev_server_project) = project::Entity::find_by_id(project_id)
- .find_also_related(dev_server_project::Entity)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("project does not exist"))?;
-
- if dev_server_project.map(|rp| rp.dev_server_id) != Some(dev_server_id) {
- return Err(anyhow!("dev server project reshared from wrong server"))?;
- }
-
- let Ok(old_connection_id) = project.host_connection() else {
- return Err(anyhow!("dev server project was not shared"))?;
- };
-
- project::Entity::update(project::ActiveModel {
- id: ActiveValue::set(project_id),
- host_connection_id: ActiveValue::set(Some(connection.id as i32)),
- host_connection_server_id: ActiveValue::set(Some(ServerId(
- connection.owner_id as i32,
- ))),
- ..Default::default()
- })
- .exec(&*tx)
- .await?;
-
- let collaborators = project
- .find_related(project_collaborator::Entity)
- .all(&*tx)
- .await?;
-
- self.update_project_worktrees(project_id, &reshared_project.worktrees, &tx)
- .await?;
-
- ret.push(super::ResharedProject {
- id: project_id,
- old_connection_id,
- collaborators: collaborators
- .iter()
- .map(|collaborator| super::ProjectCollaborator {
- connection_id: collaborator.connection(),
- user_id: collaborator.user_id,
- replica_id: collaborator.replica_id,
- is_host: collaborator.is_host,
- })
- .collect(),
- worktrees: reshared_project.worktrees.clone(),
- });
- }
- Ok(ret)
- })
- .await
- }
-
- pub async fn rejoin_dev_server_projects(
- &self,
- rejoined_projects: &Vec<proto::RejoinProject>,
- user_id: UserId,
- connection_id: ConnectionId,
- ) -> crate::Result<Vec<RejoinedProject>> {
- self.transaction(|tx| async move {
- let mut ret = Vec::new();
- for rejoined_project in rejoined_projects {
- if let Some(project) = self
- .rejoin_project_internal(&tx, rejoined_project, user_id, connection_id)
- .await?
- {
- ret.push(project);
- }
- }
- Ok(ret)
- })
- .await
- }
-}
@@ -1,222 +1 @@
-use rpc::proto;
-use sea_orm::{
- ActiveValue, ColumnTrait, DatabaseTransaction, EntityTrait, IntoActiveModel, QueryFilter,
-};
-use super::{dev_server, dev_server_project, Database, DevServerId, UserId};
-
-impl Database {
- pub async fn get_dev_server(
- &self,
- dev_server_id: DevServerId,
- ) -> crate::Result<dev_server::Model> {
- self.transaction(|tx| async move {
- Ok(dev_server::Entity::find_by_id(dev_server_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow::anyhow!("no dev server with id {}", dev_server_id))?)
- })
- .await
- }
-
- pub async fn get_dev_server_for_user(
- &self,
- dev_server_id: DevServerId,
- user_id: UserId,
- ) -> crate::Result<dev_server::Model> {
- self.transaction(|tx| async move {
- let server = dev_server::Entity::find_by_id(dev_server_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow::anyhow!("no dev server with id {}", dev_server_id))?;
- if server.user_id != user_id {
- return Err(anyhow::anyhow!(
- "dev server {} is not owned by user {}",
- dev_server_id,
- user_id
- ))?;
- }
- Ok(server)
- })
- .await
- }
-
- pub async fn get_dev_servers(&self, user_id: UserId) -> crate::Result<Vec<dev_server::Model>> {
- self.transaction(|tx| async move {
- Ok(dev_server::Entity::find()
- .filter(dev_server::Column::UserId.eq(user_id))
- .all(&*tx)
- .await?)
- })
- .await
- }
-
- pub async fn dev_server_projects_update(
- &self,
- user_id: UserId,
- ) -> crate::Result<proto::DevServerProjectsUpdate> {
- self.transaction(|tx| async move {
- self.dev_server_projects_update_internal(user_id, &tx).await
- })
- .await
- }
-
- pub async fn dev_server_projects_update_internal(
- &self,
- user_id: UserId,
- tx: &DatabaseTransaction,
- ) -> crate::Result<proto::DevServerProjectsUpdate> {
- let dev_servers = dev_server::Entity::find()
- .filter(dev_server::Column::UserId.eq(user_id))
- .all(tx)
- .await?;
-
- let dev_server_projects = dev_server_project::Entity::find()
- .filter(
- dev_server_project::Column::DevServerId
- .is_in(dev_servers.iter().map(|d| d.id).collect::<Vec<_>>()),
- )
- .find_also_related(super::project::Entity)
- .all(tx)
- .await?;
-
- Ok(proto::DevServerProjectsUpdate {
- dev_servers: dev_servers
- .into_iter()
- .map(|d| d.to_proto(proto::DevServerStatus::Offline))
- .collect(),
- dev_server_projects: dev_server_projects
- .into_iter()
- .map(|(dev_server_project, project)| dev_server_project.to_proto(project))
- .collect(),
- })
- }
-
- pub async fn create_dev_server(
- &self,
- name: &str,
- ssh_connection_string: Option<&str>,
- hashed_access_token: &str,
- user_id: UserId,
- ) -> crate::Result<(dev_server::Model, proto::DevServerProjectsUpdate)> {
- self.transaction(|tx| async move {
- if name.trim().is_empty() {
- return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
- }
-
- let dev_server = dev_server::Entity::insert(dev_server::ActiveModel {
- id: ActiveValue::NotSet,
- hashed_token: ActiveValue::Set(hashed_access_token.to_string()),
- name: ActiveValue::Set(name.trim().to_string()),
- user_id: ActiveValue::Set(user_id),
- ssh_connection_string: ActiveValue::Set(
- ssh_connection_string.map(ToOwned::to_owned),
- ),
- })
- .exec_with_returning(&*tx)
- .await?;
-
- let dev_server_projects = self
- .dev_server_projects_update_internal(user_id, &tx)
- .await?;
-
- Ok((dev_server, dev_server_projects))
- })
- .await
- }
-
- pub async fn update_dev_server_token(
- &self,
- id: DevServerId,
- hashed_token: &str,
- user_id: UserId,
- ) -> crate::Result<proto::DevServerProjectsUpdate> {
- self.transaction(|tx| async move {
- let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
- return Err(anyhow::anyhow!("no dev server with id {}", id))?;
- };
- if dev_server.user_id != user_id {
- return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
- }
-
- dev_server::Entity::update(dev_server::ActiveModel {
- hashed_token: ActiveValue::Set(hashed_token.to_string()),
- ..dev_server.clone().into_active_model()
- })
- .exec(&*tx)
- .await?;
-
- let dev_server_projects = self
- .dev_server_projects_update_internal(user_id, &tx)
- .await?;
-
- Ok(dev_server_projects)
- })
- .await
- }
-
- pub async fn rename_dev_server(
- &self,
- id: DevServerId,
- name: &str,
- ssh_connection_string: Option<&str>,
- user_id: UserId,
- ) -> crate::Result<proto::DevServerProjectsUpdate> {
- self.transaction(|tx| async move {
- let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
- return Err(anyhow::anyhow!("no dev server with id {}", id))?;
- };
- if dev_server.user_id != user_id || name.trim().is_empty() {
- return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
- }
-
- dev_server::Entity::update(dev_server::ActiveModel {
- name: ActiveValue::Set(name.trim().to_string()),
- ssh_connection_string: ActiveValue::Set(
- ssh_connection_string.map(ToOwned::to_owned),
- ),
- ..dev_server.clone().into_active_model()
- })
- .exec(&*tx)
- .await?;
-
- let dev_server_projects = self
- .dev_server_projects_update_internal(user_id, &tx)
- .await?;
-
- Ok(dev_server_projects)
- })
- .await
- }
-
- pub async fn delete_dev_server(
- &self,
- id: DevServerId,
- user_id: UserId,
- ) -> crate::Result<proto::DevServerProjectsUpdate> {
- self.transaction(|tx| async move {
- let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
- return Err(anyhow::anyhow!("no dev server with id {}", id))?;
- };
- if dev_server.user_id != user_id {
- return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
- }
-
- dev_server_project::Entity::delete_many()
- .filter(dev_server_project::Column::DevServerId.eq(id))
- .exec(&*tx)
- .await?;
-
- dev_server::Entity::delete(dev_server.into_active_model())
- .exec(&*tx)
- .await?;
-
- let dev_server_projects = self
- .dev_server_projects_update_internal(user_id, &tx)
- .await?;
-
- Ok(dev_server_projects)
- })
- .await
- }
-}
@@ -1,85 +0,0 @@
-use rpc::{proto, ErrorCode};
-
-use super::*;
-
-impl Database {
- pub async fn get_hosted_projects(
- &self,
- channel_ids: &[ChannelId],
- roles: &HashMap<ChannelId, ChannelRole>,
- tx: &DatabaseTransaction,
- ) -> Result<Vec<proto::HostedProject>> {
- let projects = hosted_project::Entity::find()
- .find_also_related(project::Entity)
- .filter(hosted_project::Column::ChannelId.is_in(channel_ids.iter().map(|id| id.0)))
- .all(tx)
- .await?
- .into_iter()
- .flat_map(|(hosted_project, project)| {
- if hosted_project.deleted_at.is_some() {
- return None;
- }
- match hosted_project.visibility {
- ChannelVisibility::Public => {}
- ChannelVisibility::Members => {
- let is_visible = roles
- .get(&hosted_project.channel_id)
- .map(|role| role.can_see_all_descendants())
- .unwrap_or(false);
- if !is_visible {
- return None;
- }
- }
- };
- Some(proto::HostedProject {
- project_id: project?.id.to_proto(),
- channel_id: hosted_project.channel_id.to_proto(),
- name: hosted_project.name.clone(),
- visibility: hosted_project.visibility.into(),
- })
- })
- .collect();
-
- Ok(projects)
- }
-
- pub async fn get_hosted_project(
- &self,
- hosted_project_id: HostedProjectId,
- user_id: UserId,
- tx: &DatabaseTransaction,
- ) -> Result<(hosted_project::Model, ChannelRole)> {
- let project = hosted_project::Entity::find_by_id(hosted_project_id)
- .one(tx)
- .await?
- .ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?;
- let channel = channel::Entity::find_by_id(project.channel_id)
- .one(tx)
- .await?
- .ok_or_else(|| anyhow!(ErrorCode::NoSuchChannel))?;
-
- let role = match project.visibility {
- ChannelVisibility::Public => {
- self.check_user_is_channel_participant(&channel, user_id, tx)
- .await?
- }
- ChannelVisibility::Members => {
- self.check_user_is_channel_member(&channel, user_id, tx)
- .await?
- }
- };
-
- Ok((project, role))
- }
-
- pub async fn is_hosted_project(&self, project_id: ProjectId) -> Result<bool> {
- self.transaction(|tx| async move {
- Ok(project::Entity::find_by_id(project_id)
- .one(&*tx)
- .await?
- .map(|project| project.hosted_project_id.is_some())
- .ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?)
- })
- .await
- }
-}
@@ -32,7 +32,6 @@ impl Database {
connection: ConnectionId,
worktrees: &[proto::WorktreeMetadata],
is_ssh_project: bool,
- dev_server_project_id: Option<DevServerProjectId>,
) -> Result<TransactionGuard<(ProjectId, proto::Room)>> {
self.room_transaction(room_id, |tx| async move {
let participant = room_participant::Entity::find()
@@ -61,38 +60,6 @@ impl Database {
return Err(anyhow!("guests cannot share projects"))?;
}
- if let Some(dev_server_project_id) = dev_server_project_id {
- let project = project::Entity::find()
- .filter(project::Column::DevServerProjectId.eq(Some(dev_server_project_id)))
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no remote project"))?;
-
- let (_, dev_server) = dev_server_project::Entity::find_by_id(dev_server_project_id)
- .find_also_related(dev_server::Entity)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no dev_server_project"))?;
-
- if !dev_server.is_some_and(|dev_server| dev_server.user_id == participant.user_id) {
- return Err(anyhow!("not your dev server"))?;
- }
-
- if project.room_id.is_some() {
- return Err(anyhow!("project already shared"))?;
- };
-
- let project = project::Entity::update(project::ActiveModel {
- room_id: ActiveValue::Set(Some(room_id)),
- ..project.into_active_model()
- })
- .exec(&*tx)
- .await?;
-
- let room = self.get_room(room_id, &tx).await?;
- return Ok((project.id, room));
- }
-
let project = project::ActiveModel {
room_id: ActiveValue::set(Some(participant.room_id)),
host_user_id: ActiveValue::set(Some(participant.user_id)),
@@ -101,8 +68,6 @@ impl Database {
connection.owner_id as i32,
))),
id: ActiveValue::NotSet,
- hosted_project_id: ActiveValue::Set(None),
- dev_server_project_id: ActiveValue::Set(None),
}
.insert(&*tx)
.await?;
@@ -156,7 +121,6 @@ impl Database {
&self,
project_id: ProjectId,
connection: ConnectionId,
- user_id: Option<UserId>,
) -> Result<TransactionGuard<(bool, Option<proto::Room>, Vec<ConnectionId>)>> {
self.project_transaction(project_id, |tx| async move {
let guest_connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
@@ -172,25 +136,6 @@ impl Database {
if project.host_connection()? == connection {
return Ok((true, room, guest_connection_ids));
}
- if let Some(dev_server_project_id) = project.dev_server_project_id {
- if let Some(user_id) = user_id {
- if user_id
- != self
- .owner_for_dev_server_project(dev_server_project_id, &tx)
- .await?
- {
- Err(anyhow!("cannot unshare a project hosted by another user"))?
- }
- project::Entity::update(project::ActiveModel {
- room_id: ActiveValue::Set(None),
- ..project.into_active_model()
- })
- .exec(&*tx)
- .await?;
- return Ok((false, room, guest_connection_ids));
- }
- }
-
Err(anyhow!("cannot unshare a project hosted by another user"))?
})
.await
@@ -590,39 +535,6 @@ impl Database {
.await
}
- /// Adds the given connection to the specified hosted project
- pub async fn join_hosted_project(
- &self,
- id: ProjectId,
- user_id: UserId,
- connection: ConnectionId,
- ) -> Result<(Project, ReplicaId)> {
- self.transaction(|tx| async move {
- let (project, hosted_project) = project::Entity::find_by_id(id)
- .find_also_related(hosted_project::Entity)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("hosted project is no longer shared"))?;
-
- let Some(hosted_project) = hosted_project else {
- return Err(anyhow!("project is not hosted"))?;
- };
-
- let channel = channel::Entity::find_by_id(hosted_project.channel_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such channel"))?;
-
- let role = self
- .check_user_is_channel_participant(&channel, user_id, &tx)
- .await?;
-
- self.join_project_internal(project, user_id, connection, role, &tx)
- .await
- })
- .await
- }
-
pub async fn get_project(&self, id: ProjectId) -> Result<project::Model> {
self.transaction(|tx| async move {
Ok(project::Entity::find_by_id(id)
@@ -633,17 +545,6 @@ impl Database {
.await
}
- pub async fn find_dev_server_project(&self, id: DevServerProjectId) -> Result<project::Model> {
- self.transaction(|tx| async move {
- Ok(project::Entity::find()
- .filter(project::Column::DevServerProjectId.eq(id))
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such project"))?)
- })
- .await
- }
-
/// Adds the given connection to the specified project
/// in the current room.
pub async fn join_project(
@@ -654,13 +555,7 @@ impl Database {
) -> Result<TransactionGuard<(Project, ReplicaId)>> {
self.project_transaction(project_id, |tx| async move {
let (project, role) = self
- .access_project(
- project_id,
- connection,
- PrincipalId::UserId(user_id),
- Capability::ReadOnly,
- &tx,
- )
+ .access_project(project_id, connection, Capability::ReadOnly, &tx)
.await?;
self.join_project_internal(project, user_id, connection, role, &tx)
.await
@@ -851,7 +746,6 @@ impl Database {
worktree_id: None,
})
.collect(),
- dev_server_project_id: project.dev_server_project_id,
};
Ok((project, replica_id as ReplicaId))
}
@@ -1007,29 +901,14 @@ impl Database {
&self,
project_id: ProjectId,
connection_id: ConnectionId,
- principal_id: PrincipalId,
capability: Capability,
tx: &DatabaseTransaction,
) -> Result<(project::Model, ChannelRole)> {
- let (mut project, dev_server_project) = project::Entity::find_by_id(project_id)
- .find_also_related(dev_server_project::Entity)
+ let project = project::Entity::find_by_id(project_id)
.one(tx)
.await?
.ok_or_else(|| anyhow!("no such project"))?;
- let user_id = match principal_id {
- PrincipalId::DevServerId(_) => {
- if project
- .host_connection()
- .is_ok_and(|connection| connection == connection_id)
- {
- return Ok((project, ChannelRole::Admin));
- }
- return Err(anyhow!("not the project host"))?;
- }
- PrincipalId::UserId(user_id) => user_id,
- };
-
let role_from_room = if let Some(room_id) = project.room_id {
room_participant::Entity::find()
.filter(room_participant::Column::RoomId.eq(room_id))
@@ -1040,34 +919,8 @@ impl Database {
} else {
None
};
- let role_from_dev_server = if let Some(dev_server_project) = dev_server_project {
- let dev_server = dev_server::Entity::find_by_id(dev_server_project.dev_server_id)
- .one(tx)
- .await?
- .ok_or_else(|| anyhow!("no such channel"))?;
- if user_id == dev_server.user_id {
- // If the user left the room "uncleanly" they may rejoin the
- // remote project before leave_room runs. IN that case kick
- // the project out of the room pre-emptively.
- if role_from_room.is_none() {
- project = project::Entity::update(project::ActiveModel {
- room_id: ActiveValue::Set(None),
- ..project.into_active_model()
- })
- .exec(tx)
- .await?;
- }
- Some(ChannelRole::Admin)
- } else {
- None
- }
- } else {
- None
- };
- let role = role_from_dev_server
- .or(role_from_room)
- .unwrap_or(ChannelRole::Banned);
+ let role = role_from_room.unwrap_or(ChannelRole::Banned);
match capability {
Capability::ReadWrite => {
@@ -1090,17 +943,10 @@ impl Database {
&self,
project_id: ProjectId,
connection_id: ConnectionId,
- user_id: UserId,
) -> Result<ConnectionId> {
self.project_transaction(project_id, |tx| async move {
let (project, _) = self
- .access_project(
- project_id,
- connection_id,
- PrincipalId::UserId(user_id),
- Capability::ReadOnly,
- &tx,
- )
+ .access_project(project_id, connection_id, Capability::ReadOnly, &tx)
.await?;
project.host_connection()
})
@@ -1113,17 +959,10 @@ impl Database {
&self,
project_id: ProjectId,
connection_id: ConnectionId,
- user_id: UserId,
) -> Result<ConnectionId> {
self.project_transaction(project_id, |tx| async move {
let (project, _) = self
- .access_project(
- project_id,
- connection_id,
- PrincipalId::UserId(user_id),
- Capability::ReadWrite,
- &tx,
- )
+ .access_project(project_id, connection_id, Capability::ReadWrite, &tx)
.await?;
project.host_connection()
})
@@ -1131,47 +970,16 @@ impl Database {
.map(|guard| guard.into_inner())
}
- /// Returns the host connection for a request to join a shared project.
- pub async fn host_for_owner_project_request(
- &self,
- project_id: ProjectId,
- _connection_id: ConnectionId,
- user_id: UserId,
- ) -> Result<ConnectionId> {
- self.project_transaction(project_id, |tx| async move {
- let (project, dev_server_project) = project::Entity::find_by_id(project_id)
- .find_also_related(dev_server_project::Entity)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such project"))?;
-
- let Some(dev_server_project) = dev_server_project else {
- return Err(anyhow!("not a dev server project"))?;
- };
- let dev_server = dev_server::Entity::find_by_id(dev_server_project.dev_server_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such dev server"))?;
- if dev_server.user_id != user_id {
- return Err(anyhow!("not your project"))?;
- }
- project.host_connection()
- })
- .await
- .map(|guard| guard.into_inner())
- }
-
pub async fn connections_for_buffer_update(
&self,
project_id: ProjectId,
- principal_id: PrincipalId,
connection_id: ConnectionId,
capability: Capability,
) -> Result<TransactionGuard<(ConnectionId, Vec<ConnectionId>)>> {
self.project_transaction(project_id, |tx| async move {
// Authorize
let (project, _) = self
- .access_project(project_id, connection_id, principal_id, capability, &tx)
+ .access_project(project_id, connection_id, capability, &tx)
.await?;
let host_connection_id = project.host_connection()?;
@@ -858,25 +858,6 @@ impl Database {
.all(&*tx)
.await?;
- // if any project in the room has a remote-project-id that belongs to a dev server that this user owns.
- let dev_server_projects_for_user = self
- .dev_server_project_ids_for_user(leaving_participant.user_id, &tx)
- .await?;
-
- let dev_server_projects_to_unshare = project::Entity::find()
- .filter(
- Condition::all()
- .add(project::Column::RoomId.eq(room_id))
- .add(
- project::Column::DevServerProjectId
- .is_in(dev_server_projects_for_user.clone()),
- ),
- )
- .all(&*tx)
- .await?
- .into_iter()
- .map(|project| project.id)
- .collect::<HashSet<_>>();
let mut left_projects = HashMap::default();
let mut collaborators = project_collaborator::Entity::find()
.filter(project_collaborator::Column::ProjectId.is_in(project_ids))
@@ -899,9 +880,7 @@ impl Database {
left_project.connection_ids.push(collaborator_connection_id);
}
- if (collaborator.is_host && collaborator.connection() == connection)
- || dev_server_projects_to_unshare.contains(&collaborator.project_id)
- {
+ if collaborator.is_host && collaborator.connection() == connection {
left_project.should_unshare = true;
}
}
@@ -944,17 +923,6 @@ impl Database {
.exec(&*tx)
.await?;
- if !dev_server_projects_to_unshare.is_empty() {
- project::Entity::update_many()
- .filter(project::Column::Id.is_in(dev_server_projects_to_unshare))
- .set(project::ActiveModel {
- room_id: ActiveValue::Set(None),
- ..Default::default()
- })
- .exec(&*tx)
- .await?;
- }
-
let (channel, room) = self.get_channel_room(room_id, &tx).await?;
let deleted = if room.participants.is_empty() {
let result = room::Entity::delete_by_id(room_id).exec(&*tx).await?;
@@ -1323,26 +1291,6 @@ impl Database {
project.worktree_root_names.push(db_worktree.root_name);
}
}
- } else if let Some(dev_server_project_id) = db_project.dev_server_project_id {
- let host = self
- .owner_for_dev_server_project(dev_server_project_id, tx)
- .await?;
- if let Some((_, participant)) = participants
- .iter_mut()
- .find(|(_, v)| v.user_id == host.to_proto())
- {
- participant.projects.push(proto::ParticipantProject {
- id: db_project.id.to_proto(),
- worktree_root_names: Default::default(),
- });
- let project = participant.projects.last_mut().unwrap();
-
- for db_worktree in db_worktrees {
- if db_worktree.visible {
- project.worktree_root_names.push(db_worktree.root_name);
- }
- }
- }
}
}
@@ -13,14 +13,11 @@ pub mod channel_message;
pub mod channel_message_mention;
pub mod contact;
pub mod contributor;
-pub mod dev_server;
-pub mod dev_server_project;
pub mod embedding;
pub mod extension;
pub mod extension_version;
pub mod feature_flag;
pub mod follower;
-pub mod hosted_project;
pub mod language_server;
pub mod notification;
pub mod notification_kind;
@@ -1,39 +0,0 @@
-use crate::db::{DevServerId, UserId};
-use rpc::proto;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "dev_servers")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: DevServerId,
- pub name: String,
- pub user_id: UserId,
- pub hashed_token: String,
- pub ssh_connection_string: Option<String>,
-}
-
-impl ActiveModelBehavior for ActiveModel {}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(has_many = "super::dev_server_project::Entity")]
- RemoteProject,
-}
-
-impl Related<super::dev_server_project::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::RemoteProject.def()
- }
-}
-
-impl Model {
- pub fn to_proto(&self, status: proto::DevServerStatus) -> proto::DevServer {
- proto::DevServer {
- dev_server_id: self.id.to_proto(),
- name: self.name.clone(),
- status: status as i32,
- ssh_connection_string: self.ssh_connection_string.clone(),
- }
- }
-}
@@ -1,59 +0,0 @@
-use super::project;
-use crate::db::{DevServerId, DevServerProjectId};
-use rpc::proto;
-use sea_orm::{entity::prelude::*, FromJsonQueryResult};
-use serde::{Deserialize, Serialize};
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "dev_server_projects")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: DevServerProjectId,
- pub dev_server_id: DevServerId,
- pub paths: JSONPaths,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
-pub struct JSONPaths(pub Vec<String>);
-
-impl ActiveModelBehavior for ActiveModel {}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(has_one = "super::project::Entity")]
- Project,
- #[sea_orm(
- belongs_to = "super::dev_server::Entity",
- from = "Column::DevServerId",
- to = "super::dev_server::Column::Id"
- )]
- DevServer,
-}
-
-impl Related<super::project::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Project.def()
- }
-}
-
-impl Related<super::dev_server::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::DevServer.def()
- }
-}
-
-impl Model {
- pub fn to_proto(&self, project: Option<project::Model>) -> proto::DevServerProject {
- proto::DevServerProject {
- id: self.id.to_proto(),
- project_id: project.map(|p| p.id.to_proto()),
- dev_server_id: self.dev_server_id.to_proto(),
- path: self.paths().first().cloned().unwrap_or_default(),
- paths: self.paths().clone(),
- }
- }
-
- pub fn paths(&self) -> &Vec<String> {
- &self.paths.0
- }
-}
@@ -1,27 +0,0 @@
-use crate::db::{ChannelId, ChannelVisibility, HostedProjectId};
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "hosted_projects")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: HostedProjectId,
- pub channel_id: ChannelId,
- pub name: String,
- pub visibility: ChannelVisibility,
- pub deleted_at: Option<DateTime>,
-}
-
-impl ActiveModelBehavior for ActiveModel {}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(has_one = "super::project::Entity")]
- Project,
-}
-
-impl Related<super::project::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Project.def()
- }
-}
@@ -1,4 +1,4 @@
-use crate::db::{DevServerProjectId, HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId};
+use crate::db::{ProjectId, Result, RoomId, ServerId, UserId};
use anyhow::anyhow;
use rpc::ConnectionId;
use sea_orm::entity::prelude::*;
@@ -12,8 +12,6 @@ pub struct Model {
pub host_user_id: Option<UserId>,
pub host_connection_id: Option<i32>,
pub host_connection_server_id: Option<ServerId>,
- pub hosted_project_id: Option<HostedProjectId>,
- pub dev_server_project_id: Option<DevServerProjectId>,
}
impl Model {
@@ -51,18 +49,6 @@ pub enum Relation {
Collaborators,
#[sea_orm(has_many = "super::language_server::Entity")]
LanguageServers,
- #[sea_orm(
- belongs_to = "super::hosted_project::Entity",
- from = "Column::HostedProjectId",
- to = "super::hosted_project::Column::Id"
- )]
- HostedProject,
- #[sea_orm(
- belongs_to = "super::dev_server_project::Entity",
- from = "Column::DevServerProjectId",
- to = "super::dev_server_project::Column::Id"
- )]
- RemoteProject,
}
impl Related<super::user::Entity> for Entity {
@@ -95,16 +81,4 @@ impl Related<super::language_server::Entity> for Entity {
}
}
-impl Related<super::hosted_project::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::HostedProject.def()
- }
-}
-
-impl Related<super::dev_server_project::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::RemoteProject.def()
- }
-}
-
impl ActiveModelBehavior for ActiveModel {}
@@ -540,18 +540,18 @@ async fn test_project_count(db: &Arc<Database>) {
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
- db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, None)
+ db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false)
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1);
- db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, None)
+ db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false)
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
// Projects shared by admins aren't counted.
- db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false, None)
+ db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false)
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
@@ -449,6 +449,10 @@ async fn check_usage_limit(
model_name: &str,
claims: &LlmTokenClaims,
) -> Result<()> {
+ if claims.is_staff {
+ return Ok(());
+ }
+
let model = state.db.model(provider, model_name)?;
let usage = state
.db
@@ -513,11 +517,6 @@ async fn check_usage_limit(
];
for (used, limit, usage_measure) in checks {
- // Temporarily bypass rate-limiting for staff members.
- if claims.is_staff {
- continue;
- }
-
if used > limit {
let resource = match usage_measure {
UsageMeasure::RequestsPerMinute => "requests_per_minute",
@@ -5,11 +5,10 @@ use crate::llm::LlmTokenClaims;
use crate::{
auth,
db::{
- self, dev_server, BufferId, Capability, Channel, ChannelId, ChannelRole, ChannelsForUser,
- CreatedChannelMessage, Database, DevServerId, DevServerProjectId, InviteMemberResult,
- MembershipUpdated, MessageId, NotificationId, PrincipalId, Project, ProjectId,
- RejoinedProject, RemoveChannelMemberResult, ReplicaId, RespondToChannelInvite, RoomId,
- ServerId, UpdatedChannelMessage, User, UserId,
+ self, BufferId, Capability, Channel, ChannelId, ChannelRole, ChannelsForUser,
+ CreatedChannelMessage, Database, InviteMemberResult, MembershipUpdated, MessageId,
+ NotificationId, Project, ProjectId, RejoinedProject, RemoveChannelMemberResult, ReplicaId,
+ RespondToChannelInvite, RoomId, ServerId, UpdatedChannelMessage, User, UserId,
},
executor::Executor,
AppState, Config, Error, RateLimit, Result,
@@ -42,10 +41,8 @@ use sha2::Digest;
use supermaven_api::{CreateExternalUserRequest, SupermavenAdminApi};
use futures::{
- channel::oneshot,
- future::{self, BoxFuture},
- stream::FuturesUnordered,
- FutureExt, SinkExt, StreamExt, TryStreamExt,
+ channel::oneshot, future::BoxFuture, stream::FuturesUnordered, FutureExt, SinkExt, StreamExt,
+ TryStreamExt,
};
use prometheus::{register_int_gauge, IntGauge};
use rpc::{
@@ -109,7 +106,6 @@ impl<R: RequestMessage> Response<R> {
pub enum Principal {
User(User),
Impersonated { user: User, admin: User },
- DevServer(dev_server::Model),
}
impl Principal {
@@ -124,9 +120,6 @@ impl Principal {
span.record("login", &user.github_login);
span.record("impersonator", &admin.github_login);
}
- Principal::DevServer(dev_server) => {
- span.record("dev_server_id", dev_server.id.0);
- }
}
}
}
@@ -167,27 +160,10 @@ impl Session {
}
}
- fn for_user(self) -> Option<UserSession> {
- UserSession::new(self)
- }
-
- fn for_dev_server(self) -> Option<DevServerSession> {
- DevServerSession::new(self)
- }
-
- fn user_id(&self) -> Option<UserId> {
- match &self.principal {
- Principal::User(user) => Some(user.id),
- Principal::Impersonated { user, .. } => Some(user.id),
- Principal::DevServer(_) => None,
- }
- }
-
fn is_staff(&self) -> bool {
match &self.principal {
Principal::User(user) => user.admin,
Principal::Impersonated { .. } => true,
- Principal::DevServer(_) => false,
}
}
@@ -199,9 +175,7 @@ impl Session {
return Ok(true);
}
- let Some(user_id) = self.user_id() else {
- return Ok(false);
- };
+ let user_id = self.user_id();
Ok(db.has_active_billing_subscription(user_id).await?)
}
@@ -217,18 +191,17 @@ impl Session {
}
}
- fn dev_server_id(&self) -> Option<DevServerId> {
+ fn user_id(&self) -> UserId {
match &self.principal {
- Principal::User(_) | Principal::Impersonated { .. } => None,
- Principal::DevServer(dev_server) => Some(dev_server.id),
+ Principal::User(user) => user.id,
+ Principal::Impersonated { user, .. } => user.id,
}
}
- fn principal_id(&self) -> PrincipalId {
+ pub fn email(&self) -> Option<String> {
match &self.principal {
- Principal::User(user) => PrincipalId::UserId(user.id),
- Principal::Impersonated { user, .. } => PrincipalId::UserId(user.id),
- Principal::DevServer(dev_server) => PrincipalId::DevServerId(dev_server.id),
+ Principal::User(user) => user.email_address.clone(),
+ Principal::Impersonated { user, .. } => user.email_address.clone(),
}
}
}
@@ -244,143 +217,11 @@ impl Debug for Session {
result.field("user", &user.github_login);
result.field("impersonator", &admin.github_login);
}
- Principal::DevServer(dev_server) => {
- result.field("dev_server", &dev_server.id);
- }
}
result.field("connection_id", &self.connection_id).finish()
}
}
-struct UserSession(Session);
-
-impl UserSession {
- pub fn new(s: Session) -> Option<Self> {
- s.user_id().map(|_| UserSession(s))
- }
- pub fn user_id(&self) -> UserId {
- self.0.user_id().unwrap()
- }
-
- pub fn email(&self) -> Option<String> {
- match &self.0.principal {
- Principal::User(user) => user.email_address.clone(),
- Principal::Impersonated { user, .. } => user.email_address.clone(),
- Principal::DevServer(..) => None,
- }
- }
-}
-
-impl Deref for UserSession {
- type Target = Session;
-
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-impl DerefMut for UserSession {
- fn deref_mut(&mut self) -> &mut Self::Target {
- &mut self.0
- }
-}
-
-struct DevServerSession(Session);
-
-impl DevServerSession {
- pub fn new(s: Session) -> Option<Self> {
- s.dev_server_id().map(|_| DevServerSession(s))
- }
- pub fn dev_server_id(&self) -> DevServerId {
- self.0.dev_server_id().unwrap()
- }
-
- fn dev_server(&self) -> &dev_server::Model {
- match &self.0.principal {
- Principal::DevServer(dev_server) => dev_server,
- _ => unreachable!(),
- }
- }
-}
-
-impl Deref for DevServerSession {
- type Target = Session;
-
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-impl DerefMut for DevServerSession {
- fn deref_mut(&mut self) -> &mut Self::Target {
- &mut self.0
- }
-}
-
-fn user_handler<M: RequestMessage, Fut>(
- handler: impl 'static + Send + Sync + Fn(M, Response<M>, UserSession) -> Fut,
-) -> impl 'static + Send + Sync + Fn(M, Response<M>, Session) -> BoxFuture<'static, Result<()>>
-where
- Fut: Send + Future<Output = Result<()>>,
-{
- let handler = Arc::new(handler);
- move |message, response, session| {
- let handler = handler.clone();
- Box::pin(async move {
- if let Some(user_session) = session.for_user() {
- Ok(handler(message, response, user_session).await?)
- } else {
- Err(Error::Internal(anyhow!(
- "must be a user to call {}",
- M::NAME
- )))
- }
- })
- }
-}
-
-fn dev_server_handler<M: RequestMessage, Fut>(
- handler: impl 'static + Send + Sync + Fn(M, Response<M>, DevServerSession) -> Fut,
-) -> impl 'static + Send + Sync + Fn(M, Response<M>, Session) -> BoxFuture<'static, Result<()>>
-where
- Fut: Send + Future<Output = Result<()>>,
-{
- let handler = Arc::new(handler);
- move |message, response, session| {
- let handler = handler.clone();
- Box::pin(async move {
- if let Some(dev_server_session) = session.for_dev_server() {
- Ok(handler(message, response, dev_server_session).await?)
- } else {
- Err(Error::Internal(anyhow!(
- "must be a dev server to call {}",
- M::NAME
- )))
- }
- })
- }
-}
-
-fn user_message_handler<M: EnvelopedMessage, InnertRetFut>(
- handler: impl 'static + Send + Sync + Fn(M, UserSession) -> InnertRetFut,
-) -> impl 'static + Send + Sync + Fn(M, Session) -> BoxFuture<'static, Result<()>>
-where
- InnertRetFut: Send + Future<Output = Result<()>>,
-{
- let handler = Arc::new(handler);
- move |message, session| {
- let handler = handler.clone();
- Box::pin(async move {
- if let Some(user_session) = session.for_user() {
- Ok(handler(message, user_session).await?)
- } else {
- Err(Error::Internal(anyhow!(
- "must be a user to call {}",
- M::NAME
- )))
- }
- })
- }
-}
-
struct DbHandle(Arc<Database>);
impl Deref for DbHandle {
@@ -434,141 +275,65 @@ impl Server {
server
.add_request_handler(ping)
- .add_request_handler(user_handler(create_room))
- .add_request_handler(user_handler(join_room))
- .add_request_handler(user_handler(rejoin_room))
- .add_request_handler(user_handler(leave_room))
- .add_request_handler(user_handler(set_room_participant_role))
- .add_request_handler(user_handler(call))
- .add_request_handler(user_handler(cancel_call))
- .add_message_handler(user_message_handler(decline_call))
- .add_request_handler(user_handler(update_participant_location))
- .add_request_handler(user_handler(share_project))
+ .add_request_handler(create_room)
+ .add_request_handler(join_room)
+ .add_request_handler(rejoin_room)
+ .add_request_handler(leave_room)
+ .add_request_handler(set_room_participant_role)
+ .add_request_handler(call)
+ .add_request_handler(cancel_call)
+ .add_message_handler(decline_call)
+ .add_request_handler(update_participant_location)
+ .add_request_handler(share_project)
.add_message_handler(unshare_project)
- .add_request_handler(user_handler(join_project))
- .add_request_handler(user_handler(join_hosted_project))
- .add_request_handler(user_handler(rejoin_dev_server_projects))
- .add_request_handler(user_handler(create_dev_server_project))
- .add_request_handler(user_handler(update_dev_server_project))
- .add_request_handler(user_handler(delete_dev_server_project))
- .add_request_handler(user_handler(create_dev_server))
- .add_request_handler(user_handler(regenerate_dev_server_token))
- .add_request_handler(user_handler(rename_dev_server))
- .add_request_handler(user_handler(delete_dev_server))
- .add_request_handler(user_handler(list_remote_directory))
- .add_request_handler(dev_server_handler(share_dev_server_project))
- .add_request_handler(dev_server_handler(shutdown_dev_server))
- .add_request_handler(dev_server_handler(reconnect_dev_server))
- .add_message_handler(user_message_handler(leave_project))
+ .add_request_handler(join_project)
+ .add_message_handler(leave_project)
.add_request_handler(update_project)
.add_request_handler(update_worktree)
.add_message_handler(start_language_server)
.add_message_handler(update_language_server)
.add_message_handler(update_diagnostic_summary)
.add_message_handler(update_worktree_settings)
- .add_request_handler(user_handler(
- forward_project_request_for_owner::<proto::TaskContextForLocation>,
- ))
- .add_request_handler(user_handler(
- forward_read_only_project_request::<proto::GetHover>,
- ))
- .add_request_handler(user_handler(
- forward_read_only_project_request::<proto::GetDefinition>,
- ))
- .add_request_handler(user_handler(
- forward_read_only_project_request::<proto::GetTypeDefinition>,
- ))
- .add_request_handler(user_handler(
- forward_read_only_project_request::<proto::GetReferences>,
- ))
- .add_request_handler(user_handler(forward_find_search_candidates_request))
- .add_request_handler(user_handler(
- forward_read_only_project_request::<proto::GetDocumentHighlights>,
- ))
- .add_request_handler(user_handler(
- forward_read_only_project_request::<proto::GetProjectSymbols>,
- ))
- .add_request_handler(user_handler(
- forward_read_only_project_request::<proto::OpenBufferForSymbol>,
- ))
- .add_request_handler(user_handler(
- forward_read_only_project_request::<proto::OpenBufferById>,
- ))
- .add_request_handler(user_handler(
- forward_read_only_project_request::<proto::SynchronizeBuffers>,
- ))
- .add_request_handler(user_handler(
- forward_read_only_project_request::<proto::InlayHints>,
- ))
- .add_request_handler(user_handler(
- forward_read_only_project_request::<proto::ResolveInlayHint>,
- ))
- .add_request_handler(user_handler(
- forward_read_only_project_request::<proto::OpenBufferByPath>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::GetCompletions>,
- ))
- .add_request_handler(user_handler(
+ .add_request_handler(forward_read_only_project_request::<proto::GetHover>)
+ .add_request_handler(forward_read_only_project_request::<proto::GetDefinition>)
+ .add_request_handler(forward_read_only_project_request::<proto::GetTypeDefinition>)
+ .add_request_handler(forward_read_only_project_request::<proto::GetReferences>)
+ .add_request_handler(forward_find_search_candidates_request)
+ .add_request_handler(forward_read_only_project_request::<proto::GetDocumentHighlights>)
+ .add_request_handler(forward_read_only_project_request::<proto::GetProjectSymbols>)
+ .add_request_handler(forward_read_only_project_request::<proto::OpenBufferForSymbol>)
+ .add_request_handler(forward_read_only_project_request::<proto::OpenBufferById>)
+ .add_request_handler(forward_read_only_project_request::<proto::SynchronizeBuffers>)
+ .add_request_handler(forward_read_only_project_request::<proto::InlayHints>)
+ .add_request_handler(forward_read_only_project_request::<proto::ResolveInlayHint>)
+ .add_request_handler(forward_read_only_project_request::<proto::OpenBufferByPath>)
+ .add_request_handler(forward_read_only_project_request::<proto::GitBranches>)
+ .add_request_handler(forward_mutating_project_request::<proto::UpdateGitBranch>)
+ .add_request_handler(forward_mutating_project_request::<proto::GetCompletions>)
+ .add_request_handler(
forward_mutating_project_request::<proto::ApplyCompletionAdditionalEdits>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::OpenNewBuffer>,
- ))
- .add_request_handler(user_handler(
+ )
+ .add_request_handler(forward_mutating_project_request::<proto::OpenNewBuffer>)
+ .add_request_handler(
forward_mutating_project_request::<proto::ResolveCompletionDocumentation>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::GetCodeActions>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::ApplyCodeAction>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::PrepareRename>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::PerformRename>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::ReloadBuffers>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::FormatBuffers>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::CreateProjectEntry>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::RenameProjectEntry>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::CopyProjectEntry>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::DeleteProjectEntry>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::ExpandProjectEntry>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::OnTypeFormatting>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::SaveBuffer>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::BlameBuffer>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::MultiLspQuery>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::RestartLanguageServers>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::LinkedEditingRange>,
- ))
+ )
+ .add_request_handler(forward_mutating_project_request::<proto::GetCodeActions>)
+ .add_request_handler(forward_mutating_project_request::<proto::ApplyCodeAction>)
+ .add_request_handler(forward_mutating_project_request::<proto::PrepareRename>)
+ .add_request_handler(forward_mutating_project_request::<proto::PerformRename>)
+ .add_request_handler(forward_mutating_project_request::<proto::ReloadBuffers>)
+ .add_request_handler(forward_mutating_project_request::<proto::FormatBuffers>)
+ .add_request_handler(forward_mutating_project_request::<proto::CreateProjectEntry>)
+ .add_request_handler(forward_mutating_project_request::<proto::RenameProjectEntry>)
+ .add_request_handler(forward_mutating_project_request::<proto::CopyProjectEntry>)
+ .add_request_handler(forward_mutating_project_request::<proto::DeleteProjectEntry>)
+ .add_request_handler(forward_mutating_project_request::<proto::ExpandProjectEntry>)
+ .add_request_handler(forward_mutating_project_request::<proto::OnTypeFormatting>)
+ .add_request_handler(forward_mutating_project_request::<proto::SaveBuffer>)
+ .add_request_handler(forward_mutating_project_request::<proto::BlameBuffer>)
+ .add_request_handler(forward_mutating_project_request::<proto::MultiLspQuery>)
+ .add_request_handler(forward_mutating_project_request::<proto::RestartLanguageServers>)
+ .add_request_handler(forward_mutating_project_request::<proto::LinkedEditingRange>)
.add_message_handler(create_buffer_for_peer)
.add_request_handler(update_buffer)
.add_message_handler(broadcast_project_message_from_host::<proto::RefreshInlayHints>)
@@ -577,53 +342,47 @@ impl Server {
.add_message_handler(broadcast_project_message_from_host::<proto::BufferSaved>)
.add_message_handler(broadcast_project_message_from_host::<proto::UpdateDiffBase>)
.add_request_handler(get_users)
- .add_request_handler(user_handler(fuzzy_search_users))
- .add_request_handler(user_handler(request_contact))
- .add_request_handler(user_handler(remove_contact))
- .add_request_handler(user_handler(respond_to_contact_request))
+ .add_request_handler(fuzzy_search_users)
+ .add_request_handler(request_contact)
+ .add_request_handler(remove_contact)
+ .add_request_handler(respond_to_contact_request)
.add_message_handler(subscribe_to_channels)
- .add_request_handler(user_handler(create_channel))
- .add_request_handler(user_handler(delete_channel))
- .add_request_handler(user_handler(invite_channel_member))
- .add_request_handler(user_handler(remove_channel_member))
- .add_request_handler(user_handler(set_channel_member_role))
- .add_request_handler(user_handler(set_channel_visibility))
- .add_request_handler(user_handler(rename_channel))
- .add_request_handler(user_handler(join_channel_buffer))
- .add_request_handler(user_handler(leave_channel_buffer))
- .add_message_handler(user_message_handler(update_channel_buffer))
- .add_request_handler(user_handler(rejoin_channel_buffers))
- .add_request_handler(user_handler(get_channel_members))
- .add_request_handler(user_handler(respond_to_channel_invite))
- .add_request_handler(user_handler(join_channel))
- .add_request_handler(user_handler(join_channel_chat))
- .add_message_handler(user_message_handler(leave_channel_chat))
- .add_request_handler(user_handler(send_channel_message))
- .add_request_handler(user_handler(remove_channel_message))
- .add_request_handler(user_handler(update_channel_message))
- .add_request_handler(user_handler(get_channel_messages))
- .add_request_handler(user_handler(get_channel_messages_by_id))
- .add_request_handler(user_handler(get_notifications))
- .add_request_handler(user_handler(mark_notification_as_read))
- .add_request_handler(user_handler(move_channel))
- .add_request_handler(user_handler(follow))
- .add_message_handler(user_message_handler(unfollow))
- .add_message_handler(user_message_handler(update_followers))
- .add_request_handler(user_handler(get_private_user_info))
- .add_request_handler(user_handler(get_llm_api_token))
- .add_request_handler(user_handler(accept_terms_of_service))
- .add_message_handler(user_message_handler(acknowledge_channel_message))
- .add_message_handler(user_message_handler(acknowledge_buffer_version))
- .add_request_handler(user_handler(get_supermaven_api_key))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::OpenContext>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::CreateContext>,
- ))
- .add_request_handler(user_handler(
- forward_mutating_project_request::<proto::SynchronizeContexts>,
- ))
+ .add_request_handler(create_channel)
+ .add_request_handler(delete_channel)
+ .add_request_handler(invite_channel_member)
+ .add_request_handler(remove_channel_member)
+ .add_request_handler(set_channel_member_role)
+ .add_request_handler(set_channel_visibility)
+ .add_request_handler(rename_channel)
+ .add_request_handler(join_channel_buffer)
+ .add_request_handler(leave_channel_buffer)
+ .add_message_handler(update_channel_buffer)
+ .add_request_handler(rejoin_channel_buffers)
+ .add_request_handler(get_channel_members)
+ .add_request_handler(respond_to_channel_invite)
+ .add_request_handler(join_channel)
+ .add_request_handler(join_channel_chat)
+ .add_message_handler(leave_channel_chat)
+ .add_request_handler(send_channel_message)
+ .add_request_handler(remove_channel_message)
+ .add_request_handler(update_channel_message)
+ .add_request_handler(get_channel_messages)
+ .add_request_handler(get_channel_messages_by_id)
+ .add_request_handler(get_notifications)
+ .add_request_handler(mark_notification_as_read)
+ .add_request_handler(move_channel)
+ .add_request_handler(follow)
+ .add_message_handler(unfollow)
+ .add_message_handler(update_followers)
+ .add_request_handler(get_private_user_info)
+ .add_request_handler(get_llm_api_token)
+ .add_request_handler(accept_terms_of_service)
+ .add_message_handler(acknowledge_channel_message)
+ .add_message_handler(acknowledge_buffer_version)
+ .add_request_handler(get_supermaven_api_key)
+ .add_request_handler(forward_mutating_project_request::<proto::OpenContext>)
+ .add_request_handler(forward_mutating_project_request::<proto::CreateContext>)
+ .add_request_handler(forward_mutating_project_request::<proto::SynchronizeContexts>)
.add_message_handler(broadcast_project_message_from_host::<proto::AdvertiseContexts>)
.add_message_handler(update_context)
.add_request_handler({
@@ -636,21 +395,17 @@ impl Server {
}
}
})
- .add_request_handler({
- user_handler(move |request, response, session| {
- get_cached_embeddings(request, response, session)
- })
- })
+ .add_request_handler(get_cached_embeddings)
.add_request_handler({
let app_state = app_state.clone();
- user_handler(move |request, response, session| {
+ move |request, response, session| {
compute_embeddings(
request,
response,
session,
app_state.config.openai_api_key.clone(),
)
- })
+ }
});
Arc::new(server)
@@ -936,7 +691,6 @@ impl Server {
user_id=field::Empty,
login=field::Empty,
impersonator=field::Empty,
- dev_server_id=field::Empty,
geoip_country_code=field::Empty
);
principal.update_span(&span);
@@ -1031,7 +785,6 @@ impl Server {
user_id=field::Empty,
login=field::Empty,
impersonator=field::Empty,
- dev_server_id=field::Empty
);
principal.update_span(&span);
let span_enter = span.enter();
@@ -1100,11 +853,7 @@ impl Server {
update_user_plan(user.id, session).await?;
- let (contacts, dev_server_projects) = future::try_join(
- self.app_state.db.get_contacts(user.id),
- self.app_state.db.dev_server_projects_update(user.id),
- )
- .await?;
+ let contacts = self.app_state.db.get_contacts(user.id).await?;
{
let mut pool = self.connection_pool.lock();
@@ -1119,8 +868,6 @@ impl Server {
subscribe_user_to_channels(user.id, session).await?;
}
- send_dev_server_projects_update(user.id, dev_server_projects, session).await;
-
if let Some(incoming_call) =
self.app_state.db.incoming_call_for_user(user.id).await?
{
@@ -1129,39 +876,6 @@ impl Server {
update_user_contacts(user.id, session).await?;
}
- Principal::DevServer(dev_server) => {
- {
- let mut pool = self.connection_pool.lock();
- if let Some(stale_connection_id) = pool.dev_server_connection_id(dev_server.id)
- {
- self.peer.send(
- stale_connection_id,
- proto::ShutdownDevServer {
- reason: Some(
- "another dev server connected with the same token".to_string(),
- ),
- },
- )?;
- pool.remove_connection(stale_connection_id)?;
- };
- pool.add_dev_server(connection_id, dev_server.id, zed_version);
- }
-
- let projects = self
- .app_state
- .db
- .get_projects_for_dev_server(dev_server.id)
- .await?;
- self.peer
- .send(connection_id, proto::DevServerInstructions { projects })?;
-
- let status = self
- .app_state
- .db
- .dev_server_projects_update(dev_server.user_id)
- .await?;
- send_dev_server_projects_update(dev_server.user_id, status, session).await;
- }
}
Ok(())
@@ -1452,33 +1166,25 @@ async fn connection_lost(
futures::select_biased! {
_ = executor.sleep(RECONNECT_TIMEOUT).fuse() => {
- match &session.principal {
- Principal::User(_) | Principal::Impersonated{ user: _, admin:_ } => {
- let session = session.for_user().unwrap();
-
- log::info!("connection lost, removing all resources for user:{}, connection:{:?}", session.user_id(), session.connection_id);
- leave_room_for_session(&session, session.connection_id).await.trace_err();
- leave_channel_buffers_for_session(&session)
- .await
- .trace_err();
-
- if !session
- .connection_pool()
- .await
- .is_user_online(session.user_id())
- {
- let db = session.db().await;
- if let Some(room) = db.decline_call(None, session.user_id()).await.trace_err().flatten() {
- room_updated(&room, &session.peer);
- }
- }
- update_user_contacts(session.user_id(), &session).await?;
- },
- Principal::DevServer(_) => {
- lost_dev_server_connection(&session.for_dev_server().unwrap()).await?;
- },
- }
+ log::info!("connection lost, removing all resources for user:{}, connection:{:?}", session.user_id(), session.connection_id);
+ leave_room_for_session(&session, session.connection_id).await.trace_err();
+ leave_channel_buffers_for_session(&session)
+ .await
+ .trace_err();
+
+ if !session
+ .connection_pool()
+ .await
+ .is_user_online(session.user_id())
+ {
+ let db = session.db().await;
+ if let Some(room) = db.decline_call(None, session.user_id()).await.trace_err().flatten() {
+ room_updated(&room, &session.peer);
+ }
+ }
+
+ update_user_contacts(session.user_id(), &session).await?;
},
_ = teardown.changed().fuse() => {}
}
@@ -1496,7 +1202,7 @@ async fn ping(_: proto::Ping, response: Response<proto::Ping>, _session: Session
async fn create_room(
_request: proto::CreateRoom,
response: Response<proto::CreateRoom>,
- session: UserSession,
+ session: Session,
) -> Result<()> {
let live_kit_room = nanoid::nanoid!(30);
@@ -1536,7 +1242,7 @@ async fn create_room(
async fn join_room(
request: proto::JoinRoom,
response: Response<proto::JoinRoom>,
- session: UserSession,
+ session: Session,
) -> Result<()> {
let room_id = RoomId::from_proto(request.id);
@@ -1603,7 +1309,7 @@ async fn join_room(
async fn rejoin_room(
request: proto::RejoinRoom,
response: Response<proto::RejoinRoom>,
- session: UserSession,
+ session: Session,
) -> Result<()> {
let room;
let channel;
@@ -1693,7 +1399,7 @@ async fn rejoin_room(
fn notify_rejoined_projects(
rejoined_projects: &mut Vec<RejoinedProject>,
- session: &UserSession,
+ session: &Session,
) -> Result<()> {
for project in rejoined_projects.iter() {
for collaborator in &project.collaborators {
@@ -1778,7 +1484,7 @@ fn notify_rejoined_projects(
async fn leave_room(
_: proto::LeaveRoom,
response: Response<proto::LeaveRoom>,
- session: UserSession,
+ session: Session,
) -> Result<()> {
leave_room_for_session(&session, session.connection_id).await?;
response.send(proto::Ack {})?;
@@ -1789,7 +1495,7 @@ async fn leave_room(
async fn set_room_participant_role(
request: proto::SetRoomParticipantRole,
response: Response<proto::SetRoomParticipantRole>,
- session: UserSession,
+ session: Session,
) -> Result<()> {
let user_id = UserId::from_proto(request.user_id);
let role = ChannelRole::from(request.role());
@@ -1837,7 +1543,7 @@ async fn set_room_participant_role(
async fn call(
request: proto::Call,
response: Response<proto::Call>,
- session: UserSession,
+ session: Session,
) -> Result<()> {
let room_id = RoomId::from_proto(request.room_id);
let calling_user_id = session.user_id();
@@ -1906,7 +1612,7 @@ async fn call(
async fn cancel_call(
request: proto::CancelCall,
response: Response<proto::CancelCall>,
- session: UserSession,
+ session: Session,
) -> Result<()> {
let called_user_id = UserId::from_proto(request.called_user_id);
let room_id = RoomId::from_proto(request.room_id);
@@ -1941,7 +1647,7 @@ async fn cancel_call(
}
/// Decline an incoming call.
-async fn decline_call(message: proto::DeclineCall, session: UserSession) -> Result<()> {
+async fn decline_call(message: proto::DeclineCall, session: Session) -> Result<()> {
let room_id = RoomId::from_proto(message.room_id);
{
let room = session
@@ -1976,7 +1682,7 @@ async fn decline_call(message: proto::DeclineCall, session: UserSession) -> Resu
async fn update_participant_location(
request: proto::UpdateParticipantLocation,
response: Response<proto::UpdateParticipantLocation>,
- session: UserSession,
+ session: Session,
) -> Result<()> {
let room_id = RoomId::from_proto(request.room_id);
let location = request
@@ -1997,7 +1703,7 @@ async fn update_participant_location(
async fn share_project(
request: proto::ShareProject,
response: Response<proto::ShareProject>,
- session: UserSession,
+ session: Session,
) -> Result<()> {
let (project_id, room) = &*session
.db()
@@ -2007,9 +1713,6 @@ async fn share_project(
session.connection_id,
&request.worktrees,
request.is_ssh_project,
- request
- .dev_server_project_id
- .map(DevServerProjectId::from_proto),
)
.await?;
response.send(proto::ShareProjectResponse {
@@ -2023,26 +1726,19 @@ async fn share_project(
/// Unshare a project from the room.
async fn unshare_project(message: proto::UnshareProject, session: Session) -> Result<()> {
let project_id = ProjectId::from_proto(message.project_id);
- unshare_project_internal(
- project_id,
- session.connection_id,
- session.user_id(),
- &session,
- )
- .await
+ unshare_project_internal(project_id, session.connection_id, &session).await
}
async fn unshare_project_internal(
project_id: ProjectId,
connection_id: ConnectionId,
- user_id: Option<UserId>,
session: &Session,
) -> Result<()> {
let delete = {
let room_guard = session
.db()
.await
- .unshare_project(project_id, connection_id, user_id)
+ .unshare_project(project_id, connection_id)
.await?;
let (delete, room, guest_connection_ids) = &*room_guard;
@@ -2071,38 +1767,11 @@ async fn unshare_project_internal(
Ok(())
}
-/// DevServer makes a project available online
-async fn share_dev_server_project(
- request: proto::ShareDevServerProject,
- response: Response<proto::ShareDevServerProject>,
- session: DevServerSession,
-) -> Result<()> {
- let (dev_server_project, user_id, status) = session
- .db()
- .await
- .share_dev_server_project(
- DevServerProjectId::from_proto(request.dev_server_project_id),
- session.dev_server_id(),
- session.connection_id,
- &request.worktrees,
- )
- .await?;
- let Some(project_id) = dev_server_project.project_id else {
- return Err(anyhow!("failed to share remote project"))?;
- };
-
- send_dev_server_projects_update(user_id, status, &session).await;
-
- response.send(proto::ShareProjectResponse { project_id })?;
-
- Ok(())
-}
-
/// Join someone elses shared project.
async fn join_project(
request: proto::JoinProject,
response: Response<proto::JoinProject>,
- session: UserSession,
+ session: Session,
) -> Result<()> {
let project_id = ProjectId::from_proto(request.project_id);
@@ -2125,15 +1794,10 @@ impl JoinProjectInternalResponse for Response<proto::JoinProject> {
Response::<proto::JoinProject>::send(self, result)
}
}
-impl JoinProjectInternalResponse for Response<proto::JoinHostedProject> {
- fn send(self, result: proto::JoinProjectResponse) -> Result<()> {
- Response::<proto::JoinHostedProject>::send(self, result)
- }
-}
fn join_project_internal(
response: impl JoinProjectInternalResponse,
- session: UserSession,
+ session: Session,
project: &mut Project,
replica_id: &ReplicaId,
) -> Result<()> {
@@ -2184,9 +1848,6 @@ fn join_project_internal(
collaborators: collaborators.clone(),
language_servers: project.language_servers.clone(),
role: project.role.into(),
- dev_server_project_id: project
- .dev_server_project_id
- .map(|dev_server_project_id| dev_server_project_id.0 as u64),
})?;
for (worktree_id, worktree) in mem::take(&mut project.worktrees) {
@@ -2252,15 +1913,10 @@ fn join_project_internal(
}
/// Leave someone elses shared project.
-async fn leave_project(request: proto::LeaveProject, session: UserSession) -> Result<()> {
+async fn leave_project(request: proto::LeaveProject, session: Session) -> Result<()> {
let sender_id = session.connection_id;
let project_id = ProjectId::from_proto(request.project_id);
let db = session.db().await;
- if db.is_hosted_project(project_id).await? {
- let project = db.leave_hosted_project(project_id, sender_id).await?;
- project_left(&project, &session);
- return Ok(());
- }
let (room, project) = &*db.leave_project(project_id, sender_id).await?;
tracing::info!(
@@ -2276,499 +1932,6 @@ async fn leave_project(request: proto::LeaveProject, session: UserSession) -> Re
Ok(())
}
-async fn join_hosted_project(
- request: proto::JoinHostedProject,
- response: Response<proto::JoinHostedProject>,
- session: UserSession,
-) -> Result<()> {
- let (mut project, replica_id) = session
- .db()
- .await
- .join_hosted_project(
- ProjectId(request.project_id as i32),
- session.user_id(),
- session.connection_id,
- )
- .await?;
-
- join_project_internal(response, session, &mut project, &replica_id)
-}
-
-async fn list_remote_directory(
- request: proto::ListRemoteDirectory,
- response: Response<proto::ListRemoteDirectory>,
- session: UserSession,
-) -> Result<()> {
- let dev_server_id = DevServerId(request.dev_server_id as i32);
- let dev_server_connection_id = session
- .connection_pool()
- .await
- .online_dev_server_connection_id(dev_server_id)?;
-
- session
- .db()
- .await
- .get_dev_server_for_user(dev_server_id, session.user_id())
- .await?;
-
- response.send(
- session
- .peer
- .forward_request(session.connection_id, dev_server_connection_id, request)
- .await?,
- )?;
- Ok(())
-}
-
-async fn update_dev_server_project(
- request: proto::UpdateDevServerProject,
- response: Response<proto::UpdateDevServerProject>,
- session: UserSession,
-) -> Result<()> {
- let dev_server_project_id = DevServerProjectId(request.dev_server_project_id as i32);
-
- let (dev_server_project, update) = session
- .db()
- .await
- .update_dev_server_project(dev_server_project_id, &request.paths, session.user_id())
- .await?;
-
- let projects = session
- .db()
- .await
- .get_projects_for_dev_server(dev_server_project.dev_server_id)
- .await?;
-
- let dev_server_connection_id = session
- .connection_pool()
- .await
- .online_dev_server_connection_id(dev_server_project.dev_server_id)?;
-
- session.peer.send(
- dev_server_connection_id,
- proto::DevServerInstructions { projects },
- )?;
-
- send_dev_server_projects_update(session.user_id(), update, &session).await;
-
- response.send(proto::Ack {})
-}
-
-async fn create_dev_server_project(
- request: proto::CreateDevServerProject,
- response: Response<proto::CreateDevServerProject>,
- session: UserSession,
-) -> Result<()> {
- let dev_server_id = DevServerId(request.dev_server_id as i32);
- let dev_server_connection_id = session
- .connection_pool()
- .await
- .dev_server_connection_id(dev_server_id);
- let Some(dev_server_connection_id) = dev_server_connection_id else {
- Err(ErrorCode::DevServerOffline
- .message("Cannot create a remote project when the dev server is offline".to_string())
- .anyhow())?
- };
-
- let path = request.path.clone();
- //Check that the path exists on the dev server
- session
- .peer
- .forward_request(
- session.connection_id,
- dev_server_connection_id,
- proto::ValidateDevServerProjectRequest { path: path.clone() },
- )
- .await?;
-
- let (dev_server_project, update) = session
- .db()
- .await
- .create_dev_server_project(
- DevServerId(request.dev_server_id as i32),
- &request.path,
- session.user_id(),
- )
- .await?;
-
- let projects = session
- .db()
- .await
- .get_projects_for_dev_server(dev_server_project.dev_server_id)
- .await?;
-
- session.peer.send(
- dev_server_connection_id,
- proto::DevServerInstructions { projects },
- )?;
-
- send_dev_server_projects_update(session.user_id(), update, &session).await;
-
- response.send(proto::CreateDevServerProjectResponse {
- dev_server_project: Some(dev_server_project.to_proto(None)),
- })?;
- Ok(())
-}
-
-async fn create_dev_server(
- request: proto::CreateDevServer,
- response: Response<proto::CreateDevServer>,
- session: UserSession,
-) -> Result<()> {
- let access_token = auth::random_token();
- let hashed_access_token = auth::hash_access_token(&access_token);
-
- if request.name.is_empty() {
- return Err(proto::ErrorCode::Forbidden
- .message("Dev server name cannot be empty".to_string())
- .anyhow())?;
- }
-
- let (dev_server, status) = session
- .db()
- .await
- .create_dev_server(
- &request.name,
- request.ssh_connection_string.as_deref(),
- &hashed_access_token,
- session.user_id(),
- )
- .await?;
-
- send_dev_server_projects_update(session.user_id(), status, &session).await;
-
- response.send(proto::CreateDevServerResponse {
- dev_server_id: dev_server.id.0 as u64,
- access_token: auth::generate_dev_server_token(dev_server.id.0 as usize, access_token),
- name: request.name,
- })?;
- Ok(())
-}
-
-async fn regenerate_dev_server_token(
- request: proto::RegenerateDevServerToken,
- response: Response<proto::RegenerateDevServerToken>,
- session: UserSession,
-) -> Result<()> {
- let dev_server_id = DevServerId(request.dev_server_id as i32);
- let access_token = auth::random_token();
- let hashed_access_token = auth::hash_access_token(&access_token);
-
- let connection_id = session
- .connection_pool()
- .await
- .dev_server_connection_id(dev_server_id);
- if let Some(connection_id) = connection_id {
- shutdown_dev_server_internal(dev_server_id, connection_id, &session).await?;
- session.peer.send(
- connection_id,
- proto::ShutdownDevServer {
- reason: Some("dev server token was regenerated".to_string()),
- },
- )?;
- let _ = remove_dev_server_connection(dev_server_id, &session).await;
- }
-
- let status = session
- .db()
- .await
- .update_dev_server_token(dev_server_id, &hashed_access_token, session.user_id())
- .await?;
-
- send_dev_server_projects_update(session.user_id(), status, &session).await;
-
- response.send(proto::RegenerateDevServerTokenResponse {
- dev_server_id: dev_server_id.to_proto(),
- access_token: auth::generate_dev_server_token(dev_server_id.0 as usize, access_token),
- })?;
- Ok(())
-}
-
-async fn rename_dev_server(
- request: proto::RenameDevServer,
- response: Response<proto::RenameDevServer>,
- session: UserSession,
-) -> Result<()> {
- if request.name.trim().is_empty() {
- return Err(proto::ErrorCode::Forbidden
- .message("Dev server name cannot be empty".to_string())
- .anyhow())?;
- }
-
- let dev_server_id = DevServerId(request.dev_server_id as i32);
- let dev_server = session.db().await.get_dev_server(dev_server_id).await?;
- if dev_server.user_id != session.user_id() {
- return Err(anyhow!(ErrorCode::Forbidden))?;
- }
-
- let status = session
- .db()
- .await
- .rename_dev_server(
- dev_server_id,
- &request.name,
- request.ssh_connection_string.as_deref(),
- session.user_id(),
- )
- .await?;
-
- send_dev_server_projects_update(session.user_id(), status, &session).await;
-
- response.send(proto::Ack {})?;
- Ok(())
-}
-
-async fn delete_dev_server(
- request: proto::DeleteDevServer,
- response: Response<proto::DeleteDevServer>,
- session: UserSession,
-) -> Result<()> {
- let dev_server_id = DevServerId(request.dev_server_id as i32);
- let dev_server = session.db().await.get_dev_server(dev_server_id).await?;
- if dev_server.user_id != session.user_id() {
- return Err(anyhow!(ErrorCode::Forbidden))?;
- }
-
- let connection_id = session
- .connection_pool()
- .await
- .dev_server_connection_id(dev_server_id);
- if let Some(connection_id) = connection_id {
- shutdown_dev_server_internal(dev_server_id, connection_id, &session).await?;
- session.peer.send(
- connection_id,
- proto::ShutdownDevServer {
- reason: Some("dev server was deleted".to_string()),
- },
- )?;
- let _ = remove_dev_server_connection(dev_server_id, &session).await;
- }
-
- let status = session
- .db()
- .await
- .delete_dev_server(dev_server_id, session.user_id())
- .await?;
-
- send_dev_server_projects_update(session.user_id(), status, &session).await;
-
- response.send(proto::Ack {})?;
- Ok(())
-}
-
-async fn delete_dev_server_project(
- request: proto::DeleteDevServerProject,
- response: Response<proto::DeleteDevServerProject>,
- session: UserSession,
-) -> Result<()> {
- let dev_server_project_id = DevServerProjectId(request.dev_server_project_id as i32);
- let dev_server_project = session
- .db()
- .await
- .get_dev_server_project(dev_server_project_id)
- .await?;
-
- let dev_server = session
- .db()
- .await
- .get_dev_server(dev_server_project.dev_server_id)
- .await?;
- if dev_server.user_id != session.user_id() {
- return Err(anyhow!(ErrorCode::Forbidden))?;
- }
-
- let dev_server_connection_id = session
- .connection_pool()
- .await
- .dev_server_connection_id(dev_server.id);
-
- if let Some(dev_server_connection_id) = dev_server_connection_id {
- let project = session
- .db()
- .await
- .find_dev_server_project(dev_server_project_id)
- .await;
- if let Ok(project) = project {
- unshare_project_internal(
- project.id,
- dev_server_connection_id,
- Some(session.user_id()),
- &session,
- )
- .await?;
- }
- }
-
- let (projects, status) = session
- .db()
- .await
- .delete_dev_server_project(dev_server_project_id, dev_server.id, session.user_id())
- .await?;
-
- if let Some(dev_server_connection_id) = dev_server_connection_id {
- session.peer.send(
- dev_server_connection_id,
- proto::DevServerInstructions { projects },
- )?;
- }
-
- send_dev_server_projects_update(session.user_id(), status, &session).await;
-
- response.send(proto::Ack {})?;
- Ok(())
-}
-
-async fn rejoin_dev_server_projects(
- request: proto::RejoinRemoteProjects,
- response: Response<proto::RejoinRemoteProjects>,
- session: UserSession,
-) -> Result<()> {
- let mut rejoined_projects = {
- let db = session.db().await;
- db.rejoin_dev_server_projects(
- &request.rejoined_projects,
- session.user_id(),
- session.0.connection_id,
- )
- .await?
- };
- response.send(proto::RejoinRemoteProjectsResponse {
- rejoined_projects: rejoined_projects
- .iter()
- .map(|project| project.to_proto())
- .collect(),
- })?;
- notify_rejoined_projects(&mut rejoined_projects, &session)
-}
-
-async fn reconnect_dev_server(
- request: proto::ReconnectDevServer,
- response: Response<proto::ReconnectDevServer>,
- session: DevServerSession,
-) -> Result<()> {
- let reshared_projects = {
- let db = session.db().await;
- db.reshare_dev_server_projects(
- &request.reshared_projects,
- session.dev_server_id(),
- session.0.connection_id,
- )
- .await?
- };
-
- for project in &reshared_projects {
- for collaborator in &project.collaborators {
- session
- .peer
- .send(
- collaborator.connection_id,
- proto::UpdateProjectCollaborator {
- project_id: project.id.to_proto(),
- old_peer_id: Some(project.old_connection_id.into()),
- new_peer_id: Some(session.connection_id.into()),
- },
- )
- .trace_err();
- }
-
- broadcast(
- Some(session.connection_id),
- project
- .collaborators
- .iter()
- .map(|collaborator| collaborator.connection_id),
- |connection_id| {
- session.peer.forward_send(
- session.connection_id,
- connection_id,
- proto::UpdateProject {
- project_id: project.id.to_proto(),
- worktrees: project.worktrees.clone(),
- },
- )
- },
- );
- }
-
- response.send(proto::ReconnectDevServerResponse {
- reshared_projects: reshared_projects
- .iter()
- .map(|project| proto::ResharedProject {
- id: project.id.to_proto(),
- collaborators: project
- .collaborators
- .iter()
- .map(|collaborator| collaborator.to_proto())
- .collect(),
- })
- .collect(),
- })?;
-
- Ok(())
-}
-
-async fn shutdown_dev_server(
- _: proto::ShutdownDevServer,
- response: Response<proto::ShutdownDevServer>,
- session: DevServerSession,
-) -> Result<()> {
- response.send(proto::Ack {})?;
- shutdown_dev_server_internal(session.dev_server_id(), session.connection_id, &session).await?;
- remove_dev_server_connection(session.dev_server_id(), &session).await
-}
-
-async fn shutdown_dev_server_internal(
- dev_server_id: DevServerId,
- connection_id: ConnectionId,
- session: &Session,
-) -> Result<()> {
- let (dev_server_projects, dev_server) = {
- let db = session.db().await;
- let dev_server_projects = db.get_projects_for_dev_server(dev_server_id).await?;
- let dev_server = db.get_dev_server(dev_server_id).await?;
- (dev_server_projects, dev_server)
- };
-
- for project_id in dev_server_projects.iter().filter_map(|p| p.project_id) {
- unshare_project_internal(
- ProjectId::from_proto(project_id),
- connection_id,
- None,
- session,
- )
- .await?;
- }
-
- session
- .connection_pool()
- .await
- .set_dev_server_offline(dev_server_id);
-
- let status = session
- .db()
- .await
- .dev_server_projects_update(dev_server.user_id)
- .await?;
- send_dev_server_projects_update(dev_server.user_id, status, session).await;
-
- Ok(())
-}
-
-async fn remove_dev_server_connection(dev_server_id: DevServerId, session: &Session) -> Result<()> {
- let dev_server_connection = session
- .connection_pool()
- .await
- .dev_server_connection_id(dev_server_id);
-
- if let Some(dev_server_connection) = dev_server_connection {
- session
- .connection_pool()
- .await
- .remove_connection(dev_server_connection)?;
- }
- Ok(())
-}
-
/// Updates other participants with changes to the project
async fn update_project(
request: proto::UpdateProject,
@@ -1,7 +1,7 @@
-use crate::db::{ChannelId, ChannelRole, DevServerId, PrincipalId, UserId};
+use crate::db::{ChannelId, ChannelRole, UserId};
use anyhow::{anyhow, Result};
use collections::{BTreeMap, HashMap, HashSet};
-use rpc::{proto, ConnectionId};
+use rpc::ConnectionId;
use semantic_version::SemanticVersion;
use serde::Serialize;
use std::fmt;
@@ -11,9 +11,7 @@ use tracing::instrument;
pub struct ConnectionPool {
connections: BTreeMap<ConnectionId, Connection>,
connected_users: BTreeMap<UserId, ConnectedPrincipal>,
- connected_dev_servers: BTreeMap<DevServerId, ConnectionId>,
channels: ChannelPool,
- offline_dev_servers: HashSet<DevServerId>,
}
#[derive(Default, Serialize)]
@@ -32,13 +30,13 @@ impl fmt::Display for ZedVersion {
impl ZedVersion {
pub fn can_collaborate(&self) -> bool {
- self.0 >= SemanticVersion::new(0, 151, 0)
+ self.0 >= SemanticVersion::new(0, 157, 0)
}
}
#[derive(Serialize)]
pub struct Connection {
- pub principal_id: PrincipalId,
+ pub user_id: UserId,
pub admin: bool,
pub zed_version: ZedVersion,
}
@@ -47,7 +45,6 @@ impl ConnectionPool {
pub fn reset(&mut self) {
self.connections.clear();
self.connected_users.clear();
- self.connected_dev_servers.clear();
self.channels.clear();
}
@@ -66,7 +63,7 @@ impl ConnectionPool {
self.connections.insert(
connection_id,
Connection {
- principal_id: PrincipalId::UserId(user_id),
+ user_id,
admin,
zed_version,
},
@@ -75,25 +72,6 @@ impl ConnectionPool {
connected_user.connection_ids.insert(connection_id);
}
- pub fn add_dev_server(
- &mut self,
- connection_id: ConnectionId,
- dev_server_id: DevServerId,
- zed_version: ZedVersion,
- ) {
- self.connections.insert(
- connection_id,
- Connection {
- principal_id: PrincipalId::DevServerId(dev_server_id),
- admin: false,
- zed_version,
- },
- );
-
- self.connected_dev_servers
- .insert(dev_server_id, connection_id);
- }
-
#[instrument(skip(self))]
pub fn remove_connection(&mut self, connection_id: ConnectionId) -> Result<()> {
let connection = self
@@ -101,28 +79,18 @@ impl ConnectionPool {
.get_mut(&connection_id)
.ok_or_else(|| anyhow!("no such connection"))?;
- match connection.principal_id {
- PrincipalId::UserId(user_id) => {
- let connected_user = self.connected_users.get_mut(&user_id).unwrap();
- connected_user.connection_ids.remove(&connection_id);
- if connected_user.connection_ids.is_empty() {
- self.connected_users.remove(&user_id);
- self.channels.remove_user(&user_id);
- }
- }
- PrincipalId::DevServerId(dev_server_id) => {
- self.connected_dev_servers.remove(&dev_server_id);
- self.offline_dev_servers.remove(&dev_server_id);
- }
- }
+ let user_id = connection.user_id;
+
+ let connected_user = self.connected_users.get_mut(&user_id).unwrap();
+ connected_user.connection_ids.remove(&connection_id);
+ if connected_user.connection_ids.is_empty() {
+ self.connected_users.remove(&user_id);
+ self.channels.remove_user(&user_id);
+ };
self.connections.remove(&connection_id).unwrap();
Ok(())
}
- pub fn set_dev_server_offline(&mut self, dev_server_id: DevServerId) {
- self.offline_dev_servers.insert(dev_server_id);
- }
-
pub fn connections(&self) -> impl Iterator<Item = &Connection> {
self.connections.values()
}
@@ -147,42 +115,6 @@ impl ConnectionPool {
.copied()
}
- pub fn dev_server_status(&self, dev_server_id: DevServerId) -> proto::DevServerStatus {
- if self.dev_server_connection_id(dev_server_id).is_some()
- && !self.offline_dev_servers.contains(&dev_server_id)
- {
- proto::DevServerStatus::Online
- } else {
- proto::DevServerStatus::Offline
- }
- }
-
- pub fn dev_server_connection_id(&self, dev_server_id: DevServerId) -> Option<ConnectionId> {
- self.connected_dev_servers.get(&dev_server_id).copied()
- }
-
- pub fn online_dev_server_connection_id(
- &self,
- dev_server_id: DevServerId,
- ) -> Result<ConnectionId> {
- match self.connected_dev_servers.get(&dev_server_id) {
- Some(cid) => Ok(*cid),
- None => Err(anyhow!(proto::ErrorCode::DevServerOffline)),
- }
- }
-
- pub fn dev_server_connection_id_supporting(
- &self,
- dev_server_id: DevServerId,
- required: ZedVersion,
- ) -> Result<ConnectionId> {
- match self.connected_dev_servers.get(&dev_server_id) {
- Some(cid) if self.connections[cid].zed_version >= required => Ok(*cid),
- Some(_) => Err(anyhow!(proto::ErrorCode::RemoteUpgradeRequired)),
- None => Err(anyhow!(proto::ErrorCode::DevServerOffline)),
- }
- }
-
pub fn channel_user_ids(
&self,
channel_id: ChannelId,
@@ -227,39 +159,22 @@ impl ConnectionPool {
#[cfg(test)]
pub fn check_invariants(&self) {
for (connection_id, connection) in &self.connections {
- match &connection.principal_id {
- PrincipalId::UserId(user_id) => {
- assert!(self
- .connected_users
- .get(user_id)
- .unwrap()
- .connection_ids
- .contains(connection_id));
- }
- PrincipalId::DevServerId(dev_server_id) => {
- assert_eq!(
- self.connected_dev_servers.get(dev_server_id).unwrap(),
- connection_id
- );
- }
- }
+ assert!(self
+ .connected_users
+ .get(&connection.user_id)
+ .unwrap()
+ .connection_ids
+ .contains(connection_id));
}
for (user_id, state) in &self.connected_users {
for connection_id in &state.connection_ids {
assert_eq!(
- self.connections.get(connection_id).unwrap().principal_id,
- PrincipalId::UserId(*user_id)
+ self.connections.get(connection_id).unwrap().user_id,
+ *user_id
);
}
}
-
- for (dev_server_id, connection_id) in &self.connected_dev_servers {
- assert_eq!(
- self.connections.get(connection_id).unwrap().principal_id,
- PrincipalId::DevServerId(*dev_server_id)
- );
- }
}
}
@@ -6575,3 +6575,95 @@ async fn test_context_collaboration_with_reconnect(
assert!(context.buffer().read(cx).read_only());
});
}
+
+#[gpui::test]
+async fn test_remote_git_branches(
+ executor: BackgroundExecutor,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
+) {
+ let mut server = TestServer::start(executor.clone()).await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
+ server
+ .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+ .await;
+ let active_call_a = cx_a.read(ActiveCall::global);
+
+ client_a
+ .fs()
+ .insert_tree("/project", serde_json::json!({ ".git":{} }))
+ .await;
+ let branches = ["main", "dev", "feature-1"];
+ client_a
+ .fs()
+ .insert_branches(Path::new("/project/.git"), &branches);
+
+ let (project_a, worktree_id) = client_a.build_local_project("/project", cx_a).await;
+ let project_id = active_call_a
+ .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+ .await
+ .unwrap();
+ let project_b = client_b.join_remote_project(project_id, cx_b).await;
+
+ let root_path = ProjectPath::root_path(worktree_id);
+ // Client A sees that a guest has joined.
+ executor.run_until_parked();
+
+ let branches_b = cx_b
+ .update(|cx| project_b.update(cx, |project, cx| project.branches(root_path.clone(), cx)))
+ .await
+ .unwrap();
+
+ let new_branch = branches[2];
+
+ let branches_b = branches_b
+ .into_iter()
+ .map(|branch| branch.name)
+ .collect::<Vec<_>>();
+
+ assert_eq!(&branches_b, &branches);
+
+ cx_b.update(|cx| {
+ project_b.update(cx, |project, cx| {
+ project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx)
+ })
+ })
+ .await
+ .unwrap();
+
+ executor.run_until_parked();
+
+ let host_branch = cx_a.update(|cx| {
+ project_a.update(cx, |project, cx| {
+ project.worktree_store().update(cx, |worktree_store, cx| {
+ worktree_store
+ .current_branch(root_path.clone(), cx)
+ .unwrap()
+ })
+ })
+ });
+
+ assert_eq!(host_branch.as_ref(), branches[2]);
+
+ // Also try creating a new branch
+ cx_b.update(|cx| {
+ project_b.update(cx, |project, cx| {
+ project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx)
+ })
+ })
+ .await
+ .unwrap();
+
+ executor.run_until_parked();
+
+ let host_branch = cx_a.update(|cx| {
+ project_a.update(cx, |project, cx| {
+ project.worktree_store().update(cx, |worktree_store, cx| {
+ worktree_store.current_branch(root_path, cx).unwrap()
+ })
+ })
+ });
+
+ assert_eq!(host_branch.as_ref(), "totally-new-branch");
+}
@@ -1,7 +1,7 @@
use crate::tests::TestServer;
use call::ActiveCall;
use fs::{FakeFs, Fs as _};
-use gpui::{Context as _, TestAppContext};
+use gpui::{BackgroundExecutor, Context as _, TestAppContext};
use http_client::BlockedHttpClient;
use language::{language_settings::language_settings, LanguageRegistry};
use node_runtime::NodeRuntime;
@@ -26,7 +26,7 @@ async fn test_sharing_an_ssh_remote_project(
.await;
// Set up project on remote FS
- let (port, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
+ let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
let remote_fs = FakeFs::new(server_cx.executor());
remote_fs
.insert_tree(
@@ -67,7 +67,7 @@ async fn test_sharing_an_ssh_remote_project(
)
});
- let client_ssh = SshRemoteClient::fake_client(port, cx_a).await;
+ let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
let (project_a, worktree_id) = client_a
.build_ssh_project("/code/project1", client_ssh, cx_a)
.await;
@@ -174,3 +174,133 @@ async fn test_sharing_an_ssh_remote_project(
);
});
}
+
+#[gpui::test]
+async fn test_ssh_collaboration_git_branches(
+ executor: BackgroundExecutor,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
+ server_cx: &mut TestAppContext,
+) {
+ cx_a.set_name("a");
+ cx_b.set_name("b");
+ server_cx.set_name("server");
+
+ let mut server = TestServer::start(executor.clone()).await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
+ server
+ .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+ .await;
+
+ // Set up project on remote FS
+ let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
+ let remote_fs = FakeFs::new(server_cx.executor());
+ remote_fs
+ .insert_tree("/project", serde_json::json!({ ".git":{} }))
+ .await;
+
+ let branches = ["main", "dev", "feature-1"];
+ remote_fs.insert_branches(Path::new("/project/.git"), &branches);
+
+ // User A connects to the remote project via SSH.
+ server_cx.update(HeadlessProject::init);
+ let remote_http_client = Arc::new(BlockedHttpClient);
+ let node = NodeRuntime::unavailable();
+ let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
+ let headless_project = server_cx.new_model(|cx| {
+ client::init_settings(cx);
+ HeadlessProject::new(
+ HeadlessAppState {
+ session: server_ssh,
+ fs: remote_fs.clone(),
+ http_client: remote_http_client,
+ node_runtime: node,
+ languages,
+ },
+ cx,
+ )
+ });
+
+ let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
+ let (project_a, worktree_id) = client_a
+ .build_ssh_project("/project", client_ssh, cx_a)
+ .await;
+
+ // While the SSH worktree is being scanned, user A shares the remote project.
+ let active_call_a = cx_a.read(ActiveCall::global);
+ let project_id = active_call_a
+ .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+ .await
+ .unwrap();
+
+ // User B joins the project.
+ let project_b = client_b.join_remote_project(project_id, cx_b).await;
+
+ // Give client A some time to see that B has joined, and that the headless server
+ // has some git repositories
+ executor.run_until_parked();
+
+ let root_path = ProjectPath::root_path(worktree_id);
+
+ let branches_b = cx_b
+ .update(|cx| project_b.update(cx, |project, cx| project.branches(root_path.clone(), cx)))
+ .await
+ .unwrap();
+
+ let new_branch = branches[2];
+
+ let branches_b = branches_b
+ .into_iter()
+ .map(|branch| branch.name)
+ .collect::<Vec<_>>();
+
+ assert_eq!(&branches_b, &branches);
+
+ cx_b.update(|cx| {
+ project_b.update(cx, |project, cx| {
+ project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx)
+ })
+ })
+ .await
+ .unwrap();
+
+ executor.run_until_parked();
+
+ let server_branch = server_cx.update(|cx| {
+ headless_project.update(cx, |headless_project, cx| {
+ headless_project
+ .worktree_store
+ .update(cx, |worktree_store, cx| {
+ worktree_store
+ .current_branch(root_path.clone(), cx)
+ .unwrap()
+ })
+ })
+ });
+
+ assert_eq!(server_branch.as_ref(), branches[2]);
+
+ // Also try creating a new branch
+ cx_b.update(|cx| {
+ project_b.update(cx, |project, cx| {
+ project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx)
+ })
+ })
+ .await
+ .unwrap();
+
+ executor.run_until_parked();
+
+ let server_branch = server_cx.update(|cx| {
+ headless_project.update(cx, |headless_project, cx| {
+ headless_project
+ .worktree_store
+ .update(cx, |worktree_store, cx| {
+ worktree_store.current_branch(root_path, cx).unwrap()
+ })
+ })
+ });
+
+ assert_eq!(server_branch.as_ref(), "totally-new-branch");
+}
@@ -203,7 +203,7 @@ impl TestServer {
.override_authenticate(move |cx| {
cx.spawn(|_| async move {
let access_token = "the-token".to_string();
- Ok(Credentials::User {
+ Ok(Credentials {
user_id: user_id.to_proto(),
access_token,
})
@@ -212,7 +212,7 @@ impl TestServer {
.override_establish_connection(move |credentials, cx| {
assert_eq!(
credentials,
- &Credentials::User {
+ &Credentials {
user_id: user_id.0 as u64,
access_token: "the-token".into()
}
@@ -296,7 +296,6 @@ impl TestServer {
collab_ui::init(&app_state, cx);
file_finder::init(cx);
menu::init();
- dev_server_projects::init(client.clone(), cx);
settings::KeymapFile::load_asset(os_keymap, cx).unwrap();
language_model::LanguageModelRegistry::test(cx);
assistant::context_store::init(&client.clone().into());
@@ -5,7 +5,7 @@ use self::channel_modal::ChannelModal;
use crate::{channel_view::ChannelView, chat_panel::ChatPanel, CollaborationPanelSettings};
use call::ActiveCall;
use channel::{Channel, ChannelEvent, ChannelStore};
-use client::{ChannelId, Client, Contact, ProjectId, User, UserStore};
+use client::{ChannelId, Client, Contact, User, UserStore};
use contact_finder::ContactFinder;
use db::kvp::KEY_VALUE_STORE;
use editor::{Editor, EditorElement, EditorStyle};
@@ -182,10 +182,6 @@ enum ListEntry {
ChannelEditor {
depth: usize,
},
- HostedProject {
- id: ProjectId,
- name: SharedString,
- },
Contact {
contact: Arc<Contact>,
calling: bool,
@@ -566,7 +562,6 @@ impl CollabPanel {
}
}
- let hosted_projects = channel_store.projects_for_id(channel.id);
let has_children = channel_store
.channel_at_index(mat.candidate_id + 1)
.map_or(false, |next_channel| {
@@ -600,10 +595,6 @@ impl CollabPanel {
});
}
}
-
- for (name, id) in hosted_projects {
- self.entries.push(ListEntry::HostedProject { id, name });
- }
}
}
@@ -1029,40 +1020,6 @@ impl CollabPanel {
.tooltip(move |cx| Tooltip::text("Open Chat", cx))
}
- fn render_channel_project(
- &self,
- id: ProjectId,
- name: &SharedString,
- is_selected: bool,
- cx: &mut ViewContext<Self>,
- ) -> impl IntoElement {
- ListItem::new(ElementId::NamedInteger(
- "channel-project".into(),
- id.0 as usize,
- ))
- .indent_level(2)
- .indent_step_size(px(20.))
- .selected(is_selected)
- .on_click(cx.listener(move |this, _, cx| {
- if let Some(workspace) = this.workspace.upgrade() {
- let app_state = workspace.read(cx).app_state().clone();
- workspace::join_hosted_project(id, app_state, cx).detach_and_prompt_err(
- "Failed to open project",
- cx,
- |_, _| None,
- )
- }
- }))
- .start_slot(
- h_flex()
- .relative()
- .gap_1()
- .child(IconButton::new(0, IconName::FileTree)),
- )
- .child(Label::new(name.clone()))
- .tooltip(move |cx| Tooltip::text("Open Project", cx))
- }
-
fn has_subchannels(&self, ix: usize) -> bool {
self.entries.get(ix).map_or(false, |entry| {
if let ListEntry::Channel { has_children, .. } = entry {
@@ -1538,12 +1495,6 @@ impl CollabPanel {
ListEntry::ChannelChat { channel_id } => {
self.join_channel_chat(*channel_id, cx)
}
- ListEntry::HostedProject {
- id: _id,
- name: _name,
- } => {
- // todo()
- }
ListEntry::OutgoingRequest(_) => {}
ListEntry::ChannelEditor { .. } => {}
}
@@ -2157,10 +2108,6 @@ impl CollabPanel {
ListEntry::ChannelChat { channel_id } => self
.render_channel_chat(*channel_id, is_selected, cx)
.into_any_element(),
-
- ListEntry::HostedProject { id, name } => self
- .render_channel_project(*id, name, is_selected, cx)
- .into_any_element(),
}
}
@@ -2898,11 +2845,6 @@ impl PartialEq for ListEntry {
return channel_1.id == channel_2.id;
}
}
- ListEntry::HostedProject { id, .. } => {
- if let ListEntry::HostedProject { id: other_id, .. } = other {
- return id == other_id;
- }
- }
ListEntry::ChannelNotes { channel_id } => {
if let ListEntry::ChannelNotes {
channel_id: other_id,
@@ -180,6 +180,39 @@ impl InitializedContextServerProtocol {
Ok(completion)
}
+
+ /// List MCP tools.
+ pub async fn list_tools(&self) -> Result<types::ListToolsResponse> {
+ self.check_capability(ServerCapability::Tools)?;
+
+ let response = self
+ .inner
+ .request::<types::ListToolsResponse>(types::RequestType::ListTools.as_str(), ())
+ .await?;
+
+ Ok(response)
+ }
+
+ /// Executes a tool with the given arguments
+ pub async fn run_tool<P: AsRef<str>>(
+ &self,
+ tool: P,
+ arguments: Option<HashMap<String, serde_json::Value>>,
+ ) -> Result<types::CallToolResponse> {
+ self.check_capability(ServerCapability::Tools)?;
+
+ let params = types::CallToolParams {
+ name: tool.as_ref().to_string(),
+ arguments,
+ };
+
+ let response: types::CallToolResponse = self
+ .inner
+ .request(types::RequestType::CallTool.as_str(), params)
+ .await?;
+
+ Ok(response)
+ }
}
impl InitializedContextServerProtocol {
@@ -9,7 +9,8 @@ struct GlobalContextServerRegistry(Arc<ContextServerRegistry>);
impl Global for GlobalContextServerRegistry {}
pub struct ContextServerRegistry {
- registry: RwLock<HashMap<String, Vec<Arc<str>>>>,
+ command_registry: RwLock<HashMap<String, Vec<Arc<str>>>>,
+ tool_registry: RwLock<HashMap<String, Vec<Arc<str>>>>,
}
impl ContextServerRegistry {
@@ -20,13 +21,14 @@ impl ContextServerRegistry {
pub fn register(cx: &mut AppContext) {
cx.set_global(GlobalContextServerRegistry(Arc::new(
ContextServerRegistry {
- registry: RwLock::new(HashMap::default()),
+ command_registry: RwLock::new(HashMap::default()),
+ tool_registry: RwLock::new(HashMap::default()),
},
)))
}
pub fn register_command(&self, server_id: String, command_name: &str) {
- let mut registry = self.registry.write();
+ let mut registry = self.command_registry.write();
registry
.entry(server_id)
.or_default()
@@ -34,14 +36,34 @@ impl ContextServerRegistry {
}
pub fn unregister_command(&self, server_id: &str, command_name: &str) {
- let mut registry = self.registry.write();
+ let mut registry = self.command_registry.write();
if let Some(commands) = registry.get_mut(server_id) {
commands.retain(|name| name.as_ref() != command_name);
}
}
pub fn get_commands(&self, server_id: &str) -> Option<Vec<Arc<str>>> {
- let registry = self.registry.read();
+ let registry = self.command_registry.read();
+ registry.get(server_id).cloned()
+ }
+
+ pub fn register_tool(&self, server_id: String, tool_name: &str) {
+ let mut registry = self.tool_registry.write();
+ registry
+ .entry(server_id)
+ .or_default()
+ .push(tool_name.into());
+ }
+
+ pub fn unregister_tool(&self, server_id: &str, tool_name: &str) {
+ let mut registry = self.tool_registry.write();
+ if let Some(tools) = registry.get_mut(server_id) {
+ tools.retain(|name| name.as_ref() != tool_name);
+ }
+ }
+
+ pub fn get_tools(&self, server_id: &str) -> Option<Vec<Arc<str>>> {
+ let registry = self.tool_registry.read();
registry.get(server_id).cloned()
}
}
@@ -16,6 +16,8 @@ pub enum RequestType {
PromptsList,
CompletionComplete,
Ping,
+ ListTools,
+ ListResourceTemplates,
}
impl RequestType {
@@ -32,6 +34,8 @@ impl RequestType {
RequestType::PromptsList => "prompts/list",
RequestType::CompletionComplete => "completion/complete",
RequestType::Ping => "ping",
+ RequestType::ListTools => "tools/list",
+ RequestType::ListResourceTemplates => "resources/templates/list",
}
}
}
@@ -402,3 +406,17 @@ pub struct Completion {
pub values: Vec<String>,
pub total: CompletionTotal,
}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CallToolResponse {
+ pub tool_result: serde_json::Value,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ListToolsResponse {
+ pub tools: Vec<Tool>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub next_cursor: Option<String>,
+}
@@ -1,23 +0,0 @@
-[package]
-name = "dev_server_projects"
-version = "0.1.0"
-edition = "2021"
-publish = false
-license = "GPL-3.0-or-later"
-
-[lints]
-workspace = true
-
-[lib]
-path = "src/dev_server_projects.rs"
-doctest = false
-
-[dependencies]
-anyhow.workspace = true
-gpui.workspace = true
-serde.workspace = true
-client.workspace = true
-rpc.workspace = true
-
-[dev-dependencies]
-serde_json.workspace = true
@@ -1,249 +1 @@
-use anyhow::Result;
-use gpui::{AppContext, AsyncAppContext, Context, Global, Model, ModelContext, SharedString, Task};
-use rpc::{
- proto::{self, DevServerStatus},
- TypedEnvelope,
-};
-use std::{collections::HashMap, sync::Arc};
-use client::{Client, ProjectId};
-pub use client::{DevServerId, DevServerProjectId};
-
-pub struct Store {
- dev_server_projects: HashMap<DevServerProjectId, DevServerProject>,
- dev_servers: HashMap<DevServerId, DevServer>,
- _subscriptions: Vec<client::Subscription>,
- client: Arc<Client>,
-}
-
-#[derive(Debug, Clone)]
-pub struct DevServerProject {
- pub id: DevServerProjectId,
- pub project_id: Option<ProjectId>,
- pub paths: Vec<SharedString>,
- pub dev_server_id: DevServerId,
-}
-
-impl From<proto::DevServerProject> for DevServerProject {
- fn from(project: proto::DevServerProject) -> Self {
- Self {
- id: DevServerProjectId(project.id),
- project_id: project.project_id.map(ProjectId),
- paths: project.paths.into_iter().map(|path| path.into()).collect(),
- dev_server_id: DevServerId(project.dev_server_id),
- }
- }
-}
-
-#[derive(Debug, Clone)]
-pub struct DevServer {
- pub id: DevServerId,
- pub name: SharedString,
- pub ssh_connection_string: Option<SharedString>,
- pub status: DevServerStatus,
-}
-
-impl From<proto::DevServer> for DevServer {
- fn from(dev_server: proto::DevServer) -> Self {
- Self {
- id: DevServerId(dev_server.dev_server_id),
- status: dev_server.status(),
- name: dev_server.name.into(),
- ssh_connection_string: dev_server.ssh_connection_string.map(|s| s.into()),
- }
- }
-}
-
-struct GlobalStore(Model<Store>);
-
-impl Global for GlobalStore {}
-
-pub fn init(client: Arc<Client>, cx: &mut AppContext) {
- let store = cx.new_model(|cx| Store::new(client, cx));
- cx.set_global(GlobalStore(store));
-}
-
-impl Store {
- pub fn global(cx: &AppContext) -> Model<Store> {
- cx.global::<GlobalStore>().0.clone()
- }
-
- pub fn new(client: Arc<Client>, cx: &ModelContext<Self>) -> Self {
- Self {
- dev_server_projects: Default::default(),
- dev_servers: Default::default(),
- _subscriptions: vec![client
- .add_message_handler(cx.weak_model(), Self::handle_dev_server_projects_update)],
- client,
- }
- }
-
- pub fn projects_for_server(&self, id: DevServerId) -> Vec<DevServerProject> {
- let mut projects: Vec<DevServerProject> = self
- .dev_server_projects
- .values()
- .filter(|project| project.dev_server_id == id)
- .cloned()
- .collect();
- projects.sort_by_key(|p| (p.paths.clone(), p.id));
- projects
- }
-
- pub fn dev_servers(&self) -> Vec<DevServer> {
- let mut dev_servers: Vec<DevServer> = self.dev_servers.values().cloned().collect();
- dev_servers.sort_by_key(|d| (d.status == DevServerStatus::Offline, d.name.clone(), d.id));
- dev_servers
- }
-
- pub fn dev_server(&self, id: DevServerId) -> Option<&DevServer> {
- self.dev_servers.get(&id)
- }
-
- pub fn dev_server_status(&self, id: DevServerId) -> DevServerStatus {
- self.dev_server(id)
- .map(|server| server.status)
- .unwrap_or(DevServerStatus::Offline)
- }
-
- pub fn dev_server_projects(&self) -> Vec<DevServerProject> {
- let mut projects: Vec<DevServerProject> =
- self.dev_server_projects.values().cloned().collect();
- projects.sort_by_key(|p| (p.paths.clone(), p.id));
- projects
- }
-
- pub fn dev_server_project(&self, id: DevServerProjectId) -> Option<&DevServerProject> {
- self.dev_server_projects.get(&id)
- }
-
- pub fn dev_server_for_project(&self, id: DevServerProjectId) -> Option<&DevServer> {
- self.dev_server_project(id)
- .and_then(|project| self.dev_server(project.dev_server_id))
- }
-
- async fn handle_dev_server_projects_update(
- this: Model<Self>,
- envelope: TypedEnvelope<proto::DevServerProjectsUpdate>,
- mut cx: AsyncAppContext,
- ) -> Result<()> {
- this.update(&mut cx, |this, cx| {
- this.dev_servers = envelope
- .payload
- .dev_servers
- .into_iter()
- .map(|dev_server| (DevServerId(dev_server.dev_server_id), dev_server.into()))
- .collect();
- this.dev_server_projects = envelope
- .payload
- .dev_server_projects
- .into_iter()
- .map(|project| (DevServerProjectId(project.id), project.into()))
- .collect();
-
- cx.notify();
- })?;
- Ok(())
- }
-
- pub fn create_dev_server_project(
- &mut self,
- dev_server_id: DevServerId,
- path: String,
- cx: &mut ModelContext<Self>,
- ) -> Task<Result<proto::CreateDevServerProjectResponse>> {
- let client = self.client.clone();
- cx.background_executor().spawn(async move {
- client
- .request(proto::CreateDevServerProject {
- dev_server_id: dev_server_id.0,
- path,
- })
- .await
- })
- }
-
- pub fn create_dev_server(
- &mut self,
- name: String,
- ssh_connection_string: Option<String>,
- cx: &mut ModelContext<Self>,
- ) -> Task<Result<proto::CreateDevServerResponse>> {
- let client = self.client.clone();
- cx.background_executor().spawn(async move {
- let result = client
- .request(proto::CreateDevServer {
- name,
- ssh_connection_string,
- })
- .await?;
- Ok(result)
- })
- }
-
- pub fn rename_dev_server(
- &mut self,
- dev_server_id: DevServerId,
- name: String,
- ssh_connection_string: Option<String>,
- cx: &mut ModelContext<Self>,
- ) -> Task<Result<()>> {
- let client = self.client.clone();
- cx.background_executor().spawn(async move {
- client
- .request(proto::RenameDevServer {
- dev_server_id: dev_server_id.0,
- name,
- ssh_connection_string,
- })
- .await?;
- Ok(())
- })
- }
-
- pub fn regenerate_dev_server_token(
- &mut self,
- dev_server_id: DevServerId,
- cx: &mut ModelContext<Self>,
- ) -> Task<Result<proto::RegenerateDevServerTokenResponse>> {
- let client = self.client.clone();
- cx.background_executor().spawn(async move {
- client
- .request(proto::RegenerateDevServerToken {
- dev_server_id: dev_server_id.0,
- })
- .await
- })
- }
-
- pub fn delete_dev_server(
- &mut self,
- id: DevServerId,
- cx: &mut ModelContext<Self>,
- ) -> Task<Result<()>> {
- let client = self.client.clone();
- cx.background_executor().spawn(async move {
- client
- .request(proto::DeleteDevServer {
- dev_server_id: id.0,
- })
- .await?;
- Ok(())
- })
- }
-
- pub fn delete_dev_server_project(
- &mut self,
- id: DevServerProjectId,
- cx: &mut ModelContext<Self>,
- ) -> Task<Result<()>> {
- let client = self.client.clone();
- cx.background_executor().spawn(async move {
- client
- .request(proto::DeleteDevServerProject {
- dev_server_project_id: id.0,
- })
- .await?;
- Ok(())
- })
- }
-}
@@ -9,7 +9,7 @@ use anyhow::Result;
use collections::{BTreeSet, HashSet};
use editor::{
diagnostic_block_renderer,
- display_map::{BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, RenderBlock},
+ display_map::{BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, RenderBlock},
highlight_diagnostic_message,
scroll::Autoscroll,
Editor, EditorEvent, ExcerptId, ExcerptRange, MultiBuffer, ToOffset,
@@ -439,11 +439,10 @@ impl ProjectDiagnosticsEditor {
primary.message.split('\n').next().unwrap().to_string();
group_state.block_count += 1;
blocks_to_add.push(BlockProperties {
- position: header_position,
+ placement: BlockPlacement::Above(header_position),
height: 2,
style: BlockStyle::Sticky,
render: diagnostic_header_renderer(primary),
- disposition: BlockDisposition::Above,
priority: 0,
});
}
@@ -459,13 +458,15 @@ impl ProjectDiagnosticsEditor {
if !diagnostic.message.is_empty() {
group_state.block_count += 1;
blocks_to_add.push(BlockProperties {
- position: (excerpt_id, entry.range.start),
+ placement: BlockPlacement::Below((
+ excerpt_id,
+ entry.range.start,
+ )),
height: diagnostic.message.matches('\n').count() as u32 + 1,
style: BlockStyle::Fixed,
render: diagnostic_block_renderer(
diagnostic, None, true, true,
),
- disposition: BlockDisposition::Below,
priority: 0,
});
}
@@ -498,13 +499,24 @@ impl ProjectDiagnosticsEditor {
editor.remove_blocks(blocks_to_remove, None, cx);
let block_ids = editor.insert_blocks(
blocks_to_add.into_iter().flat_map(|block| {
- let (excerpt_id, text_anchor) = block.position;
+ let placement = match block.placement {
+ BlockPlacement::Above((excerpt_id, text_anchor)) => BlockPlacement::Above(
+ excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor)?,
+ ),
+ BlockPlacement::Below((excerpt_id, text_anchor)) => BlockPlacement::Below(
+ excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor)?,
+ ),
+ BlockPlacement::Replace(_) => {
+ unreachable!(
+ "no Replace block should have been pushed to blocks_to_add"
+ )
+ }
+ };
Some(BlockProperties {
- position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor)?,
+ placement,
height: block.height,
style: block.style,
render: block.render,
- disposition: block.disposition,
priority: 0,
})
}),
@@ -18,7 +18,7 @@ pub struct SelectPrevious {
#[derive(PartialEq, Clone, Deserialize, Default)]
pub struct MoveToBeginningOfLine {
#[serde(default = "default_true")]
- pub(super) stop_at_soft_wraps: bool,
+ pub stop_at_soft_wraps: bool,
}
#[derive(PartialEq, Clone, Deserialize, Default)]
@@ -153,6 +153,10 @@ pub struct DeleteToPreviousWordStart {
pub ignore_newlines: bool,
}
+#[derive(PartialEq, Clone, Deserialize, Default)]
+pub struct FoldAtLevel {
+ pub level: u32,
+}
impl_actions!(
editor,
[
@@ -182,6 +186,7 @@ impl_actions!(
ToggleCodeActions,
ToggleComments,
UnfoldAt,
+ FoldAtLevel
]
);
@@ -193,6 +198,7 @@ gpui::actions!(
AcceptPartialInlineCompletion,
AddSelectionAbove,
AddSelectionBelow,
+ ApplyAllDiffHunks,
ApplyDiffHunk,
Backspace,
Cancel,
@@ -28,8 +28,8 @@ use crate::{
hover_links::InlayHighlight, movement::TextLayoutDetails, EditorStyle, InlayId, RowExt,
};
pub use block_map::{
- Block, BlockBufferRows, BlockChunks as DisplayChunks, BlockContext, BlockDisposition, BlockId,
- BlockMap, BlockPoint, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
+ Block, BlockBufferRows, BlockChunks as DisplayChunks, BlockContext, BlockId, BlockMap,
+ BlockPlacement, BlockPoint, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
};
use block_map::{BlockRow, BlockSnapshot};
use collections::{HashMap, HashSet};
@@ -1156,6 +1156,7 @@ impl ToDisplayPoint for Anchor {
pub mod tests {
use super::*;
use crate::{movement, test::marked_display_snapshot};
+ use block_map::BlockPlacement;
use gpui::{div, font, observe, px, AppContext, BorrowAppContext, Context, Element, Hsla};
use language::{
language_settings::{AllLanguageSettings, AllLanguageSettingsContent},
@@ -1167,6 +1168,7 @@ pub mod tests {
use smol::stream::StreamExt;
use std::{env, sync::Arc};
use theme::{LoadThemes, SyntaxTheme};
+ use unindent::Unindent as _;
use util::test::{marked_text_ranges, sample_text};
use Bias::*;
@@ -1269,24 +1271,22 @@ pub mod tests {
Bias::Left,
));
- let disposition = if rng.gen() {
- BlockDisposition::Above
+ let placement = if rng.gen() {
+ BlockPlacement::Above(position)
} else {
- BlockDisposition::Below
+ BlockPlacement::Below(position)
};
let height = rng.gen_range(1..5);
log::info!(
- "inserting block {:?} {:?} with height {}",
- disposition,
- position.to_point(&buffer),
+ "inserting block {:?} with height {}",
+ placement.as_ref().map(|p| p.to_point(&buffer)),
height
);
let priority = rng.gen_range(1..100);
BlockProperties {
+ placement,
style: BlockStyle::Fixed,
- position,
height,
- disposition,
render: Box::new(|_| div().into_any()),
priority,
}
@@ -1625,8 +1625,6 @@ pub mod tests {
#[gpui::test]
async fn test_chunks(cx: &mut gpui::TestAppContext) {
- use unindent::Unindent as _;
-
let text = r#"
fn outer() {}
@@ -1723,12 +1721,110 @@ pub mod tests {
);
}
+ #[gpui::test]
+ async fn test_chunks_with_syntax_highlighting_across_blocks(cx: &mut gpui::TestAppContext) {
+ cx.background_executor
+ .set_block_on_ticks(usize::MAX..=usize::MAX);
+
+ let text = r#"
+ const A: &str = "
+ one
+ two
+ three
+ ";
+ const B: &str = "four";
+ "#
+ .unindent();
+
+ let theme = SyntaxTheme::new_test(vec![
+ ("string", Hsla::red()),
+ ("punctuation", Hsla::blue()),
+ ("keyword", Hsla::green()),
+ ]);
+ let language = Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::LANGUAGE.into()),
+ )
+ .with_highlights_query(
+ r#"
+ (string_literal) @string
+ "const" @keyword
+ [":" ";"] @punctuation
+ "#,
+ )
+ .unwrap(),
+ );
+ language.set_theme(&theme);
+
+ cx.update(|cx| init_test(cx, |_| {}));
+
+ let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx));
+ cx.condition(&buffer, |buf, _| !buf.is_parsing()).await;
+ let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
+ let buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx));
+
+ let map = cx.new_model(|cx| {
+ DisplayMap::new(
+ buffer,
+ font("Courier"),
+ px(16.0),
+ None,
+ true,
+ 1,
+ 1,
+ 0,
+ FoldPlaceholder::test(),
+ cx,
+ )
+ });
+
+ // Insert a block in the middle of a multi-line string literal
+ map.update(cx, |map, cx| {
+ map.insert_blocks(
+ [BlockProperties {
+ placement: BlockPlacement::Below(
+ buffer_snapshot.anchor_before(Point::new(1, 0)),
+ ),
+ height: 1,
+ style: BlockStyle::Sticky,
+ render: Box::new(|_| div().into_any()),
+ priority: 0,
+ }],
+ cx,
+ )
+ });
+
+ pretty_assertions::assert_eq!(
+ cx.update(|cx| syntax_chunks(DisplayRow(0)..DisplayRow(7), &map, &theme, cx)),
+ [
+ ("const".into(), Some(Hsla::green())),
+ (" A".into(), None),
+ (":".into(), Some(Hsla::blue())),
+ (" &str = ".into(), None),
+ ("\"\n one\n".into(), Some(Hsla::red())),
+ ("\n".into(), None),
+ (" two\n three\n\"".into(), Some(Hsla::red())),
+ (";".into(), Some(Hsla::blue())),
+ ("\n".into(), None),
+ ("const".into(), Some(Hsla::green())),
+ (" B".into(), None),
+ (":".into(), Some(Hsla::blue())),
+ (" &str = ".into(), None),
+ ("\"four\"".into(), Some(Hsla::red())),
+ (";".into(), Some(Hsla::blue())),
+ ("\n".into(), None),
+ ]
+ );
+ }
+
// todo(linux) fails due to pixel differences in text rendering
#[cfg(target_os = "macos")]
#[gpui::test]
async fn test_chunks_with_soft_wrapping(cx: &mut gpui::TestAppContext) {
- use unindent::Unindent as _;
-
cx.background_executor
.set_block_on_ticks(usize::MAX..=usize::MAX);
@@ -6,7 +6,9 @@ use crate::{EditorStyle, GutterDimensions};
use collections::{Bound, HashMap, HashSet};
use gpui::{AnyElement, EntityId, Pixels, WindowContext};
use language::{Chunk, Patch, Point};
-use multi_buffer::{Anchor, ExcerptId, ExcerptInfo, MultiBufferRow, ToPoint as _};
+use multi_buffer::{
+ Anchor, ExcerptId, ExcerptInfo, MultiBufferRow, MultiBufferSnapshot, ToPoint as _,
+};
use parking_lot::Mutex;
use std::{
cell::RefCell,
@@ -18,7 +20,7 @@ use std::{
Arc,
},
};
-use sum_tree::{Bias, SumTree, TreeMap};
+use sum_tree::{Bias, SumTree, Summary, TreeMap};
use text::Edit;
use ui::ElementId;
@@ -77,32 +79,173 @@ struct WrapRow(u32);
pub type RenderBlock = Box<dyn Send + FnMut(&mut BlockContext) -> AnyElement>;
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub enum BlockPlacement<T> {
+ Above(T),
+ Below(T),
+ Replace(Range<T>),
+}
+
+impl<T> BlockPlacement<T> {
+ fn start(&self) -> &T {
+ match self {
+ BlockPlacement::Above(position) => position,
+ BlockPlacement::Below(position) => position,
+ BlockPlacement::Replace(range) => &range.start,
+ }
+ }
+
+ fn end(&self) -> &T {
+ match self {
+ BlockPlacement::Above(position) => position,
+ BlockPlacement::Below(position) => position,
+ BlockPlacement::Replace(range) => &range.end,
+ }
+ }
+
+ pub fn as_ref(&self) -> BlockPlacement<&T> {
+ match self {
+ BlockPlacement::Above(position) => BlockPlacement::Above(position),
+ BlockPlacement::Below(position) => BlockPlacement::Below(position),
+ BlockPlacement::Replace(range) => BlockPlacement::Replace(&range.start..&range.end),
+ }
+ }
+
+ pub fn map<R>(self, mut f: impl FnMut(T) -> R) -> BlockPlacement<R> {
+ match self {
+ BlockPlacement::Above(position) => BlockPlacement::Above(f(position)),
+ BlockPlacement::Below(position) => BlockPlacement::Below(f(position)),
+ BlockPlacement::Replace(range) => BlockPlacement::Replace(f(range.start)..f(range.end)),
+ }
+ }
+}
+
+impl BlockPlacement<Anchor> {
+ fn cmp(&self, other: &Self, buffer: &MultiBufferSnapshot) -> Ordering {
+ match (self, other) {
+ (BlockPlacement::Above(anchor_a), BlockPlacement::Above(anchor_b))
+ | (BlockPlacement::Below(anchor_a), BlockPlacement::Below(anchor_b)) => {
+ anchor_a.cmp(anchor_b, buffer)
+ }
+ (BlockPlacement::Above(anchor_a), BlockPlacement::Below(anchor_b)) => {
+ anchor_a.cmp(anchor_b, buffer).then(Ordering::Less)
+ }
+ (BlockPlacement::Below(anchor_a), BlockPlacement::Above(anchor_b)) => {
+ anchor_a.cmp(anchor_b, buffer).then(Ordering::Greater)
+ }
+ (BlockPlacement::Above(anchor), BlockPlacement::Replace(range)) => {
+ anchor.cmp(&range.start, buffer).then(Ordering::Less)
+ }
+ (BlockPlacement::Replace(range), BlockPlacement::Above(anchor)) => {
+ range.start.cmp(anchor, buffer).then(Ordering::Greater)
+ }
+ (BlockPlacement::Below(anchor), BlockPlacement::Replace(range)) => {
+ anchor.cmp(&range.start, buffer).then(Ordering::Greater)
+ }
+ (BlockPlacement::Replace(range), BlockPlacement::Below(anchor)) => {
+ range.start.cmp(anchor, buffer).then(Ordering::Less)
+ }
+ (BlockPlacement::Replace(range_a), BlockPlacement::Replace(range_b)) => range_a
+ .start
+ .cmp(&range_b.start, buffer)
+ .then_with(|| range_b.end.cmp(&range_a.end, buffer)),
+ }
+ }
+
+ fn to_wrap_row(&self, wrap_snapshot: &WrapSnapshot) -> Option<BlockPlacement<WrapRow>> {
+ let buffer_snapshot = wrap_snapshot.buffer_snapshot();
+ match self {
+ BlockPlacement::Above(position) => {
+ let mut position = position.to_point(buffer_snapshot);
+ position.column = 0;
+ let wrap_row = WrapRow(wrap_snapshot.make_wrap_point(position, Bias::Left).row());
+ Some(BlockPlacement::Above(wrap_row))
+ }
+ BlockPlacement::Below(position) => {
+ let mut position = position.to_point(buffer_snapshot);
+ position.column = buffer_snapshot.line_len(MultiBufferRow(position.row));
+ let wrap_row = WrapRow(wrap_snapshot.make_wrap_point(position, Bias::Left).row());
+ Some(BlockPlacement::Below(wrap_row))
+ }
+ BlockPlacement::Replace(range) => {
+ let mut start = range.start.to_point(buffer_snapshot);
+ let mut end = range.end.to_point(buffer_snapshot);
+ if start == end {
+ None
+ } else {
+ start.column = 0;
+ let start_wrap_row =
+ WrapRow(wrap_snapshot.make_wrap_point(start, Bias::Left).row());
+ end.column = buffer_snapshot.line_len(MultiBufferRow(end.row));
+ let end_wrap_row =
+ WrapRow(wrap_snapshot.make_wrap_point(end, Bias::Left).row());
+ Some(BlockPlacement::Replace(start_wrap_row..end_wrap_row))
+ }
+ }
+ }
+ }
+}
+
+impl Ord for BlockPlacement<WrapRow> {
+ fn cmp(&self, other: &Self) -> Ordering {
+ match (self, other) {
+ (BlockPlacement::Above(row_a), BlockPlacement::Above(row_b))
+ | (BlockPlacement::Below(row_a), BlockPlacement::Below(row_b)) => row_a.cmp(row_b),
+ (BlockPlacement::Above(row_a), BlockPlacement::Below(row_b)) => {
+ row_a.cmp(row_b).then(Ordering::Less)
+ }
+ (BlockPlacement::Below(row_a), BlockPlacement::Above(row_b)) => {
+ row_a.cmp(row_b).then(Ordering::Greater)
+ }
+ (BlockPlacement::Above(row), BlockPlacement::Replace(range)) => {
+ row.cmp(&range.start).then(Ordering::Less)
+ }
+ (BlockPlacement::Replace(range), BlockPlacement::Above(row)) => {
+ range.start.cmp(row).then(Ordering::Greater)
+ }
+ (BlockPlacement::Below(row), BlockPlacement::Replace(range)) => {
+ row.cmp(&range.start).then(Ordering::Greater)
+ }
+ (BlockPlacement::Replace(range), BlockPlacement::Below(row)) => {
+ range.start.cmp(row).then(Ordering::Less)
+ }
+ (BlockPlacement::Replace(range_a), BlockPlacement::Replace(range_b)) => range_a
+ .start
+ .cmp(&range_b.start)
+ .then_with(|| range_b.end.cmp(&range_a.end)),
+ }
+ }
+}
+
+impl PartialOrd for BlockPlacement<WrapRow> {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
pub struct CustomBlock {
id: CustomBlockId,
- position: Anchor,
+ placement: BlockPlacement<Anchor>,
height: u32,
style: BlockStyle,
render: Arc<Mutex<RenderBlock>>,
- disposition: BlockDisposition,
priority: usize,
}
pub struct BlockProperties<P> {
- pub position: P,
+ pub placement: BlockPlacement<P>,
pub height: u32,
pub style: BlockStyle,
pub render: RenderBlock,
- pub disposition: BlockDisposition,
pub priority: usize,
}
impl<P: Debug> Debug for BlockProperties<P> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("BlockProperties")
- .field("position", &self.position)
+ .field("placement", &self.placement)
.field("height", &self.height)
.field("style", &self.style)
- .field("disposition", &self.disposition)
.finish()
}
}
@@ -125,10 +268,10 @@ pub struct BlockContext<'a, 'b> {
pub editor_style: &'b EditorStyle,
}
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)]
pub enum BlockId {
- Custom(CustomBlockId),
ExcerptBoundary(Option<ExcerptId>),
+ Custom(CustomBlockId),
}
impl From<BlockId> for ElementId {
@@ -152,30 +295,12 @@ impl std::fmt::Display for BlockId {
}
}
-/// Whether the block should be considered above or below the anchor line
-#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
-pub enum BlockDisposition {
- Above,
- Below,
-}
-
#[derive(Clone, Debug)]
struct Transform {
summary: TransformSummary,
block: Option<Block>,
}
-pub(crate) enum BlockType {
- Custom(CustomBlockId),
- ExcerptBoundary,
-}
-
-pub(crate) trait BlockLike {
- fn block_type(&self) -> BlockType;
- fn disposition(&self) -> BlockDisposition;
- fn priority(&self) -> usize;
-}
-
#[allow(clippy::large_enum_variant)]
#[derive(Clone)]
pub enum Block {
@@ -189,26 +314,6 @@ pub enum Block {
},
}
-impl BlockLike for Block {
- fn block_type(&self) -> BlockType {
- match self {
- Block::Custom(block) => BlockType::Custom(block.id),
- Block::ExcerptBoundary { .. } => BlockType::ExcerptBoundary,
- }
- }
-
- fn disposition(&self) -> BlockDisposition {
- self.disposition()
- }
-
- fn priority(&self) -> usize {
- match self {
- Block::Custom(block) => block.priority,
- Block::ExcerptBoundary { .. } => usize::MAX,
- }
- }
-}
-
impl Block {
pub fn id(&self) -> BlockId {
match self {
@@ -219,19 +324,6 @@ impl Block {
}
}
- fn disposition(&self) -> BlockDisposition {
- match self {
- Block::Custom(block) => block.disposition,
- Block::ExcerptBoundary { next_excerpt, .. } => {
- if next_excerpt.is_some() {
- BlockDisposition::Above
- } else {
- BlockDisposition::Below
- }
- }
- }
- }
-
pub fn height(&self) -> u32 {
match self {
Block::Custom(block) => block.height,
@@ -245,6 +337,20 @@ impl Block {
Block::ExcerptBoundary { .. } => BlockStyle::Sticky,
}
}
+
+ fn place_above(&self) -> bool {
+ match self {
+ Block::Custom(block) => matches!(block.placement, BlockPlacement::Above(_)),
+ Block::ExcerptBoundary { next_excerpt, .. } => next_excerpt.is_some(),
+ }
+ }
+
+ fn place_below(&self) -> bool {
+ match self {
+ Block::Custom(block) => matches!(block.placement, BlockPlacement::Below(_)),
+ Block::ExcerptBoundary { next_excerpt, .. } => next_excerpt.is_none(),
+ }
+ }
}
impl Debug for Block {
@@ -270,6 +376,8 @@ impl Debug for Block {
struct TransformSummary {
input_rows: u32,
output_rows: u32,
+ longest_row: u32,
+ longest_row_chars: u32,
}
pub struct BlockChunks<'a> {
@@ -298,11 +406,13 @@ impl BlockMap {
excerpt_footer_height: u32,
) -> Self {
let row_count = wrap_snapshot.max_point().row() + 1;
+ let mut transforms = SumTree::default();
+ push_isomorphic(&mut transforms, row_count, &wrap_snapshot);
let map = Self {
next_block_id: AtomicUsize::new(0),
custom_blocks: Vec::new(),
custom_blocks_by_id: TreeMap::default(),
- transforms: RefCell::new(SumTree::from_item(Transform::isomorphic(row_count), &())),
+ transforms: RefCell::new(transforms),
wrap_snapshot: RefCell::new(wrap_snapshot.clone()),
show_excerpt_controls,
buffer_header_height,
@@ -368,28 +478,29 @@ impl BlockMap {
let mut transforms = self.transforms.borrow_mut();
let mut new_transforms = SumTree::default();
- let old_row_count = transforms.summary().input_rows;
- let new_row_count = wrap_snapshot.max_point().row() + 1;
let mut cursor = transforms.cursor::<WrapRow>(&());
let mut last_block_ix = 0;
let mut blocks_in_edit = Vec::new();
let mut edits = edits.into_iter().peekable();
while let Some(edit) = edits.next() {
- // Preserve any old transforms that precede this edit.
- let old_start = WrapRow(edit.old.start);
- let new_start = WrapRow(edit.new.start);
+ let mut old_start = WrapRow(edit.old.start);
+ let mut new_start = WrapRow(edit.new.start);
+
+ // Preserve transforms that:
+ // * strictly precedes this edit
+ // * isomorphic or replace transforms that end *at* the start of the edit
+ // * below blocks that end at the start of the edit
new_transforms.append(cursor.slice(&old_start, Bias::Left, &()), &());
if let Some(transform) = cursor.item() {
- if transform.is_isomorphic() && old_start == cursor.end(&()) {
+ if transform.summary.input_rows > 0 && cursor.end(&()) == old_start {
+ // Preserve the transform (push and next)
new_transforms.push(transform.clone(), &());
cursor.next(&());
+
+ // Preserve below blocks at end of edit
while let Some(transform) = cursor.item() {
- if transform
- .block
- .as_ref()
- .map_or(false, |b| b.disposition().is_below())
- {
+ if transform.block.as_ref().map_or(false, |b| b.place_below()) {
new_transforms.push(transform.clone(), &());
cursor.next(&());
} else {
@@ -399,50 +510,70 @@ impl BlockMap {
}
}
- // Preserve any portion of an old transform that precedes this edit.
- let extent_before_edit = old_start.0 - cursor.start().0;
- push_isomorphic(&mut new_transforms, extent_before_edit);
+ // Ensure the edit starts at a transform boundary.
+ // If the edit starts within an isomorphic transform, preserve its prefix
+ // If the edit lands within a replacement block, expand the edit to include the start of the replaced input range
+ let mut preserved_blocks_above_edit = false;
+ let transform = cursor.item().unwrap();
+ let transform_rows_before_edit = old_start.0 - cursor.start().0;
+ if transform_rows_before_edit > 0 {
+ if transform.block.is_none() {
+ // Preserve any portion of the old isomorphic transform that precedes this edit.
+ push_isomorphic(
+ &mut new_transforms,
+ transform_rows_before_edit,
+ wrap_snapshot,
+ );
+ } else {
+ // We landed within a block that replaces some lines, so we
+ // extend the edit to start at the beginning of the
+ // replacement.
+ debug_assert!(transform.summary.input_rows > 0);
+ old_start.0 -= transform_rows_before_edit;
+ new_start.0 -= transform_rows_before_edit;
+ // The blocks *above* it are already in the new transforms, so
+ // we don't need to re-insert them when querying blocks.
+ preserved_blocks_above_edit = true;
+ }
+ }
- // Skip over any old transforms that intersect this edit.
+ // Decide where the edit ends
+ // * It should end at a transform boundary
+ // * Coalesce edits that intersect the same transform
let mut old_end = WrapRow(edit.old.end);
let mut new_end = WrapRow(edit.new.end);
- cursor.seek(&old_end, Bias::Left, &());
- cursor.next(&());
- if old_end == *cursor.start() {
- while let Some(transform) = cursor.item() {
- if transform
- .block
- .as_ref()
- .map_or(false, |b| b.disposition().is_below())
- {
+ loop {
+ // Seek to the transform starting at or after the end of the edit
+ cursor.seek(&old_end, Bias::Left, &());
+ cursor.next(&());
+
+ // Extend edit to the end of the discarded transform so it is reconstructed in full
+ let transform_rows_after_edit = cursor.start().0 - old_end.0;
+ old_end.0 += transform_rows_after_edit;
+ new_end.0 += transform_rows_after_edit;
+
+ // Combine this edit with any subsequent edits that intersect the same transform.
+ while let Some(next_edit) = edits.peek() {
+ if next_edit.old.start <= cursor.start().0 {
+ old_end = WrapRow(next_edit.old.end);
+ new_end = WrapRow(next_edit.new.end);
+ cursor.seek(&old_end, Bias::Left, &());
cursor.next(&());
+ edits.next();
} else {
break;
}
}
+
+ if *cursor.start() == old_end {
+ break;
+ }
}
- // Combine this edit with any subsequent edits that intersect the same transform.
- while let Some(next_edit) = edits.peek() {
- if next_edit.old.start <= cursor.start().0 {
- old_end = WrapRow(next_edit.old.end);
- new_end = WrapRow(next_edit.new.end);
- cursor.seek(&old_end, Bias::Left, &());
+ // Discard below blocks at the end of the edit. They'll be reconstructed.
+ while let Some(transform) = cursor.item() {
+ if transform.block.as_ref().map_or(false, |b| b.place_below()) {
cursor.next(&());
- if old_end == *cursor.start() {
- while let Some(transform) = cursor.item() {
- if transform
- .block
- .as_ref()
- .map_or(false, |b| b.disposition().is_below())
- {
- cursor.next(&());
- } else {
- break;
- }
- }
- }
- edits.next();
} else {
break;
}
@@ -455,9 +586,10 @@ impl BlockMap {
let start_block_ix =
match self.custom_blocks[last_block_ix..].binary_search_by(|probe| {
probe
- .position
+ .start()
.to_point(buffer)
.cmp(&new_buffer_start)
+ // Move left until we find the index of the first block starting within this edit
.then(Ordering::Greater)
}) {
Ok(ix) | Err(ix) => last_block_ix + ix,
@@ -473,7 +605,7 @@ impl BlockMap {
end_bound = Bound::Excluded(new_buffer_end);
match self.custom_blocks[start_block_ix..].binary_search_by(|probe| {
probe
- .position
+ .start()
.to_point(buffer)
.cmp(&new_buffer_end)
.then(Ordering::Greater)
@@ -484,19 +616,17 @@ impl BlockMap {
last_block_ix = end_block_ix;
debug_assert!(blocks_in_edit.is_empty());
- blocks_in_edit.extend(self.custom_blocks[start_block_ix..end_block_ix].iter().map(
- |block| {
- let mut position = block.position.to_point(buffer);
- match block.disposition {
- BlockDisposition::Above => position.column = 0,
- BlockDisposition::Below => {
- position.column = buffer.line_len(MultiBufferRow(position.row))
- }
- }
- let position = wrap_snapshot.make_wrap_point(position, Bias::Left);
- (position.row(), Block::Custom(block.clone()))
- },
- ));
+
+ blocks_in_edit.extend(
+ self.custom_blocks[start_block_ix..end_block_ix]
+ .iter()
+ .filter_map(|block| {
+ Some((
+ block.placement.to_wrap_row(wrap_snapshot)?,
+ Block::Custom(block.clone()),
+ ))
+ }),
+ );
if buffer.show_headers() {
blocks_in_edit.extend(BlockMap::header_and_footer_blocks(
@@ -514,26 +644,49 @@ impl BlockMap {
// For each of these blocks, insert a new isomorphic transform preceding the block,
// and then insert the block itself.
- for (block_row, block) in blocks_in_edit.drain(..) {
- let insertion_row = match block.disposition() {
- BlockDisposition::Above => block_row,
- BlockDisposition::Below => block_row + 1,
+ for (block_placement, block) in blocks_in_edit.drain(..) {
+ if preserved_blocks_above_edit
+ && block_placement == BlockPlacement::Above(new_start)
+ {
+ continue;
+ }
+
+ let mut summary = TransformSummary {
+ input_rows: 0,
+ output_rows: block.height(),
+ longest_row: 0,
+ longest_row_chars: 0,
};
- let extent_before_block = insertion_row - new_transforms.summary().input_rows;
- push_isomorphic(&mut new_transforms, extent_before_block);
- new_transforms.push(Transform::block(block), &());
- }
- old_end = WrapRow(old_end.0.min(old_row_count));
- new_end = WrapRow(new_end.0.min(new_row_count));
+ let rows_before_block;
+ match block_placement {
+ BlockPlacement::Above(position) => {
+ rows_before_block = position.0 - new_transforms.summary().input_rows;
+ }
+ BlockPlacement::Below(position) => {
+ rows_before_block = (position.0 + 1) - new_transforms.summary().input_rows;
+ }
+ BlockPlacement::Replace(range) => {
+ rows_before_block = range.start.0 - new_transforms.summary().input_rows;
+ summary.input_rows = range.end.0 - range.start.0 + 1;
+ }
+ }
- // Insert an isomorphic transform after the final block.
- let extent_after_last_block = new_end.0 - new_transforms.summary().input_rows;
- push_isomorphic(&mut new_transforms, extent_after_last_block);
+ push_isomorphic(&mut new_transforms, rows_before_block, wrap_snapshot);
+ new_transforms.push(
+ Transform {
+ summary,
+ block: Some(block),
+ },
+ &(),
+ );
+ }
- // Preserve any portion of the old transform after this edit.
- let extent_after_edit = cursor.start().0 - old_end.0;
- push_isomorphic(&mut new_transforms, extent_after_edit);
+ // Insert an isomorphic transform after the final block.
+ let rows_after_last_block = new_end
+ .0
+ .saturating_sub(new_transforms.summary().input_rows);
+ push_isomorphic(&mut new_transforms, rows_after_last_block, wrap_snapshot);
}
new_transforms.append(cursor.suffix(&()), &());
@@ -558,7 +711,7 @@ impl BlockMap {
self.show_excerpt_controls
}
- pub fn header_and_footer_blocks<'a, 'b: 'a, 'c: 'a + 'b, R, T>(
+ fn header_and_footer_blocks<'a, 'b: 'a, 'c: 'a + 'b, R, T>(
show_excerpt_controls: bool,
excerpt_footer_height: u32,
buffer_header_height: u32,
@@ -566,7 +719,7 @@ impl BlockMap {
buffer: &'b multi_buffer::MultiBufferSnapshot,
range: R,
wrap_snapshot: &'c WrapSnapshot,
- ) -> impl Iterator<Item = (u32, Block)> + 'b
+ ) -> impl Iterator<Item = (BlockPlacement<WrapRow>, Block)> + 'b
where
R: RangeBounds<T>,
T: multi_buffer::ToOffset,
@@ -619,7 +772,11 @@ impl BlockMap {
}
Some((
- wrap_row,
+ if excerpt_boundary.next.is_some() {
+ BlockPlacement::Above(WrapRow(wrap_row))
+ } else {
+ BlockPlacement::Below(WrapRow(wrap_row))
+ },
Block::ExcerptBoundary {
prev_excerpt: excerpt_boundary.prev,
next_excerpt: excerpt_boundary.next,
@@ -631,45 +788,96 @@ impl BlockMap {
})
}
- pub(crate) fn sort_blocks<B: BlockLike>(blocks: &mut [(u32, B)]) {
- // Place excerpt headers and footers above custom blocks on the same row
- blocks.sort_unstable_by(|(row_a, block_a), (row_b, block_b)| {
- row_a.cmp(row_b).then_with(|| {
- block_a
- .disposition()
- .cmp(&block_b.disposition())
- .then_with(|| match ((block_a.block_type()), (block_b.block_type())) {
- (BlockType::ExcerptBoundary, BlockType::ExcerptBoundary) => Ordering::Equal,
- (BlockType::ExcerptBoundary, _) => Ordering::Less,
- (_, BlockType::ExcerptBoundary) => Ordering::Greater,
- (BlockType::Custom(a_id), BlockType::Custom(b_id)) => block_b
- .priority()
- .cmp(&block_a.priority())
- .then_with(|| a_id.cmp(&b_id)),
- })
- })
+ fn sort_blocks(blocks: &mut Vec<(BlockPlacement<WrapRow>, Block)>) {
+ blocks.sort_unstable_by(|(placement_a, block_a), (placement_b, block_b)| {
+ placement_a
+ .cmp(&placement_b)
+ .then_with(|| match (block_a, block_b) {
+ (
+ Block::ExcerptBoundary {
+ next_excerpt: next_excerpt_a,
+ ..
+ },
+ Block::ExcerptBoundary {
+ next_excerpt: next_excerpt_b,
+ ..
+ },
+ ) => next_excerpt_a
+ .as_ref()
+ .map(|excerpt| excerpt.id)
+ .cmp(&next_excerpt_b.as_ref().map(|excerpt| excerpt.id)),
+ (Block::ExcerptBoundary { next_excerpt, .. }, Block::Custom(_)) => {
+ if next_excerpt.is_some() {
+ Ordering::Less
+ } else {
+ Ordering::Greater
+ }
+ }
+ (Block::Custom(_), Block::ExcerptBoundary { next_excerpt, .. }) => {
+ if next_excerpt.is_some() {
+ Ordering::Greater
+ } else {
+ Ordering::Less
+ }
+ }
+ (Block::Custom(block_a), Block::Custom(block_b)) => block_a
+ .priority
+ .cmp(&block_b.priority)
+ .then_with(|| block_a.id.cmp(&block_b.id)),
+ })
+ });
+ blocks.dedup_by(|(right, _), (left, _)| match (left, right) {
+ (BlockPlacement::Replace(range), BlockPlacement::Above(row)) => {
+ range.start < *row && range.end >= *row
+ }
+ (BlockPlacement::Replace(range), BlockPlacement::Below(row)) => {
+ range.start <= *row && range.end > *row
+ }
+ (BlockPlacement::Replace(range_a), BlockPlacement::Replace(range_b)) => {
+ if range_a.end >= range_b.start && range_a.start <= range_b.end {
+ range_a.end = range_a.end.max(range_b.end);
+ true
+ } else {
+ false
+ }
+ }
+ _ => false,
});
}
}
-fn push_isomorphic(tree: &mut SumTree<Transform>, rows: u32) {
+fn push_isomorphic(tree: &mut SumTree<Transform>, rows: u32, wrap_snapshot: &WrapSnapshot) {
if rows == 0 {
return;
}
- let mut extent = Some(rows);
+ let wrap_row_start = tree.summary().input_rows;
+ let wrap_row_end = wrap_row_start + rows;
+ let wrap_summary = wrap_snapshot.text_summary_for_range(wrap_row_start..wrap_row_end);
+ let summary = TransformSummary {
+ input_rows: rows,
+ output_rows: rows,
+ longest_row: wrap_summary.longest_row,
+ longest_row_chars: wrap_summary.longest_row_chars,
+ };
+ let mut merged = false;
tree.update_last(
|last_transform| {
- if last_transform.is_isomorphic() {
- let extent = extent.take().unwrap();
- last_transform.summary.input_rows += extent;
- last_transform.summary.output_rows += extent;
+ if last_transform.block.is_none() {
+ last_transform.summary.add_summary(&summary, &());
+ merged = true;
}
},
&(),
);
- if let Some(extent) = extent {
- tree.push(Transform::isomorphic(extent), &());
+ if !merged {
+ tree.push(
+ Transform {
+ summary,
+ block: None,
+ },
+ &(),
+ );
}
}
@@ -711,7 +919,7 @@ impl<'a> BlockMapReader<'a> {
pub fn row_for_block(&self, block_id: CustomBlockId) -> Option<BlockRow> {
let block = self.blocks.iter().find(|block| block.id == block_id)?;
let buffer_row = block
- .position
+ .start()
.to_point(self.wrap_snapshot.buffer_snapshot())
.row;
let wrap_row = self
@@ -735,9 +943,7 @@ impl<'a> BlockMapReader<'a> {
break;
}
- if let Some(BlockType::Custom(id)) =
- transform.block.as_ref().map(|block| block.block_type())
- {
+ if let Some(BlockId::Custom(id)) = transform.block.as_ref().map(|block| block.id()) {
if id == block_id {
return Some(cursor.start().1);
}
@@ -762,21 +968,27 @@ impl<'a> BlockMapWriter<'a> {
let mut previous_wrap_row_range: Option<Range<u32>> = None;
for block in blocks {
+ if let BlockPlacement::Replace(_) = &block.placement {
+ debug_assert!(block.height > 0);
+ }
+
let id = CustomBlockId(self.0.next_block_id.fetch_add(1, SeqCst));
ids.push(id);
- let position = block.position;
- let point = position.to_point(buffer);
- let wrap_row = wrap_snapshot
- .make_wrap_point(Point::new(point.row, 0), Bias::Left)
- .row();
+ let start = block.placement.start().to_point(buffer);
+ let end = block.placement.end().to_point(buffer);
+ let start_wrap_row = wrap_snapshot.make_wrap_point(start, Bias::Left).row();
+ let end_wrap_row = wrap_snapshot.make_wrap_point(end, Bias::Left).row();
let (start_row, end_row) = {
- previous_wrap_row_range.take_if(|range| !range.contains(&wrap_row));
+ previous_wrap_row_range.take_if(|range| {
+ !range.contains(&start_wrap_row) || !range.contains(&end_wrap_row)
+ });
let range = previous_wrap_row_range.get_or_insert_with(|| {
- let start_row = wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0));
+ let start_row =
+ wrap_snapshot.prev_row_boundary(WrapPoint::new(start_wrap_row, 0));
let end_row = wrap_snapshot
- .next_row_boundary(WrapPoint::new(wrap_row, 0))
+ .next_row_boundary(WrapPoint::new(end_wrap_row, 0))
.unwrap_or(wrap_snapshot.max_point().row() + 1);
start_row..end_row
});
@@ -785,16 +997,15 @@ impl<'a> BlockMapWriter<'a> {
let block_ix = match self
.0
.custom_blocks
- .binary_search_by(|probe| probe.position.cmp(&position, buffer))
+ .binary_search_by(|probe| probe.placement.cmp(&block.placement, buffer))
{
Ok(ix) | Err(ix) => ix,
};
let new_block = Arc::new(CustomBlock {
id,
- position,
+ placement: block.placement,
height: block.height,
render: Arc::new(Mutex::new(block.render)),
- disposition: block.disposition,
style: block.style,
priority: block.priority,
});
@@ -819,34 +1030,41 @@ impl<'a> BlockMapWriter<'a> {
for block in &mut self.0.custom_blocks {
if let Some(new_height) = heights.remove(&block.id) {
+ if let BlockPlacement::Replace(_) = &block.placement {
+ debug_assert!(new_height > 0);
+ }
+
if block.height != new_height {
let new_block = CustomBlock {
id: block.id,
- position: block.position,
+ placement: block.placement.clone(),
height: new_height,
style: block.style,
render: block.render.clone(),
- disposition: block.disposition,
priority: block.priority,
};
let new_block = Arc::new(new_block);
*block = new_block.clone();
self.0.custom_blocks_by_id.insert(block.id, new_block);
- let buffer_row = block.position.to_point(buffer).row;
- if last_block_buffer_row != Some(buffer_row) {
- last_block_buffer_row = Some(buffer_row);
- let wrap_row = wrap_snapshot
- .make_wrap_point(Point::new(buffer_row, 0), Bias::Left)
+ let start_row = block.placement.start().to_point(buffer).row;
+ let end_row = block.placement.end().to_point(buffer).row;
+ if last_block_buffer_row != Some(end_row) {
+ last_block_buffer_row = Some(end_row);
+ let start_wrap_row = wrap_snapshot
+ .make_wrap_point(Point::new(start_row, 0), Bias::Left)
+ .row();
+ let end_wrap_row = wrap_snapshot
+ .make_wrap_point(Point::new(end_row, 0), Bias::Left)
.row();
- let start_row =
- wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0));
- let end_row = wrap_snapshot
- .next_row_boundary(WrapPoint::new(wrap_row, 0))
+ let start =
+ wrap_snapshot.prev_row_boundary(WrapPoint::new(start_wrap_row, 0));
+ let end = wrap_snapshot
+ .next_row_boundary(WrapPoint::new(end_wrap_row, 0))
.unwrap_or(wrap_snapshot.max_point().row() + 1);
edits.push(Edit {
- old: start_row..end_row,
- new: start_row..end_row,
+ old: start..end,
+ new: start..end,
})
}
}
@@ -864,19 +1082,21 @@ impl<'a> BlockMapWriter<'a> {
let mut previous_wrap_row_range: Option<Range<u32>> = None;
self.0.custom_blocks.retain(|block| {
if block_ids.contains(&block.id) {
- let buffer_row = block.position.to_point(buffer).row;
- if last_block_buffer_row != Some(buffer_row) {
- last_block_buffer_row = Some(buffer_row);
- let wrap_row = wrap_snapshot
- .make_wrap_point(Point::new(buffer_row, 0), Bias::Left)
- .row();
+ let start = block.placement.start().to_point(buffer);
+ let end = block.placement.end().to_point(buffer);
+ if last_block_buffer_row != Some(end.row) {
+ last_block_buffer_row = Some(end.row);
+ let start_wrap_row = wrap_snapshot.make_wrap_point(start, Bias::Left).row();
+ let end_wrap_row = wrap_snapshot.make_wrap_point(end, Bias::Left).row();
let (start_row, end_row) = {
- previous_wrap_row_range.take_if(|range| !range.contains(&wrap_row));
+ previous_wrap_row_range.take_if(|range| {
+ !range.contains(&start_wrap_row) || !range.contains(&end_wrap_row)
+ });
let range = previous_wrap_row_range.get_or_insert_with(|| {
let start_row =
- wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0));
+ wrap_snapshot.prev_row_boundary(WrapPoint::new(start_wrap_row, 0));
let end_row = wrap_snapshot
- .next_row_boundary(WrapPoint::new(wrap_row, 0))
+ .next_row_boundary(WrapPoint::new(end_wrap_row, 0))
.unwrap_or(wrap_snapshot.max_point().row() + 1);
start_row..end_row
});
@@ -921,31 +1141,24 @@ impl BlockSnapshot {
highlights: Highlights<'a>,
) -> BlockChunks<'a> {
let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows);
+
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
- let input_end = {
- cursor.seek(&BlockRow(rows.end), Bias::Right, &());
- let overshoot = if cursor
- .item()
- .map_or(false, |transform| transform.is_isomorphic())
- {
- rows.end - cursor.start().0 .0
- } else {
- 0
- };
- cursor.start().1 .0 + overshoot
- };
- let input_start = {
- cursor.seek(&BlockRow(rows.start), Bias::Right, &());
- let overshoot = if cursor
- .item()
- .map_or(false, |transform| transform.is_isomorphic())
- {
- rows.start - cursor.start().0 .0
- } else {
- 0
- };
- cursor.start().1 .0 + overshoot
- };
+ cursor.seek(&BlockRow(rows.start), Bias::Right, &());
+ let transform_output_start = cursor.start().0 .0;
+ let transform_input_start = cursor.start().1 .0;
+
+ let mut input_start = transform_input_start;
+ let mut input_end = transform_input_start;
+ if let Some(transform) = cursor.item() {
+ if transform.block.is_none() {
+ input_start += rows.start - transform_output_start;
+ input_end += cmp::min(
+ rows.end - transform_output_start,
+ transform.summary.input_rows,
+ );
+ }
+ }
+
BlockChunks {
input_chunks: self.wrap_snapshot.chunks(
input_start..input_end,
@@ -964,7 +1177,10 @@ impl BlockSnapshot {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&start_row, Bias::Right, &());
let (output_start, input_start) = cursor.start();
- let overshoot = if cursor.item().map_or(false, |t| t.is_isomorphic()) {
+ let overshoot = if cursor
+ .item()
+ .map_or(false, |transform| transform.block.is_none())
+ {
start_row.0 - output_start.0
} else {
0
@@ -1049,13 +1265,12 @@ impl BlockSnapshot {
}
pub fn max_point(&self) -> BlockPoint {
- let row = self.transforms.summary().output_rows - 1;
+ let row = self.transforms.summary().output_rows.saturating_sub(1);
BlockPoint::new(row, self.line_len(BlockRow(row)))
}
pub fn longest_row(&self) -> u32 {
- let input_row = self.wrap_snapshot.longest_row();
- self.to_block_point(WrapPoint::new(input_row, 0)).row
+ self.transforms.summary().longest_row
}
pub(super) fn line_len(&self, row: BlockRow) -> u32 {
@@ -1069,6 +1284,8 @@ impl BlockSnapshot {
} else {
self.wrap_snapshot.line_len(input_start.0 + overshoot)
}
+ } else if row.0 == 0 {
+ 0
} else {
panic!("row out of range");
}
@@ -1091,26 +1308,40 @@ impl BlockSnapshot {
loop {
if let Some(transform) = cursor.item() {
- if transform.is_isomorphic() {
- let (output_start_row, input_start_row) = cursor.start();
- let (output_end_row, input_end_row) = cursor.end(&());
- let output_start = Point::new(output_start_row.0, 0);
- let input_start = Point::new(input_start_row.0, 0);
- let input_end = Point::new(input_end_row.0, 0);
- let input_point = if point.row >= output_end_row.0 {
- let line_len = self.wrap_snapshot.line_len(input_end_row.0 - 1);
- self.wrap_snapshot
- .clip_point(WrapPoint::new(input_end_row.0 - 1, line_len), bias)
- } else {
- let output_overshoot = point.0.saturating_sub(output_start);
- self.wrap_snapshot
- .clip_point(WrapPoint(input_start + output_overshoot), bias)
- };
-
- if (input_start..input_end).contains(&input_point.0) {
- let input_overshoot = input_point.0.saturating_sub(input_start);
- return BlockPoint(output_start + input_overshoot);
+ let (output_start_row, input_start_row) = cursor.start();
+ let (output_end_row, input_end_row) = cursor.end(&());
+ let output_start = Point::new(output_start_row.0, 0);
+ let output_end = Point::new(output_end_row.0, 0);
+ let input_start = Point::new(input_start_row.0, 0);
+ let input_end = Point::new(input_end_row.0, 0);
+
+ match transform.block.as_ref() {
+ Some(Block::Custom(block))
+ if matches!(block.placement, BlockPlacement::Replace(_)) =>
+ {
+ if bias == Bias::Left {
+ return BlockPoint(output_start);
+ } else {
+ return BlockPoint(Point::new(output_end.row - 1, 0));
+ }
}
+ None => {
+ let input_point = if point.row >= output_end_row.0 {
+ let line_len = self.wrap_snapshot.line_len(input_end_row.0 - 1);
+ self.wrap_snapshot
+ .clip_point(WrapPoint::new(input_end_row.0 - 1, line_len), bias)
+ } else {
+ let output_overshoot = point.0.saturating_sub(output_start);
+ self.wrap_snapshot
+ .clip_point(WrapPoint(input_start + output_overshoot), bias)
+ };
+
+ if (input_start..input_end).contains(&input_point.0) {
+ let input_overshoot = input_point.0.saturating_sub(input_start);
+ return BlockPoint(output_start + input_overshoot);
+ }
+ }
+ _ => {}
}
if search_left {
@@ -1100,6 +1100,17 @@ pub struct FoldBufferRows<'a> {
fold_point: FoldPoint,
}
+impl<'a> FoldBufferRows<'a> {
+ pub(crate) fn seek(&mut self, row: u32) {
+ let fold_point = FoldPoint::new(row, 0);
+ self.cursor.seek(&fold_point, Bias::Left, &());
+ let overshoot = fold_point.0 - self.cursor.start().0 .0;
+ let inlay_point = InlayPoint(self.cursor.start().1 .0 + overshoot);
+ self.input_buffer_rows.seek(inlay_point.row());
+ self.fold_point = fold_point;
+ }
+}
+
impl<'a> Iterator for FoldBufferRows<'a> {
type Item = Option<u32>;
@@ -1135,6 +1146,38 @@ pub struct FoldChunks<'a> {
max_output_offset: FoldOffset,
}
+impl<'a> FoldChunks<'a> {
+ pub(crate) fn seek(&mut self, range: Range<FoldOffset>) {
+ self.transform_cursor.seek(&range.start, Bias::Right, &());
+
+ let inlay_start = {
+ let overshoot = range.start.0 - self.transform_cursor.start().0 .0;
+ self.transform_cursor.start().1 + InlayOffset(overshoot)
+ };
+
+ let transform_end = self.transform_cursor.end(&());
+
+ let inlay_end = if self
+ .transform_cursor
+ .item()
+ .map_or(true, |transform| transform.is_fold())
+ {
+ inlay_start
+ } else if range.end < transform_end.0 {
+ let overshoot = range.end.0 - self.transform_cursor.start().0 .0;
+ self.transform_cursor.start().1 + InlayOffset(overshoot)
+ } else {
+ transform_end.1
+ };
+
+ self.inlay_chunks.seek(inlay_start..inlay_end);
+ self.inlay_chunk = None;
+ self.inlay_offset = inlay_start;
+ self.output_offset = range.start;
+ self.max_output_offset = range.end;
+ }
+}
+
impl<'a> Iterator for FoldChunks<'a> {
type Item = Chunk<'a>;
@@ -251,6 +251,7 @@ impl TabSnapshot {
};
TabChunks {
+ snapshot: self,
fold_chunks: self.fold_snapshot.chunks(
input_start..input_end,
language_aware,
@@ -485,6 +486,7 @@ impl<'a> std::ops::AddAssign<&'a Self> for TextSummary {
const SPACES: &str = " ";
pub struct TabChunks<'a> {
+ snapshot: &'a TabSnapshot,
fold_chunks: FoldChunks<'a>,
chunk: Chunk<'a>,
column: u32,
@@ -496,6 +498,37 @@ pub struct TabChunks<'a> {
inside_leading_tab: bool,
}
+impl<'a> TabChunks<'a> {
+ pub(crate) fn seek(&mut self, range: Range<TabPoint>) {
+ let (input_start, expanded_char_column, to_next_stop) =
+ self.snapshot.to_fold_point(range.start, Bias::Left);
+ let input_column = input_start.column();
+ let input_start = input_start.to_offset(&self.snapshot.fold_snapshot);
+ let input_end = self
+ .snapshot
+ .to_fold_point(range.end, Bias::Right)
+ .0
+ .to_offset(&self.snapshot.fold_snapshot);
+ let to_next_stop = if range.start.0 + Point::new(0, to_next_stop) > range.end.0 {
+ range.end.column() - range.start.column()
+ } else {
+ to_next_stop
+ };
+
+ self.fold_chunks.seek(input_start..input_end);
+ self.input_column = input_column;
+ self.column = expanded_char_column;
+ self.output_position = range.start.0;
+ self.max_output_position = range.end.0;
+ self.chunk = Chunk {
+ text: &SPACES[0..(to_next_stop as usize)],
+ is_tab: true,
+ ..Default::default()
+ };
+ self.inside_leading_tab = to_next_stop > 0;
+ }
+}
+
impl<'a> Iterator for TabChunks<'a> {
type Item = Chunk<'a>;
@@ -56,6 +56,7 @@ pub struct WrapChunks<'a> {
output_position: WrapPoint,
max_output_row: u32,
transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
+ snapshot: &'a WrapSnapshot,
}
#[derive(Clone)]
@@ -68,6 +69,21 @@ pub struct WrapBufferRows<'a> {
transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
}
+impl<'a> WrapBufferRows<'a> {
+ pub(crate) fn seek(&mut self, start_row: u32) {
+ self.transforms
+ .seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
+ let mut input_row = self.transforms.start().1.row();
+ if self.transforms.item().map_or(false, |t| t.is_isomorphic()) {
+ input_row += start_row - self.transforms.start().0.row();
+ }
+ self.soft_wrapped = self.transforms.item().map_or(false, |t| !t.is_isomorphic());
+ self.input_buffer_rows.seek(input_row);
+ self.input_buffer_row = self.input_buffer_rows.next().unwrap();
+ self.output_row = start_row;
+ }
+}
+
impl WrapMap {
pub fn new(
tab_snapshot: TabSnapshot,
@@ -598,6 +614,7 @@ impl WrapSnapshot {
output_position: output_start,
max_output_row: rows.end,
transforms,
+ snapshot: self,
}
}
@@ -625,6 +642,65 @@ impl WrapSnapshot {
}
}
+ pub fn text_summary_for_range(&self, rows: Range<u32>) -> TextSummary {
+ let mut summary = TextSummary::default();
+
+ let start = WrapPoint::new(rows.start, 0);
+ let end = WrapPoint::new(rows.end, 0);
+
+ let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
+ cursor.seek(&start, Bias::Right, &());
+ if let Some(transform) = cursor.item() {
+ let start_in_transform = start.0 - cursor.start().0 .0;
+ let end_in_transform = cmp::min(end, cursor.end(&()).0).0 - cursor.start().0 .0;
+ if transform.is_isomorphic() {
+ let tab_start = TabPoint(cursor.start().1 .0 + start_in_transform);
+ let tab_end = TabPoint(cursor.start().1 .0 + end_in_transform);
+ summary += &self.tab_snapshot.text_summary_for_range(tab_start..tab_end);
+ } else {
+ debug_assert_eq!(start_in_transform.row, end_in_transform.row);
+ let indent_len = end_in_transform.column - start_in_transform.column;
+ summary += &TextSummary {
+ lines: Point::new(0, indent_len),
+ first_line_chars: indent_len,
+ last_line_chars: indent_len,
+ longest_row: 0,
+ longest_row_chars: indent_len,
+ };
+ }
+
+ cursor.next(&());
+ }
+
+ if rows.end > cursor.start().0.row() {
+ summary += &cursor
+ .summary::<_, TransformSummary>(&WrapPoint::new(rows.end, 0), Bias::Right, &())
+ .output;
+
+ if let Some(transform) = cursor.item() {
+ let end_in_transform = end.0 - cursor.start().0 .0;
+ if transform.is_isomorphic() {
+ let char_start = cursor.start().1;
+ let char_end = TabPoint(char_start.0 + end_in_transform);
+ summary += &self
+ .tab_snapshot
+ .text_summary_for_range(char_start..char_end);
+ } else {
+ debug_assert_eq!(end_in_transform, Point::new(1, 0));
+ summary += &TextSummary {
+ lines: Point::new(1, 0),
+ first_line_chars: 0,
+ last_line_chars: 0,
+ longest_row: 0,
+ longest_row_chars: 0,
+ };
+ }
+ }
+ }
+
+ summary
+ }
+
pub fn soft_wrap_indent(&self, row: u32) -> Option<u32> {
let mut cursor = self.transforms.cursor::<WrapPoint>(&());
cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Right, &());
@@ -738,6 +814,21 @@ impl WrapSnapshot {
None
}
+ #[cfg(test)]
+ pub fn text(&self) -> String {
+ self.text_chunks(0).collect()
+ }
+
+ #[cfg(test)]
+ pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator<Item = &str> {
+ self.chunks(
+ wrap_row..self.max_point().row() + 1,
+ false,
+ Highlights::default(),
+ )
+ .map(|h| h.text)
+ }
+
fn check_invariants(&self) {
#[cfg(test)]
{
@@ -784,6 +875,26 @@ impl WrapSnapshot {
}
}
+impl<'a> WrapChunks<'a> {
+ pub(crate) fn seek(&mut self, rows: Range<u32>) {
+ let output_start = WrapPoint::new(rows.start, 0);
+ let output_end = WrapPoint::new(rows.end, 0);
+ self.transforms.seek(&output_start, Bias::Right, &());
+ let mut input_start = TabPoint(self.transforms.start().1 .0);
+ if self.transforms.item().map_or(false, |t| t.is_isomorphic()) {
+ input_start.0 += output_start.0 - self.transforms.start().0 .0;
+ }
+ let input_end = self
+ .snapshot
+ .to_tab_point(output_end)
+ .min(self.snapshot.tab_snapshot.max_point());
+ self.input_chunks.seek(input_start..input_end);
+ self.input_chunk = Chunk::default();
+ self.output_position = output_start;
+ self.max_output_row = rows.end;
+ }
+}
+
impl<'a> Iterator for WrapChunks<'a> {
type Item = Chunk<'a>;
@@ -1331,19 +1442,6 @@ mod tests {
}
impl WrapSnapshot {
- pub fn text(&self) -> String {
- self.text_chunks(0).collect()
- }
-
- pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator<Item = &str> {
- self.chunks(
- wrap_row..self.max_point().row() + 1,
- false,
- Highlights::default(),
- )
- .map(|h| h.text)
- }
-
fn verify_chunks(&mut self, rng: &mut impl Rng) {
for _ in 0..5 {
let mut end_row = rng.gen_range(0..=self.max_point().row());
@@ -3282,10 +3282,25 @@ impl Editor {
&bracket_pair.start[..prefix_len],
));
+ let is_closing_quote = if bracket_pair.end == bracket_pair.start
+ && bracket_pair.start.len() == 1
+ {
+ let target = bracket_pair.start.chars().next().unwrap();
+ let current_line_count = snapshot
+ .reversed_chars_at(selection.start)
+ .take_while(|&c| c != '\n')
+ .filter(|&c| c == target)
+ .count();
+ current_line_count % 2 == 1
+ } else {
+ false
+ };
+
if autoclose
&& bracket_pair.close
&& following_text_allows_autoclose
&& preceding_text_matches_prefix
+ && !is_closing_quote
{
let anchor = snapshot.anchor_before(selection.end);
new_selections.push((selection.map(|_| anchor), text.len()));
@@ -10210,7 +10225,7 @@ impl Editor {
let block_id = this.insert_blocks(
[BlockProperties {
style: BlockStyle::Flex,
- position: range.start,
+ placement: BlockPlacement::Below(range.start),
height: 1,
render: Box::new({
let rename_editor = rename_editor.clone();
@@ -10246,7 +10261,6 @@ impl Editor {
.into_any_element()
}
}),
- disposition: BlockDisposition::Below,
priority: 0,
}],
Some(Autoscroll::fit()),
@@ -10531,10 +10545,11 @@ impl Editor {
let message_height = diagnostic.message.matches('\n').count() as u32 + 1;
BlockProperties {
style: BlockStyle::Fixed,
- position: buffer.anchor_after(entry.range.start),
+ placement: BlockPlacement::Below(
+ buffer.anchor_after(entry.range.start),
+ ),
height: message_height,
render: diagnostic_block_renderer(diagnostic, None, true, true),
- disposition: BlockDisposition::Below,
priority: 0,
}
}),
@@ -10728,15 +10743,42 @@ impl Editor {
self.fold_ranges(fold_ranges, true, cx);
}
+ fn fold_at_level(&mut self, fold_at: &FoldAtLevel, cx: &mut ViewContext<Self>) {
+ let fold_at_level = fold_at.level;
+ let snapshot = self.buffer.read(cx).snapshot(cx);
+ let mut fold_ranges = Vec::new();
+ let mut stack = vec![(0, snapshot.max_buffer_row().0, 1)];
+
+ while let Some((mut start_row, end_row, current_level)) = stack.pop() {
+ while start_row < end_row {
+ match self.snapshot(cx).foldable_range(MultiBufferRow(start_row)) {
+ Some(foldable_range) => {
+ let nested_start_row = foldable_range.0.start.row + 1;
+ let nested_end_row = foldable_range.0.end.row;
+
+ if current_level < fold_at_level {
+ stack.push((nested_start_row, nested_end_row, current_level + 1));
+ } else if current_level == fold_at_level {
+ fold_ranges.push(foldable_range);
+ }
+
+ start_row = nested_end_row + 1;
+ }
+ None => start_row += 1,
+ }
+ }
+ }
+
+ self.fold_ranges(fold_ranges, true, cx);
+ }
+
pub fn fold_all(&mut self, _: &actions::FoldAll, cx: &mut ViewContext<Self>) {
let mut fold_ranges = Vec::new();
- let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
+ let snapshot = self.buffer.read(cx).snapshot(cx);
- for row in 0..display_map.max_buffer_row().0 {
- if let Some((foldable_range, fold_text)) =
- display_map.foldable_range(MultiBufferRow(row))
- {
- fold_ranges.push((foldable_range, fold_text));
+ for row in 0..snapshot.max_buffer_row().0 {
+ if let Some(foldable_range) = self.snapshot(cx).foldable_range(MultiBufferRow(row)) {
+ fold_ranges.push(foldable_range);
}
}
@@ -1080,6 +1080,112 @@ fn test_fold_action_multiple_line_breaks(cx: &mut TestAppContext) {
});
}
+#[gpui::test]
+fn test_fold_at_level(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let view = cx.add_window(|cx| {
+ let buffer = MultiBuffer::build_simple(
+ &"
+ class Foo:
+ # Hello!
+
+ def a():
+ print(1)
+
+ def b():
+ print(2)
+
+
+ class Bar:
+ # World!
+
+ def a():
+ print(1)
+
+ def b():
+ print(2)
+
+
+ "
+ .unindent(),
+ cx,
+ );
+ build_editor(buffer.clone(), cx)
+ });
+
+ _ = view.update(cx, |view, cx| {
+ view.fold_at_level(&FoldAtLevel { level: 2 }, cx);
+ assert_eq!(
+ view.display_text(cx),
+ "
+ class Foo:
+ # Hello!
+
+ def a():⋯
+
+ def b():⋯
+
+
+ class Bar:
+ # World!
+
+ def a():⋯
+
+ def b():⋯
+
+
+ "
+ .unindent(),
+ );
+
+ view.fold_at_level(&FoldAtLevel { level: 1 }, cx);
+ assert_eq!(
+ view.display_text(cx),
+ "
+ class Foo:⋯
+
+
+ class Bar:⋯
+
+
+ "
+ .unindent(),
+ );
+
+ view.unfold_all(&UnfoldAll, cx);
+ view.fold_at_level(&FoldAtLevel { level: 0 }, cx);
+ assert_eq!(
+ view.display_text(cx),
+ "
+ class Foo:
+ # Hello!
+
+ def a():
+ print(1)
+
+ def b():
+ print(2)
+
+
+ class Bar:
+ # World!
+
+ def a():
+ print(1)
+
+ def b():
+ print(2)
+
+
+ "
+ .unindent(),
+ );
+
+ assert_eq!(view.display_text(cx), view.buffer.read(cx).read(cx).text());
+ });
+}
+
#[gpui::test]
fn test_move_cursor(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -3868,8 +3974,7 @@ fn test_move_line_up_down_with_blocks(cx: &mut TestAppContext) {
editor.insert_blocks(
[BlockProperties {
style: BlockStyle::Fixed,
- position: snapshot.anchor_after(Point::new(2, 0)),
- disposition: BlockDisposition::Below,
+ placement: BlockPlacement::Below(snapshot.anchor_after(Point::new(2, 0))),
height: 1,
render: Box::new(|_| div().into_any()),
priority: 0,
@@ -336,6 +336,7 @@ impl EditorElement {
register_action(view, cx, Editor::open_url);
register_action(view, cx, Editor::open_file);
register_action(view, cx, Editor::fold);
+ register_action(view, cx, Editor::fold_at_level);
register_action(view, cx, Editor::fold_all);
register_action(view, cx, Editor::fold_at);
register_action(view, cx, Editor::fold_recursive);
@@ -444,6 +445,7 @@ impl EditorElement {
register_action(view, cx, Editor::accept_inline_completion);
register_action(view, cx, Editor::revert_file);
register_action(view, cx, Editor::revert_selected_hunks);
+ register_action(view, cx, Editor::apply_all_diff_hunks);
register_action(view, cx, Editor::apply_selected_diff_hunks);
register_action(view, cx, Editor::open_active_item_in_terminal);
register_action(view, cx, Editor::reload_file)
@@ -2072,7 +2074,7 @@ impl EditorElement {
let mut element = match block {
Block::Custom(block) => {
let align_to = block
- .position()
+ .start()
.to_point(&snapshot.buffer_snapshot)
.to_display_point(snapshot);
let anchor_x = text_x
@@ -6295,7 +6297,7 @@ fn compute_auto_height_layout(
mod tests {
use super::*;
use crate::{
- display_map::{BlockDisposition, BlockProperties},
+ display_map::{BlockPlacement, BlockProperties},
editor_tests::{init_test, update_test_language_settings},
Editor, MultiBuffer,
};
@@ -6551,9 +6553,8 @@ mod tests {
editor.insert_blocks(
[BlockProperties {
style: BlockStyle::Fixed,
- disposition: BlockDisposition::Above,
+ placement: BlockPlacement::Above(Anchor::min()),
height: 3,
- position: Anchor::min(),
render: Box::new(|cx| div().h(3. * cx.line_height()).into_any()),
priority: 0,
}],
@@ -706,10 +706,11 @@ pub(crate) async fn find_file(
) -> Option<ResolvedPath> {
project
.update(cx, |project, cx| {
- project.resolve_existing_file_path(&candidate_file_path, buffer, cx)
+ project.resolve_path_in_buffer(&candidate_file_path, buffer, cx)
})
.ok()?
.await
+ .filter(|s| s.is_file())
}
if let Some(existing_path) = check_path(&candidate_file_path, &project, buffer, cx).await {
@@ -1612,4 +1613,46 @@ mod tests {
assert_eq!(file_path.to_str().unwrap(), "/root/dir/file2.rs");
});
}
+
+ #[gpui::test]
+ async fn test_hover_directories(cx: &mut gpui::TestAppContext) {
+ init_test(cx, |_| {});
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ ..Default::default()
+ },
+ cx,
+ )
+ .await;
+
+ // Insert a new file
+ let fs = cx.update_workspace(|workspace, cx| workspace.project().read(cx).fs().clone());
+ fs.as_fake()
+ .insert_file("/root/dir/file2.rs", "This is file2.rs".as_bytes().to_vec())
+ .await;
+
+ cx.set_state(indoc! {"
+ You can't open ../diˇr because it's a directory.
+ "});
+
+ // File does not exist
+ let screen_coord = cx.pixel_position(indoc! {"
+ You can't open ../diˇr because it's a directory.
+ "});
+ cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
+
+ // No highlight
+ cx.update_editor(|editor, cx| {
+ assert!(editor
+ .snapshot(cx)
+ .text_highlight_ranges::<HoveredLinkState>()
+ .unwrap_or_default()
+ .1
+ .is_empty());
+ });
+
+ // Does not open the directory
+ cx.simulate_click(screen_coord, Modifiers::secondary_key());
+ cx.update_workspace(|workspace, cx| assert_eq!(workspace.items(cx).count(), 1));
+ }
}
@@ -16,10 +16,10 @@ use util::RangeExt;
use workspace::Item;
use crate::{
- editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, ApplyDiffHunk,
- BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow,
- DisplaySnapshot, Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, RevertFile,
- RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff,
+ editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, ApplyAllDiffHunks,
+ ApplyDiffHunk, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight,
+ DisplayRow, DisplaySnapshot, Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk,
+ RevertFile, RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff,
};
#[derive(Debug, Clone)]
@@ -352,7 +352,11 @@ impl Editor {
None
}
- pub(crate) fn apply_all_diff_hunks(&mut self, cx: &mut ViewContext<Self>) {
+ pub(crate) fn apply_all_diff_hunks(
+ &mut self,
+ _: &ApplyAllDiffHunks,
+ cx: &mut ViewContext<Self>,
+ ) {
let buffers = self.buffer.read(cx).all_buffers();
for branch_buffer in buffers {
branch_buffer.update(cx, |branch_buffer, cx| {
@@ -417,10 +421,9 @@ impl Editor {
};
BlockProperties {
- position: hunk.multi_buffer_range.start,
+ placement: BlockPlacement::Above(hunk.multi_buffer_range.start),
height: 1,
style: BlockStyle::Sticky,
- disposition: BlockDisposition::Above,
priority: 0,
render: Box::new({
let editor = cx.view().clone();
@@ -700,10 +703,9 @@ impl Editor {
let hunk = hunk.clone();
let height = editor_height.max(deleted_text_height);
BlockProperties {
- position: hunk.multi_buffer_range.start,
+ placement: BlockPlacement::Above(hunk.multi_buffer_range.start),
height,
style: BlockStyle::Flex,
- disposition: BlockDisposition::Above,
priority: 0,
render: Box::new(move |cx| {
let width = EditorElement::diff_hunk_strip_width(cx.line_height());
@@ -1,4 +1,4 @@
-use crate::{Editor, EditorEvent, SemanticsProvider};
+use crate::{ApplyAllDiffHunks, Editor, EditorEvent, SemanticsProvider};
use collections::HashSet;
use futures::{channel::mpsc, future::join_all};
use gpui::{AppContext, EventEmitter, FocusableView, Model, Render, Subscription, Task, View};
@@ -8,7 +8,7 @@ use project::Project;
use smol::stream::StreamExt;
use std::{any::TypeId, ops::Range, rc::Rc, time::Duration};
use text::ToOffset;
-use ui::prelude::*;
+use ui::{prelude::*, ButtonLike, KeyBinding};
use workspace::{
searchable::SearchableItemHandle, Item, ItemHandle as _, ToolbarItemEvent, ToolbarItemLocation,
ToolbarItemView, Workspace,
@@ -232,7 +232,10 @@ impl ProposedChangesEditor {
impl Render for ProposedChangesEditor {
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
- self.editor.clone()
+ div()
+ .size_full()
+ .key_context("ProposedChangesEditor")
+ .child(self.editor.clone())
}
}
@@ -331,17 +334,21 @@ impl ProposedChangesEditorToolbar {
}
impl Render for ProposedChangesEditorToolbar {
- fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
- let editor = self.current_editor.clone();
- Button::new("apply-changes", "Apply All").on_click(move |_, cx| {
- if let Some(editor) = &editor {
- editor.update(cx, |editor, cx| {
- editor.editor.update(cx, |editor, cx| {
- editor.apply_all_diff_hunks(cx);
- })
- });
+ fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
+ let button_like = ButtonLike::new("apply-changes").child(Label::new("Apply All"));
+
+ match &self.current_editor {
+ Some(editor) => {
+ let focus_handle = editor.focus_handle(cx);
+ let keybinding = KeyBinding::for_action_in(&ApplyAllDiffHunks, &focus_handle, cx)
+ .map(|binding| binding.into_any_element());
+
+ button_like.children(keybinding).on_click({
+ move |_event, cx| focus_handle.dispatch_action(&ApplyAllDiffHunks, cx)
+ })
}
- })
+ None => button_like.disabled(true),
+ }
}
}
@@ -1,6 +1,7 @@
use std::{
borrow::Cow,
ops::{Deref, DerefMut, Range},
+ path::Path,
sync::Arc,
};
@@ -66,10 +67,12 @@ impl EditorLspTestContext {
);
language_registry.add(Arc::new(language));
+ let root = Self::root_path();
+
app_state
.fs
.as_fake()
- .insert_tree("/root", json!({ "dir": { file_name.clone(): "" }}))
+ .insert_tree(root, json!({ "dir": { file_name.clone(): "" }}))
.await;
let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
@@ -79,7 +82,7 @@ impl EditorLspTestContext {
let mut cx = VisualTestContext::from_window(*window.deref(), cx);
project
.update(&mut cx, |project, cx| {
- project.find_or_create_worktree("/root", true, cx)
+ project.find_or_create_worktree(root, true, cx)
})
.await
.unwrap();
@@ -108,7 +111,7 @@ impl EditorLspTestContext {
},
lsp,
workspace,
- buffer_lsp_url: lsp::Url::from_file_path(format!("/root/dir/{file_name}")).unwrap(),
+ buffer_lsp_url: lsp::Url::from_file_path(root.join("dir").join(file_name)).unwrap(),
}
}
@@ -123,6 +126,7 @@ impl EditorLspTestContext {
path_suffixes: vec!["rs".to_string()],
..Default::default()
},
+ line_comments: vec!["// ".into(), "/// ".into(), "//! ".into()],
..Default::default()
},
Some(tree_sitter_rust::LANGUAGE.into()),
@@ -309,6 +313,16 @@ impl EditorLspTestContext {
pub fn notify<T: notification::Notification>(&self, params: T::Params) {
self.lsp.notify::<T>(params);
}
+
+ #[cfg(target_os = "windows")]
+ fn root_path() -> &'static Path {
+ Path::new("C:\\root")
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ fn root_path() -> &'static Path {
+ Path::new("/root")
+ }
}
impl Deref for EditorLspTestContext {
@@ -17,6 +17,7 @@ use project::{FakeFs, Project};
use std::{
any::TypeId,
ops::{Deref, DerefMut, Range},
+ path::Path,
sync::{
atomic::{AtomicUsize, Ordering},
Arc,
@@ -42,17 +43,18 @@ impl EditorTestContext {
pub async fn new(cx: &mut gpui::TestAppContext) -> EditorTestContext {
let fs = FakeFs::new(cx.executor());
// fs.insert_file("/file", "".to_owned()).await;
+ let root = Self::root_path();
fs.insert_tree(
- "/root",
+ root,
serde_json::json!({
"file": "",
}),
)
.await;
- let project = Project::test(fs, ["/root".as_ref()], cx).await;
+ let project = Project::test(fs, [root], cx).await;
let buffer = project
.update(cx, |project, cx| {
- project.open_local_buffer("/root/file", cx)
+ project.open_local_buffer(root.join("file"), cx)
})
.await
.unwrap();
@@ -71,6 +73,16 @@ impl EditorTestContext {
}
}
+ #[cfg(target_os = "windows")]
+ fn root_path() -> &'static Path {
+ Path::new("C:\\root")
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ fn root_path() -> &'static Path {
+ Path::new("/root")
+ }
+
pub async fn for_editor(editor: WindowHandle<Editor>, cx: &mut gpui::TestAppContext) -> Self {
let editor_view = editor.root_view(cx).unwrap();
Self {
@@ -8,7 +8,8 @@ use collections::HashMap;
use futures::{Future, FutureExt};
use gpui::AsyncAppContext;
use language::{
- CodeLabel, HighlightId, Language, LanguageServerName, LspAdapter, LspAdapterDelegate,
+ CodeLabel, HighlightId, Language, LanguageServerName, LanguageToolchainStore, LspAdapter,
+ LspAdapterDelegate,
};
use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions};
use serde::Serialize;
@@ -194,6 +195,7 @@ impl LspAdapter for ExtensionLspAdapter {
async fn workspace_configuration(
self: Arc<Self>,
delegate: &Arc<dyn LspAdapterDelegate>,
+ _: Arc<dyn LanguageToolchainStore>,
_cx: &mut AsyncAppContext,
) -> Result<Value> {
let delegate = delegate.clone();
@@ -3,6 +3,7 @@ use std::sync::{atomic::AtomicBool, Arc};
use anyhow::{anyhow, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
+ SlashCommandResult,
};
use futures::FutureExt;
use gpui::{Task, WeakView, WindowContext};
@@ -87,7 +88,7 @@ impl SlashCommand for ExtensionSlashCommand {
_workspace: WeakView<Workspace>,
delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
- ) -> Task<Result<SlashCommandOutput>> {
+ ) -> Task<SlashCommandResult> {
let arguments = arguments.to_owned();
let output = cx.background_executor().spawn(async move {
self.extension
@@ -127,7 +128,8 @@ impl SlashCommand for ExtensionSlashCommand {
})
.collect(),
run_commands_in_text: false,
- })
+ }
+ .to_event_stream())
})
}
}
@@ -37,7 +37,7 @@ use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
use indexed_docs::{IndexedDocsRegistry, ProviderId};
use language::{
LanguageConfig, LanguageMatcher, LanguageName, LanguageQueries, LanguageRegistry,
- QUERY_FILENAME_PREFIXES,
+ LoadedLanguage, QUERY_FILENAME_PREFIXES,
};
use node_runtime::NodeRuntime;
use project::ContextProviderWithTasks;
@@ -1102,14 +1102,21 @@ impl ExtensionStore {
let config = std::fs::read_to_string(language_path.join("config.toml"))?;
let config: LanguageConfig = ::toml::from_str(&config)?;
let queries = load_plugin_queries(&language_path);
- let tasks = std::fs::read_to_string(language_path.join("tasks.json"))
- .ok()
- .and_then(|contents| {
- let definitions = serde_json_lenient::from_str(&contents).log_err()?;
- Some(Arc::new(ContextProviderWithTasks::new(definitions)) as Arc<_>)
- });
-
- Ok((config, queries, tasks))
+ let context_provider =
+ std::fs::read_to_string(language_path.join("tasks.json"))
+ .ok()
+ .and_then(|contents| {
+ let definitions =
+ serde_json_lenient::from_str(&contents).log_err()?;
+ Some(Arc::new(ContextProviderWithTasks::new(definitions)) as Arc<_>)
+ });
+
+ Ok(LoadedLanguage {
+ config,
+ queries,
+ context_provider,
+ toolchain_provider: None,
+ })
},
);
}
@@ -790,9 +790,9 @@ impl FileFinderDelegate {
let mut path_matches = Vec::new();
let abs_file_exists = if let Ok(task) = project.update(&mut cx, |this, cx| {
- this.abs_file_path_exists(query.path_query(), cx)
+ this.resolve_abs_file_path(query.path_query(), cx)
}) {
- task.await
+ task.await.is_some()
} else {
false
};
@@ -813,6 +813,7 @@ struct FakeFsState {
root: Arc<Mutex<FakeFsEntry>>,
next_inode: u64,
next_mtime: SystemTime,
+ git_event_tx: smol::channel::Sender<PathBuf>,
event_txs: Vec<smol::channel::Sender<Vec<PathEvent>>>,
events_paused: bool,
buffered_events: Vec<PathEvent>,
@@ -865,14 +866,22 @@ impl FakeFsState {
let mut entry_stack = Vec::new();
'outer: loop {
let mut path_components = path.components().peekable();
+ let mut prefix = None;
while let Some(component) = path_components.next() {
match component {
- Component::Prefix(_) => panic!("prefix paths aren't supported"),
+ Component::Prefix(prefix_component) => prefix = Some(prefix_component),
Component::RootDir => {
entry_stack.clear();
entry_stack.push(self.root.clone());
canonical_path.clear();
- canonical_path.push("/");
+ match prefix {
+ Some(prefix_component) => {
+ canonical_path = PathBuf::from(prefix_component.as_os_str());
+ // Prefixes like `C:\\` are represented without their trailing slash, so we have to re-add it.
+ canonical_path.push(std::path::MAIN_SEPARATOR_STR);
+ }
+ None => canonical_path = PathBuf::from(std::path::MAIN_SEPARATOR_STR),
+ }
}
Component::CurDir => {}
Component::ParentDir => {
@@ -894,7 +903,7 @@ impl FakeFsState {
}
}
entry_stack.push(entry.clone());
- canonical_path.push(name);
+ canonical_path = canonical_path.join(name);
} else {
return None;
}
@@ -956,9 +965,15 @@ pub static FS_DOT_GIT: std::sync::LazyLock<&'static OsStr> =
#[cfg(any(test, feature = "test-support"))]
impl FakeFs {
+ /// We need to use something large enough for Windows and Unix to consider this a new file.
+ /// https://doc.rust-lang.org/nightly/std/time/struct.SystemTime.html#platform-specific-behavior
+ const SYSTEMTIME_INTERVAL: u64 = 100;
+
pub fn new(executor: gpui::BackgroundExecutor) -> Arc<Self> {
- Arc::new(Self {
- executor,
+ let (tx, mut rx) = smol::channel::bounded::<PathBuf>(10);
+
+ let this = Arc::new(Self {
+ executor: executor.clone(),
state: Mutex::new(FakeFsState {
root: Arc::new(Mutex::new(FakeFsEntry::Dir {
inode: 0,
@@ -967,6 +982,7 @@ impl FakeFs {
entries: Default::default(),
git_repo_state: None,
})),
+ git_event_tx: tx,
next_mtime: SystemTime::UNIX_EPOCH,
next_inode: 1,
event_txs: Default::default(),
@@ -975,7 +991,22 @@ impl FakeFs {
read_dir_call_count: 0,
metadata_call_count: 0,
}),
- })
+ });
+
+ executor.spawn({
+ let this = this.clone();
+ async move {
+ while let Some(git_event) = rx.next().await {
+ if let Some(mut state) = this.state.try_lock() {
+ state.emit_event([(git_event, None)]);
+ } else {
+ panic!("Failed to lock file system state, this execution would have caused a test hang");
+ }
+ }
+ }
+ }).detach();
+
+ this
}
pub fn set_next_mtime(&self, next_mtime: SystemTime) {
@@ -989,7 +1020,7 @@ impl FakeFs {
let new_mtime = state.next_mtime;
let new_inode = state.next_inode;
state.next_inode += 1;
- state.next_mtime += Duration::from_nanos(1);
+ state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL);
state
.write_path(path, move |entry| {
match entry {
@@ -1042,7 +1073,7 @@ impl FakeFs {
let inode = state.next_inode;
let mtime = state.next_mtime;
state.next_inode += 1;
- state.next_mtime += Duration::from_nanos(1);
+ state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL);
let file = Arc::new(Mutex::new(FakeFsEntry::File {
inode,
mtime,
@@ -1169,7 +1200,12 @@ impl FakeFs {
let mut entry = entry.lock();
if let FakeFsEntry::Dir { git_repo_state, .. } = &mut *entry {
- let repo_state = git_repo_state.get_or_insert_with(Default::default);
+ let repo_state = git_repo_state.get_or_insert_with(|| {
+ Arc::new(Mutex::new(FakeGitRepositoryState::new(
+ dot_git.to_path_buf(),
+ state.git_event_tx.clone(),
+ )))
+ });
let mut repo_state = repo_state.lock();
f(&mut repo_state);
@@ -1184,7 +1220,22 @@ impl FakeFs {
pub fn set_branch_name(&self, dot_git: &Path, branch: Option<impl Into<String>>) {
self.with_git_state(dot_git, true, |state| {
- state.branch_name = branch.map(Into::into)
+ let branch = branch.map(Into::into);
+ state.branches.extend(branch.clone());
+ state.current_branch_name = branch.map(Into::into)
+ })
+ }
+
+ pub fn insert_branches(&self, dot_git: &Path, branches: &[&str]) {
+ self.with_git_state(dot_git, true, |state| {
+ if let Some(first) = branches.first() {
+ if state.current_branch_name.is_none() {
+ state.current_branch_name = Some(first.to_string())
+ }
+ }
+ state
+ .branches
+ .extend(branches.iter().map(ToString::to_string));
})
}
@@ -1384,15 +1435,16 @@ impl Fs for FakeFs {
let mut created_dirs = Vec::new();
let mut cur_path = PathBuf::new();
for component in path.components() {
- let mut state = self.state.lock();
+ let should_skip = matches!(component, Component::Prefix(..) | Component::RootDir);
cur_path.push(component);
- if cur_path == Path::new("/") {
+ if should_skip {
continue;
}
+ let mut state = self.state.lock();
let inode = state.next_inode;
let mtime = state.next_mtime;
- state.next_mtime += Duration::from_nanos(1);
+ state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL);
state.next_inode += 1;
state.write_path(&cur_path, |entry| {
entry.or_insert_with(|| {
@@ -1418,7 +1470,7 @@ impl Fs for FakeFs {
let mut state = self.state.lock();
let inode = state.next_inode;
let mtime = state.next_mtime;
- state.next_mtime += Duration::from_nanos(1);
+ state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL);
state.next_inode += 1;
let file = Arc::new(Mutex::new(FakeFsEntry::File {
inode,
@@ -1553,7 +1605,7 @@ impl Fs for FakeFs {
let mut state = self.state.lock();
let mtime = state.next_mtime;
let inode = util::post_inc(&mut state.next_inode);
- state.next_mtime += Duration::from_nanos(1);
+ state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL);
let source_entry = state.read_path(&source)?;
let content = source_entry.lock().file_content(&source)?.clone();
let mut kind = Some(PathEventKind::Created);
@@ -1823,7 +1875,12 @@ impl Fs for FakeFs {
let mut entry = entry.lock();
if let FakeFsEntry::Dir { git_repo_state, .. } = &mut *entry {
let state = git_repo_state
- .get_or_insert_with(|| Arc::new(Mutex::new(FakeGitRepositoryState::default())))
+ .get_or_insert_with(|| {
+ Arc::new(Mutex::new(FakeGitRepositoryState::new(
+ abs_dot_git.to_path_buf(),
+ state.git_event_tx.clone(),
+ )))
+ })
.clone();
Some(git::repository::FakeGitRepository::open(state))
} else {
@@ -1,8 +1,9 @@
use crate::GitHostingProviderRegistry;
use crate::{blame::Blame, status::GitStatus};
use anyhow::{Context, Result};
-use collections::HashMap;
+use collections::{HashMap, HashSet};
use git2::BranchType;
+use gpui::SharedString;
use parking_lot::Mutex;
use rope::Rope;
use serde::{Deserialize, Serialize};
@@ -17,7 +18,7 @@ use util::ResultExt;
#[derive(Clone, Debug, Hash, PartialEq)]
pub struct Branch {
pub is_head: bool,
- pub name: Box<str>,
+ pub name: SharedString,
/// Timestamp of most recent commit, normalized to Unix Epoch format.
pub unix_timestamp: Option<i64>,
}
@@ -41,6 +42,7 @@ pub trait GitRepository: Send + Sync {
fn branches(&self) -> Result<Vec<Branch>>;
fn change_branch(&self, _: &str) -> Result<()>;
fn create_branch(&self, _: &str) -> Result<()>;
+ fn branch_exits(&self, _: &str) -> Result<bool>;
fn blame(&self, path: &Path, content: Rope) -> Result<crate::blame::Blame>;
}
@@ -132,6 +134,18 @@ impl GitRepository for RealGitRepository {
GitStatus::new(&self.git_binary_path, &working_directory, path_prefixes)
}
+ fn branch_exits(&self, name: &str) -> Result<bool> {
+ let repo = self.repository.lock();
+ let branch = repo.find_branch(name, BranchType::Local);
+ match branch {
+ Ok(_) => Ok(true),
+ Err(e) => match e.code() {
+ git2::ErrorCode::NotFound => Ok(false),
+ _ => Err(anyhow::anyhow!(e)),
+ },
+ }
+ }
+
fn branches(&self) -> Result<Vec<Branch>> {
let repo = self.repository.lock();
let local_branches = repo.branches(Some(BranchType::Local))?;
@@ -139,7 +153,11 @@ impl GitRepository for RealGitRepository {
.filter_map(|branch| {
branch.ok().and_then(|(branch, _)| {
let is_head = branch.is_head();
- let name = branch.name().ok().flatten().map(Box::from)?;
+ let name = branch
+ .name()
+ .ok()
+ .flatten()
+ .map(|name| name.to_string().into())?;
let timestamp = branch.get().peel_to_commit().ok()?.time();
let unix_timestamp = timestamp.seconds();
let timezone_offset = timestamp.offset_minutes();
@@ -201,17 +219,20 @@ impl GitRepository for RealGitRepository {
}
}
-#[derive(Debug, Clone, Default)]
+#[derive(Debug, Clone)]
pub struct FakeGitRepository {
state: Arc<Mutex<FakeGitRepositoryState>>,
}
-#[derive(Debug, Clone, Default)]
+#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
+ pub path: PathBuf,
+ pub event_emitter: smol::channel::Sender<PathBuf>,
pub index_contents: HashMap<PathBuf, String>,
pub blames: HashMap<PathBuf, Blame>,
pub worktree_statuses: HashMap<RepoPath, GitFileStatus>,
- pub branch_name: Option<String>,
+ pub current_branch_name: Option<String>,
+ pub branches: HashSet<String>,
}
impl FakeGitRepository {
@@ -220,6 +241,20 @@ impl FakeGitRepository {
}
}
+impl FakeGitRepositoryState {
+ pub fn new(path: PathBuf, event_emitter: smol::channel::Sender<PathBuf>) -> Self {
+ FakeGitRepositoryState {
+ path,
+ event_emitter,
+ index_contents: Default::default(),
+ blames: Default::default(),
+ worktree_statuses: Default::default(),
+ current_branch_name: Default::default(),
+ branches: Default::default(),
+ }
+ }
+}
+
impl GitRepository for FakeGitRepository {
fn reload_index(&self) {}
@@ -234,7 +269,7 @@ impl GitRepository for FakeGitRepository {
fn branch_name(&self) -> Option<String> {
let state = self.state.lock();
- state.branch_name.clone()
+ state.current_branch_name.clone()
}
fn head_sha(&self) -> Option<String> {
@@ -264,18 +299,41 @@ impl GitRepository for FakeGitRepository {
}
fn branches(&self) -> Result<Vec<Branch>> {
- Ok(vec![])
+ let state = self.state.lock();
+ let current_branch = &state.current_branch_name;
+ Ok(state
+ .branches
+ .iter()
+ .map(|branch_name| Branch {
+ is_head: Some(branch_name) == current_branch.as_ref(),
+ name: branch_name.into(),
+ unix_timestamp: None,
+ })
+ .collect())
+ }
+
+ fn branch_exits(&self, name: &str) -> Result<bool> {
+ let state = self.state.lock();
+ Ok(state.branches.contains(name))
}
fn change_branch(&self, name: &str) -> Result<()> {
let mut state = self.state.lock();
- state.branch_name = Some(name.to_owned());
+ state.current_branch_name = Some(name.to_owned());
+ state
+ .event_emitter
+ .try_send(state.path.clone())
+ .expect("Dropped repo change event");
Ok(())
}
fn create_branch(&self, name: &str) -> Result<()> {
let mut state = self.state.lock();
- state.branch_name = Some(name.to_owned());
+ state.branches.insert(name.to_owned());
+ state
+ .event_emitter
+ .try_send(state.path.clone())
+ .expect("Dropped repo change event");
Ok(())
}
@@ -256,6 +256,9 @@ pub struct AppContext {
pub(crate) layout_id_buffer: Vec<LayoutId>, // We recycle this memory across layout requests.
pub(crate) propagate_event: bool,
pub(crate) prompt_builder: Option<PromptBuilder>,
+
+ #[cfg(any(test, feature = "test-support", debug_assertions))]
+ pub(crate) name: Option<&'static str>,
}
impl AppContext {
@@ -309,6 +312,9 @@ impl AppContext {
layout_id_buffer: Default::default(),
propagate_event: true,
prompt_builder: Some(PromptBuilder::Default),
+
+ #[cfg(any(test, feature = "test-support", debug_assertions))]
+ name: None,
}),
});
@@ -988,6 +994,7 @@ impl AppContext {
}
/// Move the global of the given type to the stack.
+ #[track_caller]
pub(crate) fn lease_global<G: Global>(&mut self) -> GlobalLease<G> {
GlobalLease::new(
self.globals_by_type
@@ -1319,6 +1326,12 @@ impl AppContext {
(task, is_first)
}
+
+ /// Get the name for this App.
+ #[cfg(any(test, feature = "test-support", debug_assertions))]
+ pub fn get_name(&self) -> &'static str {
+ self.name.as_ref().unwrap()
+ }
}
impl Context for AppContext {
@@ -536,6 +536,15 @@ impl AnyWeakModel {
}
}
+impl std::fmt::Debug for AnyWeakModel {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct(type_name::<Self>())
+ .field("entity_id", &self.entity_id)
+ .field("entity_type", &self.entity_type)
+ .finish()
+ }
+}
+
impl<T> From<WeakModel<T>> for AnyWeakModel {
fn from(model: WeakModel<T>) -> Self {
model.any_model
@@ -478,6 +478,12 @@ impl TestAppContext {
.await
.unwrap();
}
+
+ /// Set a name for this App.
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn set_name(&mut self, name: &'static str) {
+ self.update(|cx| cx.name = Some(name))
+ }
}
impl<T: 'static> Model<T> {
@@ -48,6 +48,7 @@ where
item_count,
item_to_measure_index: 0,
render_items: Box::new(render_range),
+ decorations: Vec::new(),
interactivity: Interactivity {
element_id: Some(id),
base_style: Box::new(base_style),
@@ -69,6 +70,7 @@ pub struct UniformList {
item_to_measure_index: usize,
render_items:
Box<dyn for<'a> Fn(Range<usize>, &'a mut WindowContext) -> SmallVec<[AnyElement; 64]>>,
+ decorations: Vec<Box<dyn UniformListDecoration>>,
interactivity: Interactivity,
scroll_handle: Option<UniformListScrollHandle>,
sizing_behavior: ListSizingBehavior,
@@ -78,6 +80,7 @@ pub struct UniformList {
/// Frame state used by the [UniformList].
pub struct UniformListFrameState {
items: SmallVec<[AnyElement; 32]>,
+ decorations: SmallVec<[AnyElement; 1]>,
}
/// A handle for controlling the scroll position of a uniform list.
@@ -185,6 +188,7 @@ impl Element for UniformList {
layout_id,
UniformListFrameState {
items: SmallVec::new(),
+ decorations: SmallVec::new(),
},
)
}
@@ -292,9 +296,10 @@ impl Element for UniformList {
..cmp::min(last_visible_element_ix, self.item_count);
let mut items = (self.render_items)(visible_range.clone(), cx);
+
let content_mask = ContentMask { bounds };
cx.with_content_mask(Some(content_mask), |cx| {
- for (mut item, ix) in items.into_iter().zip(visible_range) {
+ for (mut item, ix) in items.into_iter().zip(visible_range.clone()) {
let item_origin = padded_bounds.origin
+ point(
if can_scroll_horizontally {
@@ -317,6 +322,35 @@ impl Element for UniformList {
item.prepaint_at(item_origin, cx);
frame_state.items.push(item);
}
+
+ let bounds = Bounds::new(
+ padded_bounds.origin
+ + point(
+ if can_scroll_horizontally {
+ scroll_offset.x + padding.left
+ } else {
+ scroll_offset.x
+ },
+ scroll_offset.y + padding.top,
+ ),
+ padded_bounds.size,
+ );
+ for decoration in &self.decorations {
+ let mut decoration = decoration.as_ref().compute(
+ visible_range.clone(),
+ bounds,
+ item_height,
+ self.item_count,
+ cx,
+ );
+ let available_space = size(
+ AvailableSpace::Definite(bounds.size.width),
+ AvailableSpace::Definite(bounds.size.height),
+ );
+ decoration.layout_as_root(available_space, cx);
+ decoration.prepaint_at(bounds.origin, cx);
+ frame_state.decorations.push(decoration);
+ }
});
}
@@ -338,6 +372,9 @@ impl Element for UniformList {
for item in &mut request_layout.items {
item.paint(cx);
}
+ for decoration in &mut request_layout.decorations {
+ decoration.paint(cx);
+ }
})
}
}
@@ -350,6 +387,21 @@ impl IntoElement for UniformList {
}
}
+/// A decoration for a [`UniformList`]. This can be used for various things,
+/// such as rendering indent guides, or other visual effects.
+pub trait UniformListDecoration {
+ /// Compute the decoration element, given the visible range of list items,
+ /// the bounds of the list, and the height of each item.
+ fn compute(
+ &self,
+ visible_range: Range<usize>,
+ bounds: Bounds<Pixels>,
+ item_height: Pixels,
+ item_count: usize,
+ cx: &mut WindowContext,
+ ) -> AnyElement;
+}
+
impl UniformList {
/// Selects a specific list item for measurement.
pub fn with_width_from_item(mut self, item_index: Option<usize>) -> Self {
@@ -382,6 +434,12 @@ impl UniformList {
self
}
+ /// Adds a decoration element to the list.
+ pub fn with_decoration(mut self, decoration: impl UniformListDecoration + 'static) -> Self {
+ self.decorations.push(Box::new(decoration));
+ self
+ }
+
fn measure_item(&self, list_width: Option<Pixels>, cx: &mut WindowContext) -> Size<Pixels> {
if self.item_count == 0 {
return Size::default();
@@ -57,6 +57,7 @@ pub trait UpdateGlobal {
}
impl<T: Global> UpdateGlobal for T {
+ #[track_caller]
fn update_global<C, F, R>(cx: &mut C, update: F) -> R
where
C: BorrowAppContext,
@@ -306,6 +306,7 @@ where
self.borrow_mut().set_global(global)
}
+ #[track_caller]
fn update_global<G, R>(&mut self, f: impl FnOnce(&mut G, &mut Self) -> R) -> R
where
G: Global,
@@ -1,37 +0,0 @@
-[package]
-name = "headless"
-version = "0.1.0"
-edition = "2021"
-publish = false
-license = "GPL-3.0-or-later"
-
-[lints]
-workspace = true
-
-[lib]
-path = "src/headless.rs"
-doctest = false
-
-[dependencies]
-anyhow.workspace = true
-client.workspace = true
-extension.workspace = true
-signal-hook.workspace = true
-gpui.workspace = true
-log.workspace = true
-util.workspace = true
-node_runtime.workspace = true
-language.workspace = true
-project.workspace = true
-proto.workspace = true
-fs.workspace = true
-futures.workspace = true
-settings.workspace = true
-shellexpand.workspace = true
-postage.workspace = true
-
-[dev-dependencies]
-client = { workspace = true, features = ["test-support"] }
-fs = { workspace = true, features = ["test-support"] }
-gpui = { workspace = true, features = ["test-support"] }
-util = { workspace = true, features = ["test-support"] }
@@ -1 +0,0 @@
-../../LICENSE-GPL
@@ -1,397 +0,0 @@
-use anyhow::{anyhow, Result};
-use client::DevServerProjectId;
-use client::{user::UserStore, Client, ClientSettings};
-use extension::ExtensionStore;
-use fs::Fs;
-use futures::{Future, StreamExt};
-use gpui::{AppContext, AsyncAppContext, Context, Global, Model, ModelContext, Task, WeakModel};
-use language::LanguageRegistry;
-use node_runtime::NodeRuntime;
-use postage::stream::Stream;
-use project::Project;
-use proto::{self, ErrorCode, TypedEnvelope};
-use settings::{Settings, SettingsStore};
-use std::path::Path;
-use std::{collections::HashMap, sync::Arc};
-use util::{ResultExt, TryFutureExt};
-
-pub struct DevServer {
- client: Arc<Client>,
- app_state: AppState,
- remote_shutdown: bool,
- projects: HashMap<DevServerProjectId, Model<Project>>,
- _subscriptions: Vec<client::Subscription>,
- _maintain_connection: Task<Option<()>>,
-}
-
-pub struct AppState {
- pub node_runtime: NodeRuntime,
- pub user_store: Model<UserStore>,
- pub languages: Arc<LanguageRegistry>,
- pub fs: Arc<dyn Fs>,
-}
-
-struct GlobalDevServer(Model<DevServer>);
-
-impl Global for GlobalDevServer {}
-
-pub fn init(client: Arc<Client>, app_state: AppState, cx: &mut AppContext) -> Task<Result<()>> {
- let dev_server = cx.new_model(|cx| DevServer::new(client.clone(), app_state, cx));
- cx.set_global(GlobalDevServer(dev_server.clone()));
-
- #[cfg(not(target_os = "windows"))]
- {
- use signal_hook::consts::{SIGINT, SIGTERM};
- use signal_hook::iterator::Signals;
- // Set up a handler when the dev server is shut down
- // with ctrl-c or kill
- let (tx, rx) = futures::channel::oneshot::channel();
- let mut signals = Signals::new([SIGTERM, SIGINT]).unwrap();
- std::thread::spawn({
- move || {
- if let Some(sig) = signals.forever().next() {
- tx.send(sig).log_err();
- }
- }
- });
- cx.spawn(|cx| async move {
- if let Ok(sig) = rx.await {
- log::info!("received signal {sig:?}");
- cx.update(|cx| cx.quit()).log_err();
- }
- })
- .detach();
- }
-
- let server_url = ClientSettings::get_global(cx).server_url.clone();
- cx.spawn(|cx| async move {
- client
- .authenticate_and_connect(false, &cx)
- .await
- .map_err(|e| anyhow!("Error connecting to '{}': {}", server_url, e))
- })
-}
-
-impl DevServer {
- pub fn global(cx: &AppContext) -> Model<DevServer> {
- cx.global::<GlobalDevServer>().0.clone()
- }
-
- pub fn new(client: Arc<Client>, app_state: AppState, cx: &mut ModelContext<Self>) -> Self {
- cx.on_app_quit(Self::app_will_quit).detach();
-
- let maintain_connection = cx.spawn({
- let client = client.clone();
- move |this, cx| Self::maintain_connection(this, client.clone(), cx).log_err()
- });
-
- cx.observe_global::<SettingsStore>(|_, cx| {
- ExtensionStore::global(cx).update(cx, |store, cx| store.auto_install_extensions(cx))
- })
- .detach();
-
- DevServer {
- _subscriptions: vec![
- client.add_message_handler(cx.weak_model(), Self::handle_dev_server_instructions),
- client.add_request_handler(
- cx.weak_model(),
- Self::handle_validate_dev_server_project_request,
- ),
- client.add_request_handler(cx.weak_model(), Self::handle_list_remote_directory),
- client.add_message_handler(cx.weak_model(), Self::handle_shutdown),
- ],
- _maintain_connection: maintain_connection,
- projects: Default::default(),
- remote_shutdown: false,
- app_state,
- client,
- }
- }
-
- fn app_will_quit(&mut self, _: &mut ModelContext<Self>) -> impl Future<Output = ()> {
- let request = if self.remote_shutdown {
- None
- } else {
- Some(
- self.client
- .request(proto::ShutdownDevServer { reason: None }),
- )
- };
- async move {
- if let Some(request) = request {
- request.await.log_err();
- }
- }
- }
-
- async fn handle_dev_server_instructions(
- this: Model<Self>,
- envelope: TypedEnvelope<proto::DevServerInstructions>,
- mut cx: AsyncAppContext,
- ) -> Result<()> {
- let (added_projects, retained_projects, removed_projects_ids) =
- this.read_with(&mut cx, |this, _| {
- let removed_projects = this
- .projects
- .keys()
- .filter(|dev_server_project_id| {
- !envelope
- .payload
- .projects
- .iter()
- .any(|p| p.id == dev_server_project_id.0)
- })
- .cloned()
- .collect::<Vec<_>>();
-
- let mut added_projects = vec![];
- let mut retained_projects = vec![];
-
- for project in envelope.payload.projects.iter() {
- if this.projects.contains_key(&DevServerProjectId(project.id)) {
- retained_projects.push(project.clone());
- } else {
- added_projects.push(project.clone());
- }
- }
-
- (added_projects, retained_projects, removed_projects)
- })?;
-
- for dev_server_project in added_projects {
- DevServer::share_project(this.clone(), &dev_server_project, &mut cx).await?;
- }
-
- for dev_server_project in retained_projects {
- DevServer::update_project(this.clone(), &dev_server_project, &mut cx).await?;
- }
-
- this.update(&mut cx, |this, cx| {
- for old_project_id in &removed_projects_ids {
- this.unshare_project(old_project_id, cx)?;
- }
- Ok::<(), anyhow::Error>(())
- })??;
- Ok(())
- }
-
- async fn handle_validate_dev_server_project_request(
- this: Model<Self>,
- envelope: TypedEnvelope<proto::ValidateDevServerProjectRequest>,
- cx: AsyncAppContext,
- ) -> Result<proto::Ack> {
- let expanded = shellexpand::tilde(&envelope.payload.path).to_string();
- let path = std::path::Path::new(&expanded);
- let fs = cx.read_model(&this, |this, _| this.app_state.fs.clone())?;
-
- let path_exists = fs.metadata(path).await.is_ok_and(|result| result.is_some());
- if !path_exists {
- return Err(anyhow!(ErrorCode::DevServerProjectPathDoesNotExist))?;
- }
-
- Ok(proto::Ack {})
- }
-
- async fn handle_list_remote_directory(
- this: Model<Self>,
- envelope: TypedEnvelope<proto::ListRemoteDirectory>,
- cx: AsyncAppContext,
- ) -> Result<proto::ListRemoteDirectoryResponse> {
- let expanded = shellexpand::tilde(&envelope.payload.path).to_string();
- let fs = cx.read_model(&this, |this, _| this.app_state.fs.clone())?;
-
- let mut entries = Vec::new();
- let mut response = fs.read_dir(Path::new(&expanded)).await?;
- while let Some(path) = response.next().await {
- if let Some(file_name) = path?.file_name() {
- entries.push(file_name.to_string_lossy().to_string());
- }
- }
- Ok(proto::ListRemoteDirectoryResponse { entries })
- }
-
- async fn handle_shutdown(
- this: Model<Self>,
- _envelope: TypedEnvelope<proto::ShutdownDevServer>,
- mut cx: AsyncAppContext,
- ) -> Result<()> {
- this.update(&mut cx, |this, cx| {
- this.remote_shutdown = true;
- cx.quit();
- })
- }
-
- fn unshare_project(
- &mut self,
- dev_server_project_id: &DevServerProjectId,
- cx: &mut ModelContext<Self>,
- ) -> Result<()> {
- if let Some(project) = self.projects.remove(dev_server_project_id) {
- project.update(cx, |project, cx| project.unshare(cx))?;
- }
- Ok(())
- }
-
- async fn share_project(
- this: Model<Self>,
- dev_server_project: &proto::DevServerProject,
- cx: &mut AsyncAppContext,
- ) -> Result<()> {
- let (client, project) = this.update(cx, |this, cx| {
- let project = Project::local(
- this.client.clone(),
- this.app_state.node_runtime.clone(),
- this.app_state.user_store.clone(),
- this.app_state.languages.clone(),
- this.app_state.fs.clone(),
- None,
- cx,
- );
-
- (this.client.clone(), project)
- })?;
-
- for path in &dev_server_project.paths {
- let path = shellexpand::tilde(path).to_string();
-
- let (worktree, _) = project
- .update(cx, |project, cx| {
- project.find_or_create_worktree(&path, true, cx)
- })?
- .await?;
-
- worktree.update(cx, |worktree, cx| {
- worktree.as_local_mut().unwrap().share_private_files(cx)
- })?;
- }
-
- let worktrees =
- project.read_with(cx, |project, cx| project.worktree_metadata_protos(cx))?;
-
- let response = client
- .request(proto::ShareDevServerProject {
- dev_server_project_id: dev_server_project.id,
- worktrees,
- })
- .await?;
-
- let project_id = response.project_id;
- project.update(cx, |project, cx| project.shared(project_id, cx))??;
- this.update(cx, |this, _| {
- this.projects
- .insert(DevServerProjectId(dev_server_project.id), project);
- })?;
- Ok(())
- }
-
- async fn update_project(
- this: Model<Self>,
- dev_server_project: &proto::DevServerProject,
- cx: &mut AsyncAppContext,
- ) -> Result<()> {
- let tasks = this.update(cx, |this, cx| {
- let Some(project) = this
- .projects
- .get(&DevServerProjectId(dev_server_project.id))
- else {
- return vec![];
- };
-
- let mut to_delete = vec![];
- let mut tasks = vec![];
-
- project.update(cx, |project, cx| {
- for worktree in project.visible_worktrees(cx) {
- let mut delete = true;
- for config in dev_server_project.paths.iter() {
- if worktree.read(cx).abs_path().to_string_lossy()
- == shellexpand::tilde(config)
- {
- delete = false;
- }
- }
- if delete {
- to_delete.push(worktree.read(cx).id())
- }
- }
-
- for worktree_id in to_delete {
- project.remove_worktree(worktree_id, cx)
- }
-
- for config in dev_server_project.paths.iter() {
- tasks.push(project.find_or_create_worktree(
- shellexpand::tilde(config).to_string(),
- true,
- cx,
- ));
- }
-
- tasks
- })
- })?;
- futures::future::join_all(tasks).await;
- Ok(())
- }
-
- async fn maintain_connection(
- this: WeakModel<Self>,
- client: Arc<Client>,
- mut cx: AsyncAppContext,
- ) -> Result<()> {
- let mut client_status = client.status();
-
- let _ = client_status.try_recv();
- let current_status = *client_status.borrow();
- if current_status.is_connected() {
- // wait for first disconnect
- client_status.recv().await;
- }
-
- loop {
- let Some(current_status) = client_status.recv().await else {
- return Ok(());
- };
- let Some(this) = this.upgrade() else {
- return Ok(());
- };
-
- if !current_status.is_connected() {
- continue;
- }
-
- this.update(&mut cx, |this, cx| this.rejoin(cx))?.await?;
- }
- }
-
- fn rejoin(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
- let mut projects: HashMap<u64, Model<Project>> = HashMap::default();
- let request = self.client.request(proto::ReconnectDevServer {
- reshared_projects: self
- .projects
- .iter()
- .flat_map(|(_, handle)| {
- let project = handle.read(cx);
- let project_id = project.remote_id()?;
- projects.insert(project_id, handle.clone());
- Some(proto::UpdateProject {
- project_id,
- worktrees: project.worktree_metadata_protos(cx),
- })
- })
- .collect(),
- });
- cx.spawn(|_, mut cx| async move {
- let response = request.await?;
-
- for reshared_project in response.reshared_projects {
- if let Some(project) = projects.get(&reshared_project.id) {
- project.update(&mut cx, |project, cx| {
- project.reshared(reshared_project, cx).log_err();
- })?;
- }
- }
- Ok(())
- })
- }
-}
@@ -1,3 +1,4 @@
+use anyhow::Context as _;
use gpui::{
canvas, div, fill, img, opaque_grey, point, size, AnyElement, AppContext, Bounds, Context,
EventEmitter, FocusHandle, FocusableView, Img, InteractiveElement, IntoElement, Model,
@@ -19,6 +20,7 @@ use workspace::{
const IMAGE_VIEWER_KIND: &str = "ImageView";
pub struct ImageItem {
+ id: ProjectEntryId,
path: PathBuf,
project_path: ProjectPath,
}
@@ -48,9 +50,15 @@ impl project::Item for ImageItem {
.read_with(&cx, |project, cx| project.absolute_path(&path, cx))?
.ok_or_else(|| anyhow::anyhow!("Failed to find the absolute path"))?;
+ let id = project
+ .update(&mut cx, |project, cx| project.entry_for_path(&path, cx))?
+ .context("Entry not found")?
+ .id;
+
cx.new_model(|_| ImageItem {
path: abs_path,
project_path: path,
+ id,
})
}))
} else {
@@ -59,7 +67,7 @@ impl project::Item for ImageItem {
}
fn entry_id(&self, _: &AppContext) -> Option<ProjectEntryId> {
- None
+ Some(self.id)
}
fn project_path(&self, _: &AppContext) -> Option<ProjectPath> {
@@ -68,18 +76,30 @@ impl project::Item for ImageItem {
}
pub struct ImageView {
- path: PathBuf,
+ image: Model<ImageItem>,
focus_handle: FocusHandle,
}
impl Item for ImageView {
type Event = ();
- fn tab_content(&self, params: TabContentParams, _cx: &WindowContext) -> AnyElement {
- let title = self
- .path
+ fn for_each_project_item(
+ &self,
+ cx: &AppContext,
+ f: &mut dyn FnMut(gpui::EntityId, &dyn project::Item),
+ ) {
+ f(self.image.entity_id(), self.image.read(cx))
+ }
+
+ fn is_singleton(&self, _cx: &AppContext) -> bool {
+ true
+ }
+
+ fn tab_content(&self, params: TabContentParams, cx: &WindowContext) -> AnyElement {
+ let path = &self.image.read(cx).path;
+ let title = path
.file_name()
- .unwrap_or_else(|| self.path.as_os_str())
+ .unwrap_or_else(|| path.as_os_str())
.to_string_lossy()
.to_string();
Label::new(title)
@@ -90,9 +110,10 @@ impl Item for ImageView {
}
fn tab_icon(&self, cx: &WindowContext) -> Option<Icon> {
+ let path = &self.image.read(cx).path;
ItemSettings::get_global(cx)
.file_icons
- .then(|| FileIcons::get_icon(self.path.as_path(), cx))
+ .then(|| FileIcons::get_icon(path.as_path(), cx))
.flatten()
.map(Icon::from_path)
}
@@ -106,7 +127,7 @@ impl Item for ImageView {
Self: Sized,
{
Some(cx.new_view(|cx| Self {
- path: self.path.clone(),
+ image: self.image.clone(),
focus_handle: cx.focus_handle(),
}))
}
@@ -118,7 +139,7 @@ impl SerializableItem for ImageView {
}
fn deserialize(
- _project: Model<Project>,
+ project: Model<Project>,
_workspace: WeakView<Workspace>,
workspace_id: WorkspaceId,
item_id: ItemId,
@@ -129,10 +150,38 @@ impl SerializableItem for ImageView {
.get_image_path(item_id, workspace_id)?
.ok_or_else(|| anyhow::anyhow!("No image path found"))?;
- cx.new_view(|cx| ImageView {
- path: image_path,
- focus_handle: cx.focus_handle(),
- })
+ let (worktree, relative_path) = project
+ .update(&mut cx, |project, cx| {
+ project.find_or_create_worktree(image_path.clone(), false, cx)
+ })?
+ .await
+ .context("Path not found")?;
+ let worktree_id = worktree.update(&mut cx, |worktree, _cx| worktree.id())?;
+
+ let project_path = ProjectPath {
+ worktree_id,
+ path: relative_path.into(),
+ };
+
+ let id = project
+ .update(&mut cx, |project, cx| {
+ project.entry_for_path(&project_path, cx)
+ })?
+ .context("No entry found")?
+ .id;
+
+ cx.update(|cx| {
+ let image = cx.new_model(|_| ImageItem {
+ id,
+ path: image_path,
+ project_path,
+ });
+
+ Ok(cx.new_view(|cx| ImageView {
+ image,
+ focus_handle: cx.focus_handle(),
+ }))
+ })?
})
}
@@ -154,7 +203,7 @@ impl SerializableItem for ImageView {
let workspace_id = workspace.database_id()?;
Some(cx.background_executor().spawn({
- let image_path = self.path.clone();
+ let image_path = self.image.read(cx).path.clone();
async move {
IMAGE_VIEWER
.save_image_path(item_id, workspace_id, image_path)
@@ -177,6 +226,7 @@ impl FocusableView for ImageView {
impl Render for ImageView {
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
+ let image_path = self.image.read(cx).path.clone();
let checkered_background = |bounds: Bounds<Pixels>, _, cx: &mut WindowContext| {
let square_size = 32.0;
@@ -233,7 +283,7 @@ impl Render for ImageView {
// TODO: In browser based Tailwind & Flex this would be h-screen and we'd use w-full
.h_full()
.child(
- img(self.path.clone())
+ img(image_path)
.object_fit(ObjectFit::ScaleDown)
.max_w_full()
.max_h_full(),
@@ -254,7 +304,7 @@ impl ProjectItem for ImageView {
Self: Sized,
{
Self {
- path: item.read(cx).path.clone(),
+ image: item,
focus_handle: cx.focus_handle(),
}
}
@@ -1967,18 +1967,27 @@ impl Buffer {
let new_text_length = new_text.len();
let old_start = range.start.to_point(&before_edit);
let new_start = (delta + range.start as isize) as usize;
- delta += new_text_length as isize - (range.end as isize - range.start as isize);
+ let range_len = range.end - range.start;
+ delta += new_text_length as isize - range_len as isize;
+ // Decide what range of the insertion to auto-indent, and whether
+ // the first line of the insertion should be considered a newly-inserted line
+ // or an edit to an existing line.
let mut range_of_insertion_to_indent = 0..new_text_length;
- let mut first_line_is_new = false;
- let mut original_indent_column = None;
+ let mut first_line_is_new = true;
+
+ let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
+ let old_line_end = before_edit.line_len(old_start.row);
+
+ if old_start.column > old_line_start {
+ first_line_is_new = false;
+ }
- // When inserting an entire line at the beginning of an existing line,
- // treat the insertion as new.
- if new_text.contains('\n')
- && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
+ if !new_text.contains('\n')
+ && (old_start.column + (range_len as u32) < old_line_end
+ || old_line_end == old_line_start)
{
- first_line_is_new = true;
+ first_line_is_new = false;
}
// When inserting text starting with a newline, avoid auto-indenting the
@@ -1988,7 +1997,7 @@ impl Buffer {
first_line_is_new = true;
}
- // Avoid auto-indenting after the insertion.
+ let mut original_indent_column = None;
if let AutoindentMode::Block {
original_indent_columns,
} = &mode
@@ -2000,6 +2009,8 @@ impl Buffer {
)
.len
}));
+
+ // Avoid auto-indenting the line after the edit.
if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
range_of_insertion_to_indent.end -= 1;
}
@@ -4035,7 +4046,7 @@ impl<'a> BufferChunks<'a> {
let old_range = std::mem::replace(&mut self.range, range.clone());
self.chunks.set_range(self.range.clone());
if let Some(highlights) = self.highlights.as_mut() {
- if old_range.start >= self.range.start && old_range.end <= self.range.end {
+ if old_range.start <= self.range.start && old_range.end >= self.range.end {
// Reuse existing highlights stack, as the new range is a subrange of the old one.
highlights
.stack
@@ -1241,11 +1241,43 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
Some(AutoindentMode::EachLine),
cx,
);
+ assert_eq!(
+ buffer.text(),
+ "
+ fn a() {
+ c
+ .f
+ .g();
+ d
+ .f
+ .g();
+ }
+ "
+ .unindent()
+ );
+    // Insert a newline after the open brace. It is auto-indented.
+ buffer.edit_via_marked_text(
+ &"
+ fn a() {«
+ »
+ c
+ .f
+ .g();
+ d
+ .f
+ .g();
+ }
+ "
+ .unindent(),
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
assert_eq!(
buffer.text(),
"
fn a() {
+ ˇ
c
.f
.g();
@@ -1255,7 +1287,42 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
}
"
.unindent()
+ .replace("ˇ", "")
);
+
+ // Manually outdent the line. It stays outdented.
+ buffer.edit_via_marked_text(
+ &"
+ fn a() {
+ «»
+ c
+ .f
+ .g();
+ d
+ .f
+ .g();
+ }
+ "
+ .unindent(),
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(
+ buffer.text(),
+ "
+ fn a() {
+
+ c
+ .f
+ .g();
+ d
+ .f
+ .g();
+ }
+ "
+ .unindent()
+ );
+
buffer
});
@@ -15,6 +15,7 @@ mod outline;
pub mod proto;
mod syntax_map;
mod task_context;
+mod toolchain;
#[cfg(test)]
pub mod buffer_tests;
@@ -28,7 +29,7 @@ use futures::Future;
use gpui::{AppContext, AsyncAppContext, Model, SharedString, Task};
pub use highlight_map::HighlightMap;
use http_client::HttpClient;
-pub use language_registry::LanguageName;
+pub use language_registry::{LanguageName, LoadedLanguage};
use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions};
use parking_lot::Mutex;
use regex::Regex;
@@ -61,6 +62,7 @@ use syntax_map::{QueryCursorHandle, SyntaxSnapshot};
use task::RunnableTag;
pub use task_context::{ContextProvider, RunnableRange};
use theme::SyntaxTheme;
+pub use toolchain::{LanguageToolchainStore, Toolchain, ToolchainList, ToolchainLister};
use tree_sitter::{self, wasmtime, Query, QueryCursor, WasmStore};
use util::serde::default_true;
@@ -502,6 +504,7 @@ pub trait LspAdapter: 'static + Send + Sync {
async fn workspace_configuration(
self: Arc<Self>,
_: &Arc<dyn LspAdapterDelegate>,
+ _: Arc<dyn LanguageToolchainStore>,
_cx: &mut AsyncAppContext,
) -> Result<Value> {
Ok(serde_json::json!({}))
@@ -855,6 +858,7 @@ pub struct Language {
pub(crate) config: LanguageConfig,
pub(crate) grammar: Option<Arc<Grammar>>,
pub(crate) context_provider: Option<Arc<dyn ContextProvider>>,
+ pub(crate) toolchain: Option<Arc<dyn ToolchainLister>>,
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
@@ -983,6 +987,7 @@ impl Language {
})
}),
context_provider: None,
+ toolchain: None,
}
}
@@ -991,6 +996,11 @@ impl Language {
self
}
+ pub fn with_toolchain_lister(mut self, provider: Option<Arc<dyn ToolchainLister>>) -> Self {
+ self.toolchain = provider;
+ self
+ }
+
pub fn with_queries(mut self, queries: LanguageQueries) -> Result<Self> {
if let Some(query) = queries.highlights {
self = self
@@ -1361,6 +1371,10 @@ impl Language {
self.context_provider.clone()
}
+ pub fn toolchain_lister(&self) -> Option<Arc<dyn ToolchainLister>> {
+ self.toolchain.clone()
+ }
+
pub fn highlight_text<'a>(
self: &'a Arc<Self>,
text: &'a Rope,
@@ -4,7 +4,7 @@ use crate::{
},
task_context::ContextProvider,
with_parser, CachedLspAdapter, File, Language, LanguageConfig, LanguageId, LanguageMatcher,
- LanguageServerName, LspAdapter, PLAIN_TEXT,
+ LanguageServerName, LspAdapter, ToolchainLister, PLAIN_TEXT,
};
use anyhow::{anyhow, Context, Result};
use collections::{hash_map, HashMap, HashSet};
@@ -75,6 +75,13 @@ impl<'a> From<&'a str> for LanguageName {
}
}
+impl From<LanguageName> for String {
+ fn from(value: LanguageName) -> Self {
+ let value: &str = &value.0;
+ Self::from(value)
+ }
+}
+
pub struct LanguageRegistry {
state: RwLock<LanguageRegistryState>,
language_server_download_dir: Option<Arc<Path>>,
@@ -123,16 +130,7 @@ pub struct AvailableLanguage {
name: LanguageName,
grammar: Option<Arc<str>>,
matcher: LanguageMatcher,
- load: Arc<
- dyn Fn() -> Result<(
- LanguageConfig,
- LanguageQueries,
- Option<Arc<dyn ContextProvider>>,
- )>
- + 'static
- + Send
- + Sync,
- >,
+ load: Arc<dyn Fn() -> Result<LoadedLanguage> + 'static + Send + Sync>,
loaded: bool,
}
@@ -200,6 +198,13 @@ struct LspBinaryStatusSender {
txs: Arc<Mutex<Vec<mpsc::UnboundedSender<(LanguageServerName, LanguageServerBinaryStatus)>>>>,
}
+pub struct LoadedLanguage {
+ pub config: LanguageConfig,
+ pub queries: LanguageQueries,
+ pub context_provider: Option<Arc<dyn ContextProvider>>,
+ pub toolchain_provider: Option<Arc<dyn ToolchainLister>>,
+}
+
impl LanguageRegistry {
pub fn new(executor: BackgroundExecutor) -> Self {
let this = Self {
@@ -283,7 +288,14 @@ impl LanguageRegistry {
config.name.clone(),
config.grammar.clone(),
config.matcher.clone(),
- move || Ok((config.clone(), Default::default(), None)),
+ move || {
+ Ok(LoadedLanguage {
+ config: config.clone(),
+ queries: Default::default(),
+ toolchain_provider: None,
+ context_provider: None,
+ })
+ },
)
}
@@ -424,14 +436,7 @@ impl LanguageRegistry {
name: LanguageName,
grammar_name: Option<Arc<str>>,
matcher: LanguageMatcher,
- load: impl Fn() -> Result<(
- LanguageConfig,
- LanguageQueries,
- Option<Arc<dyn ContextProvider>>,
- )>
- + 'static
- + Send
- + Sync,
+ load: impl Fn() -> Result<LoadedLanguage> + 'static + Send + Sync,
) {
let load = Arc::new(load);
let state = &mut *self.state.write();
@@ -726,16 +731,18 @@ impl LanguageRegistry {
self.executor
.spawn(async move {
let language = async {
- let (config, queries, provider) = (language_load)()?;
-
- if let Some(grammar) = config.grammar.clone() {
+ let loaded_language = (language_load)()?;
+ if let Some(grammar) = loaded_language.config.grammar.clone() {
let grammar = Some(this.get_or_load_grammar(grammar).await?);
- Language::new_with_id(id, config, grammar)
- .with_context_provider(provider)
- .with_queries(queries)
+
+ Language::new_with_id(id, loaded_language.config, grammar)
+ .with_context_provider(loaded_language.context_provider)
+ .with_toolchain_lister(loaded_language.toolchain_provider)
+ .with_queries(loaded_language.queries)
} else {
- Ok(Language::new_with_id(id, config, None)
- .with_context_provider(provider))
+ Ok(Language::new_with_id(id, loaded_language.config, None)
+ .with_context_provider(loaded_language.context_provider)
+ .with_toolchain_lister(loaded_language.toolchain_provider))
}
}
.await;
@@ -0,0 +1,65 @@
+//! Provides support for language toolchains.
+//!
+//! A language can have associated toolchains,
+//! each of which is a set of tools used to interact with projects written in that language.
+//! For example, a Python project can have an associated virtual environment; a Rust project can have a toolchain override.
+
+use std::{path::PathBuf, sync::Arc};
+
+use async_trait::async_trait;
+use gpui::{AsyncAppContext, SharedString};
+use settings::WorktreeId;
+
+use crate::LanguageName;
+
+/// Represents a single toolchain.
+#[derive(Clone, Debug, PartialEq)]
+pub struct Toolchain {
+ /// User-facing label
+ pub name: SharedString,
+ pub path: SharedString,
+ pub language_name: LanguageName,
+}
+
+#[async_trait(?Send)]
+pub trait ToolchainLister: Send + Sync {
+ async fn list(&self, _: PathBuf) -> ToolchainList;
+}
+
+#[async_trait(?Send)]
+pub trait LanguageToolchainStore {
+ async fn active_toolchain(
+ self: Arc<Self>,
+ worktree_id: WorktreeId,
+ language_name: LanguageName,
+ cx: &mut AsyncAppContext,
+ ) -> Option<Toolchain>;
+}
+
+type DefaultIndex = usize;
+#[derive(Default, Clone)]
+pub struct ToolchainList {
+ pub toolchains: Vec<Toolchain>,
+ pub default: Option<DefaultIndex>,
+ pub groups: Box<[(usize, SharedString)]>,
+}
+
+impl ToolchainList {
+ pub fn toolchains(&self) -> &[Toolchain] {
+ &self.toolchains
+ }
+ pub fn default_toolchain(&self) -> Option<Toolchain> {
+ self.default.and_then(|ix| self.toolchains.get(ix)).cloned()
+ }
+ pub fn group_for_index(&self, index: usize) -> Option<(usize, SharedString)> {
+ if index >= self.toolchains.len() {
+ return None;
+ }
+ let first_equal_or_greater = self
+ .groups
+ .partition_point(|(group_lower_bound, _)| group_lower_bound <= &index);
+ self.groups
+ .get(first_equal_or_greater.checked_sub(1)?)
+ .cloned()
+ }
+}
@@ -505,10 +505,14 @@ pub fn map_to_language_model_completion_events(
LanguageModelToolUse {
id: tool_use.id,
name: tool_use.name,
- input: serde_json::Value::from_str(
- &tool_use.input_json,
- )
- .map_err(|err| anyhow!(err))?,
+ input: if tool_use.input_json.is_empty() {
+ serde_json::Value::Null
+ } else {
+ serde_json::Value::from_str(
+ &tool_use.input_json,
+ )
+ .map_err(|err| anyhow!(err))?
+ },
},
))
})),
@@ -54,6 +54,7 @@ pub struct OllamaLanguageModelProvider {
pub struct State {
http_client: Arc<dyn HttpClient>,
available_models: Vec<ollama::Model>,
+ fetch_model_task: Option<Task<Result<()>>>,
_subscription: Subscription,
}
@@ -89,6 +90,11 @@ impl State {
})
}
+ fn restart_fetch_models_task(&mut self, cx: &mut ModelContext<Self>) {
+ let task = self.fetch_models(cx);
+ self.fetch_model_task.replace(task);
+ }
+
fn authenticate(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
if self.is_authenticated() {
Task::ready(Ok(()))
@@ -102,17 +108,29 @@ impl OllamaLanguageModelProvider {
pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut AppContext) -> Self {
let this = Self {
http_client: http_client.clone(),
- state: cx.new_model(|cx| State {
- http_client,
- available_models: Default::default(),
- _subscription: cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
- this.fetch_models(cx).detach();
- cx.notify();
- }),
+ state: cx.new_model(|cx| {
+ let subscription = cx.observe_global::<SettingsStore>({
+ let mut settings = AllLanguageModelSettings::get_global(cx).ollama.clone();
+ move |this: &mut State, cx| {
+ let new_settings = &AllLanguageModelSettings::get_global(cx).ollama;
+ if &settings != new_settings {
+ settings = new_settings.clone();
+ this.restart_fetch_models_task(cx);
+ cx.notify();
+ }
+ }
+ });
+
+ State {
+ http_client,
+ available_models: Default::default(),
+ fetch_model_task: None,
+ _subscription: subscription,
+ }
}),
};
this.state
- .update(cx, |state, cx| state.fetch_models(cx).detach());
+ .update(cx, |state, cx| state.restart_fetch_models_task(cx));
this
}
}
@@ -10,7 +10,7 @@ workspace = true
[features]
test-support = [
- "tree-sitter"
+ "load-grammars"
]
load-grammars = [
"tree-sitter-bash",
@@ -47,6 +47,11 @@ log.workspace = true
lsp.workspace = true
node_runtime.workspace = true
paths.workspace = true
+pet.workspace = true
+pet-core.workspace = true
+pet-conda.workspace = true
+pet-poetry.workspace = true
+pet-reporter.workspace = true
project.workspace = true
regex.workspace = true
rope.workspace = true
@@ -82,3 +87,8 @@ text.workspace = true
theme = { workspace = true, features = ["test-support"] }
unindent.workspace = true
workspace = { workspace = true, features = ["test-support"] }
+tree-sitter-typescript.workspace = true
+tree-sitter-python.workspace = true
+tree-sitter-go.workspace = true
+tree-sitter-c.workspace = true
+tree-sitter-css.workspace = true
@@ -7,7 +7,9 @@ use feature_flags::FeatureFlagAppExt;
use futures::StreamExt;
use gpui::{AppContext, AsyncAppContext};
use http_client::github::{latest_github_release, GitHubLspBinaryVersion};
-use language::{LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate};
+use language::{
+ LanguageRegistry, LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate,
+};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
use project::ContextProviderWithTasks;
@@ -198,6 +200,7 @@ impl LspAdapter for JsonLspAdapter {
async fn workspace_configuration(
self: Arc<Self>,
_: &Arc<dyn LspAdapterDelegate>,
+ _: Arc<dyn LanguageToolchainStore>,
cx: &mut AsyncAppContext,
) -> Result<Value> {
cx.update(|cx| {
@@ -3,7 +3,7 @@ use gpui::{AppContext, UpdateGlobal};
use json::json_task_context;
pub use language::*;
use node_runtime::NodeRuntime;
-use python::PythonContextProvider;
+use python::{PythonContextProvider, PythonToolchainProvider};
use rust_embed::RustEmbed;
use settings::SettingsStore;
use smol::stream::StreamExt;
@@ -61,7 +61,14 @@ pub fn init(languages: Arc<LanguageRegistry>, node_runtime: NodeRuntime, cx: &mu
config.name.clone(),
config.grammar.clone(),
config.matcher.clone(),
- move || Ok((config.clone(), load_queries($name), None)),
+ move || {
+ Ok(LoadedLanguage {
+ config: config.clone(),
+ queries: load_queries($name),
+ context_provider: None,
+ toolchain_provider: None,
+ })
+ },
);
};
($name:literal, $adapters:expr) => {
@@ -75,7 +82,14 @@ pub fn init(languages: Arc<LanguageRegistry>, node_runtime: NodeRuntime, cx: &mu
config.name.clone(),
config.grammar.clone(),
config.matcher.clone(),
- move || Ok((config.clone(), load_queries($name), None)),
+ move || {
+ Ok(LoadedLanguage {
+ config: config.clone(),
+ queries: load_queries($name),
+ context_provider: None,
+ toolchain_provider: None,
+ })
+ },
);
};
($name:literal, $adapters:expr, $context_provider:expr) => {
@@ -90,11 +104,33 @@ pub fn init(languages: Arc<LanguageRegistry>, node_runtime: NodeRuntime, cx: &mu
config.grammar.clone(),
config.matcher.clone(),
move || {
- Ok((
- config.clone(),
- load_queries($name),
- Some(Arc::new($context_provider)),
- ))
+ Ok(LoadedLanguage {
+ config: config.clone(),
+ queries: load_queries($name),
+ context_provider: Some(Arc::new($context_provider)),
+ toolchain_provider: None,
+ })
+ },
+ );
+ };
+ ($name:literal, $adapters:expr, $context_provider:expr, $toolchain_provider:expr) => {
+ let config = load_config($name);
+ // typeck helper
+ let adapters: Vec<Arc<dyn LspAdapter>> = $adapters;
+ for adapter in adapters {
+ languages.register_lsp_adapter(config.name.clone(), adapter);
+ }
+ languages.register_language(
+ config.name.clone(),
+ config.grammar.clone(),
+ config.matcher.clone(),
+ move || {
+ Ok(LoadedLanguage {
+ config: config.clone(),
+ queries: load_queries($name),
+ context_provider: Some(Arc::new($context_provider)),
+ toolchain_provider: Some($toolchain_provider),
+ })
},
);
};
@@ -141,7 +177,8 @@ pub fn init(languages: Arc<LanguageRegistry>, node_runtime: NodeRuntime, cx: &mu
vec![Arc::new(python::PythonLspAdapter::new(
node_runtime.clone(),
))],
- PythonContextProvider
+ PythonContextProvider,
+ Arc::new(PythonToolchainProvider::default()) as Arc<dyn ToolchainLister>
);
language!(
"rust",
@@ -3,9 +3,16 @@ use async_trait::async_trait;
use collections::HashMap;
use gpui::AppContext;
use gpui::AsyncAppContext;
+use language::LanguageName;
+use language::LanguageToolchainStore;
+use language::Toolchain;
+use language::ToolchainList;
+use language::ToolchainLister;
use language::{ContextProvider, LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
+use pet_core::python_environment::PythonEnvironmentKind;
+use pet_core::Configuration;
use project::lsp_store::language_server_settings;
use serde_json::Value;
@@ -200,12 +207,35 @@ impl LspAdapter for PythonLspAdapter {
async fn workspace_configuration(
self: Arc<Self>,
adapter: &Arc<dyn LspAdapterDelegate>,
+ toolchains: Arc<dyn LanguageToolchainStore>,
cx: &mut AsyncAppContext,
) -> Result<Value> {
- cx.update(|cx| {
- language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx)
- .and_then(|s| s.settings.clone())
- .unwrap_or_default()
+ let toolchain = toolchains
+ .active_toolchain(adapter.worktree_id(), LanguageName::new("Python"), cx)
+ .await;
+ cx.update(move |cx| {
+ let mut user_settings =
+ language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx)
+ .and_then(|s| s.settings.clone())
+ .unwrap_or_default();
+
+ // If python.pythonPath is not set in user config, do so using our toolchain picker.
+ if let Some(toolchain) = toolchain {
+ if user_settings.is_null() {
+ user_settings = Value::Object(serde_json::Map::default());
+ }
+ let object = user_settings.as_object_mut().unwrap();
+ if let Some(python) = object
+ .entry("python")
+ .or_insert(Value::Object(serde_json::Map::default()))
+ .as_object_mut()
+ {
+ python
+ .entry("pythonPath")
+ .or_insert(Value::String(toolchain.path.into()));
+ }
+ }
+ user_settings
})
}
}
@@ -320,6 +350,83 @@ fn python_module_name_from_relative_path(relative_path: &str) -> String {
.to_string()
}
+#[derive(Default)]
+pub(crate) struct PythonToolchainProvider {}
+
+static ENV_PRIORITY_LIST: &'static [PythonEnvironmentKind] = &[
+ // Prioritize non-Conda environments.
+ PythonEnvironmentKind::Poetry,
+ PythonEnvironmentKind::Pipenv,
+ PythonEnvironmentKind::VirtualEnvWrapper,
+ PythonEnvironmentKind::Venv,
+ PythonEnvironmentKind::VirtualEnv,
+ PythonEnvironmentKind::Conda,
+ PythonEnvironmentKind::Pyenv,
+ PythonEnvironmentKind::GlobalPaths,
+ PythonEnvironmentKind::Homebrew,
+];
+
+fn env_priority(kind: Option<PythonEnvironmentKind>) -> usize {
+ if let Some(kind) = kind {
+ ENV_PRIORITY_LIST
+ .iter()
+ .position(|blessed_env| blessed_env == &kind)
+ .unwrap_or(ENV_PRIORITY_LIST.len())
+ } else {
+ // Unknown toolchains are less useful than non-blessed ones.
+ ENV_PRIORITY_LIST.len() + 1
+ }
+}
+
+#[async_trait(?Send)]
+impl ToolchainLister for PythonToolchainProvider {
+ async fn list(&self, worktree_root: PathBuf) -> ToolchainList {
+ let environment = pet_core::os_environment::EnvironmentApi::new();
+ let locators = pet::locators::create_locators(
+ Arc::new(pet_conda::Conda::from(&environment)),
+ Arc::new(pet_poetry::Poetry::from(&environment)),
+ &environment,
+ );
+ let mut config = Configuration::default();
+ config.workspace_directories = Some(vec![worktree_root]);
+ let reporter = pet_reporter::collect::create_reporter();
+ pet::find::find_and_report_envs(&reporter, config, &locators, &environment, None);
+
+ let mut toolchains = reporter
+ .environments
+ .lock()
+ .ok()
+ .map_or(Vec::new(), |mut guard| std::mem::take(&mut guard));
+ toolchains.sort_by(|lhs, rhs| {
+ env_priority(lhs.kind)
+ .cmp(&env_priority(rhs.kind))
+ .then_with(|| lhs.executable.cmp(&rhs.executable))
+ });
+ let mut toolchains: Vec<_> = toolchains
+ .into_iter()
+ .filter_map(|toolchain| {
+ let name = if let Some(version) = &toolchain.version {
+ format!("Python {version} ({:?})", toolchain.kind?)
+ } else {
+ format!("{:?}", toolchain.kind?)
+ }
+ .into();
+ Some(Toolchain {
+ name,
+ path: toolchain.executable?.to_str()?.to_owned().into(),
+ language_name: LanguageName::new("Python"),
+ })
+ })
+ .collect();
+ toolchains.dedup();
+ ToolchainList {
+ toolchains,
+ default: None,
+ groups: Default::default(),
+ }
+ }
+}
+
#[cfg(test)]
mod tests {
use gpui::{BorrowAppContext, Context, ModelContext, TestAppContext};
@@ -5,6 +5,9 @@ line_comments = ["// ", "/// ", "//! "]
autoclose_before = ";:.,=}])>"
brackets = [
{ start = "{", end = "}", close = true, newline = true },
+ { start = "r#\"", end = "\"#", close = true, newline = true },
+ { start = "r##\"", end = "\"##", close = true, newline = true },
+ { start = "r###\"", end = "\"###", close = true, newline = true },
{ start = "[", end = "]", close = true, newline = true },
{ start = "(", end = ")", close = true, newline = true },
{ start = "<", end = ">", close = false, newline = true, not_in = ["string", "comment"] },
@@ -3,7 +3,7 @@ use async_trait::async_trait;
use collections::HashMap;
use futures::StreamExt;
use gpui::AsyncAppContext;
-use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
+use language::{LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
use project::lsp_store::language_server_settings;
@@ -111,6 +111,7 @@ impl LspAdapter for TailwindLspAdapter {
async fn workspace_configuration(
self: Arc<Self>,
delegate: &Arc<dyn LspAdapterDelegate>,
+ _: Arc<dyn LanguageToolchainStore>,
cx: &mut AsyncAppContext,
) -> Result<Value> {
let tailwind_user_settings = cx.update(|cx| {
@@ -5,7 +5,7 @@ use async_trait::async_trait;
use collections::HashMap;
use gpui::AsyncAppContext;
use http_client::github::{build_asset_url, AssetKind, GitHubLspBinaryVersion};
-use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
+use language::{LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate};
use lsp::{CodeActionKind, LanguageServerBinary};
use node_runtime::NodeRuntime;
use project::lsp_store::language_server_settings;
@@ -230,6 +230,7 @@ impl LspAdapter for TypeScriptLspAdapter {
async fn workspace_configuration(
self: Arc<Self>,
delegate: &Arc<dyn LspAdapterDelegate>,
+ _: Arc<dyn LanguageToolchainStore>,
cx: &mut AsyncAppContext,
) -> Result<Value> {
let override_options = cx.update(|cx| {
@@ -325,6 +326,7 @@ impl LspAdapter for EsLintLspAdapter {
async fn workspace_configuration(
self: Arc<Self>,
delegate: &Arc<dyn LspAdapterDelegate>,
+ _: Arc<dyn LanguageToolchainStore>,
cx: &mut AsyncAppContext,
) -> Result<Value> {
let workspace_root = delegate.worktree_root_path();
@@ -2,7 +2,7 @@ use anyhow::{anyhow, Result};
use async_trait::async_trait;
use collections::HashMap;
use gpui::AsyncAppContext;
-use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
+use language::{LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate};
use lsp::{CodeActionKind, LanguageServerBinary};
use node_runtime::NodeRuntime;
use project::lsp_store::language_server_settings;
@@ -183,6 +183,7 @@ impl LspAdapter for VtslsLspAdapter {
async fn workspace_configuration(
self: Arc<Self>,
delegate: &Arc<dyn LspAdapterDelegate>,
+ _: Arc<dyn LanguageToolchainStore>,
cx: &mut AsyncAppContext,
) -> Result<Value> {
let tsdk_path = Self::tsdk_path(delegate).await;
@@ -3,7 +3,8 @@ use async_trait::async_trait;
use futures::StreamExt;
use gpui::AsyncAppContext;
use language::{
- language_settings::AllLanguageSettings, LanguageServerName, LspAdapter, LspAdapterDelegate,
+ language_settings::AllLanguageSettings, LanguageServerName, LanguageToolchainStore, LspAdapter,
+ LspAdapterDelegate,
};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
@@ -92,6 +93,7 @@ impl LspAdapter for YamlLspAdapter {
async fn workspace_configuration(
self: Arc<Self>,
delegate: &Arc<dyn LspAdapterDelegate>,
+ _: Arc<dyn LanguageToolchainStore>,
cx: &mut AsyncAppContext,
) -> Result<Value> {
let location = SettingsLocation {
@@ -1177,6 +1177,8 @@ impl FakeLanguageServer {
let (stdout_writer, stdout_reader) = async_pipe::pipe();
let (notifications_tx, notifications_rx) = channel::unbounded();
+ let root = Self::root_path();
+
let mut server = LanguageServer::new_internal(
server_id,
stdin_writer,
@@ -1184,8 +1186,8 @@ impl FakeLanguageServer {
None::<async_pipe::PipeReader>,
Arc::new(Mutex::new(None)),
None,
- Path::new("/"),
- Path::new("/"),
+ root,
+ root,
None,
cx.clone(),
|_| {},
@@ -1201,8 +1203,8 @@ impl FakeLanguageServer {
None::<async_pipe::PipeReader>,
Arc::new(Mutex::new(None)),
None,
- Path::new("/"),
- Path::new("/"),
+ root,
+ root,
None,
cx,
move |msg| {
@@ -1238,6 +1240,16 @@ impl FakeLanguageServer {
(server, fake)
}
+
+ #[cfg(target_os = "windows")]
+ fn root_path() -> &'static Path {
+ Path::new("C:\\")
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ fn root_path() -> &'static Path {
+ Path::new("/")
+ }
}
#[cfg(any(test, feature = "test-support"))]
@@ -234,6 +234,10 @@ impl<'a> MarkdownParser<'a> {
text.push('\n');
}
+ // We want to ignore any inline HTML tags in the text but keep
+ // the text between them
+ Event::InlineHtml(_) => {}
+
Event::Text(t) => {
text.push_str(t.as_ref());
@@ -626,6 +630,8 @@ impl<'a> MarkdownParser<'a> {
// Otherwise we need to insert the block after all the nested items
// that have been parsed so far
items.extend(block);
+ } else {
+ self.cursor += 1;
}
}
}
@@ -847,6 +853,16 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_text_with_inline_html() {
+ let parsed = parse("This is a paragraph with an inline HTML <sometag>tag</sometag>.").await;
+
+ assert_eq!(
+ parsed.children,
+ vec![p("This is a paragraph with an inline HTML tag.", 0..63),],
+ );
+ }
+
#[gpui::test]
async fn test_raw_links_detection() {
let parsed = parse("Checkout this https://zed.dev link").await;
@@ -1090,6 +1106,26 @@ Some other content
);
}
+ #[gpui::test]
+ async fn test_list_item_with_inline_html() {
+ let parsed = parse(
+ "\
+* This is a list item with an inline HTML <sometag>tag</sometag>.
+",
+ )
+ .await;
+
+ assert_eq!(
+ parsed.children,
+ vec![list_item(
+ 0..67,
+ 1,
+ Unordered,
+ vec![p("This is a list item with an inline HTML tag.", 4..44),],
+ ),],
+ );
+ }
+
#[gpui::test]
async fn test_nested_list_with_paragraph_inside() {
let parsed = parse(
@@ -2862,6 +2862,30 @@ impl MultiBufferSnapshot {
}
}
+ pub fn indent_and_comment_for_line(&self, row: MultiBufferRow, cx: &AppContext) -> String {
+ let mut indent = self.indent_size_for_line(row).chars().collect::<String>();
+
+ if self.settings_at(0, cx).extend_comment_on_newline {
+ if let Some(language_scope) = self.language_scope_at(Point::new(row.0, 0)) {
+ let delimiters = language_scope.line_comment_prefixes();
+ for delimiter in delimiters {
+ if *self
+ .chars_at(Point::new(row.0, indent.len() as u32))
+ .take(delimiter.chars().count())
+ .collect::<String>()
+ .as_str()
+ == **delimiter
+ {
+ indent.push_str(&delimiter);
+ break;
+ }
+ }
+ }
+ }
+
+ indent
+ }
+
pub fn prev_non_blank_row(&self, mut row: MultiBufferRow) -> Option<MultiBufferRow> {
while row.0 > 0 {
row.0 -= 1;
@@ -30,8 +30,10 @@ search.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
+smallvec.workspace = true
smol.workspace = true
theme.workspace = true
+ui.workspace = true
util.workspace = true
worktree.workspace = true
workspace.workspace = true
@@ -24,12 +24,12 @@ use editor::{
use file_icons::FileIcons;
use fuzzy::{match_strings, StringMatch, StringMatchCandidate};
use gpui::{
- actions, anchored, deferred, div, impl_actions, px, uniform_list, Action, AnyElement,
- AppContext, AssetSource, AsyncWindowContext, ClipboardItem, DismissEvent, Div, ElementId,
- EventEmitter, FocusHandle, FocusableView, HighlightStyle, InteractiveElement, IntoElement,
- KeyContext, Model, MouseButton, MouseDownEvent, ParentElement, Pixels, Point, Render,
- SharedString, Stateful, Styled, Subscription, Task, UniformListScrollHandle, View, ViewContext,
- VisualContext, WeakView, WindowContext,
+ actions, anchored, deferred, div, impl_actions, point, px, size, uniform_list, Action,
+ AnyElement, AppContext, AssetSource, AsyncWindowContext, Bounds, ClipboardItem, DismissEvent,
+ Div, ElementId, EventEmitter, FocusHandle, FocusableView, HighlightStyle, InteractiveElement,
+ IntoElement, KeyContext, Model, MouseButton, MouseDownEvent, ParentElement, Pixels, Point,
+ Render, SharedString, Stateful, Styled, Subscription, Task, UniformListScrollHandle, View,
+ ViewContext, VisualContext, WeakView, WindowContext,
};
use itertools::Itertools;
use language::{BufferId, BufferSnapshot, OffsetRangeExt, OutlineItem};
@@ -42,6 +42,7 @@ use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use smol::channel;
use theme::{SyntaxTheme, ThemeSettings};
+use ui::{IndentGuideColors, IndentGuideLayout};
use util::{debug_panic, RangeExt, ResultExt, TryFutureExt};
use workspace::{
dock::{DockPosition, Panel, PanelEvent},
@@ -254,14 +255,14 @@ impl SearchState {
#[derive(Debug)]
enum SelectedEntry {
Invalidated(Option<PanelEntry>),
- Valid(PanelEntry),
+ Valid(PanelEntry, usize),
None,
}
impl SelectedEntry {
fn invalidate(&mut self) {
match std::mem::replace(self, SelectedEntry::None) {
- Self::Valid(entry) => *self = Self::Invalidated(Some(entry)),
+ Self::Valid(entry, _) => *self = Self::Invalidated(Some(entry)),
Self::None => *self = Self::Invalidated(None),
other => *self = other,
}
@@ -3568,7 +3569,7 @@ impl OutlinePanel {
fn selected_entry(&self) -> Option<&PanelEntry> {
match &self.selected_entry {
SelectedEntry::Invalidated(entry) => entry.as_ref(),
- SelectedEntry::Valid(entry) => Some(entry),
+ SelectedEntry::Valid(entry, _) => Some(entry),
SelectedEntry::None => None,
}
}
@@ -3577,7 +3578,16 @@ impl OutlinePanel {
if focus {
self.focus_handle.focus(cx);
}
- self.selected_entry = SelectedEntry::Valid(entry);
+ let ix = self
+ .cached_entries
+ .iter()
+ .enumerate()
+ .find(|(_, cached_entry)| &cached_entry.entry == &entry)
+ .map(|(i, _)| i)
+ .unwrap_or_default();
+
+ self.selected_entry = SelectedEntry::Valid(entry, ix);
+
self.autoscroll(cx);
cx.notify();
}
@@ -3736,6 +3746,9 @@ impl Render for OutlinePanel {
let project = self.project.read(cx);
let query = self.query(cx);
let pinned = self.pinned;
+ let settings = OutlinePanelSettings::get_global(cx);
+ let indent_size = settings.indent_size;
+ let show_indent_guides = settings.indent_guides;
let outline_panel = v_flex()
.id("outline-panel")
@@ -3901,6 +3914,61 @@ impl Render for OutlinePanel {
})
.size_full()
.track_scroll(self.scroll_handle.clone())
+ .when(show_indent_guides, |list| {
+ list.with_decoration(
+ ui::indent_guides(
+ cx.view().clone(),
+ px(indent_size),
+ IndentGuideColors::panel(cx),
+ |outline_panel, range, _| {
+ let entries = outline_panel.cached_entries.get(range);
+ if let Some(entries) = entries {
+ entries.into_iter().map(|item| item.depth).collect()
+ } else {
+ smallvec::SmallVec::new()
+ }
+ },
+ )
+ .with_render_fn(
+ cx.view().clone(),
+ move |outline_panel, params, _| {
+ const LEFT_OFFSET: f32 = 14.;
+
+ let indent_size = params.indent_size;
+ let item_height = params.item_height;
+ let active_indent_guide_ix = find_active_indent_guide_ix(
+ outline_panel,
+ ¶ms.indent_guides,
+ );
+
+ params
+ .indent_guides
+ .into_iter()
+ .enumerate()
+ .map(|(ix, layout)| {
+ let bounds = Bounds::new(
+ point(
+ px(layout.offset.x as f32) * indent_size
+ + px(LEFT_OFFSET),
+ px(layout.offset.y as f32) * item_height,
+ ),
+ size(
+ px(1.),
+ px(layout.length as f32) * item_height,
+ ),
+ );
+ ui::RenderedIndentGuide {
+ bounds,
+ layout,
+ is_active: active_indent_guide_ix == Some(ix),
+ hitbox: None,
+ }
+ })
+ .collect()
+ },
+ ),
+ )
+ })
})
}
.children(self.context_menu.as_ref().map(|(menu, position, _)| {
@@ -3945,6 +4013,40 @@ impl Render for OutlinePanel {
}
}
+fn find_active_indent_guide_ix(
+ outline_panel: &OutlinePanel,
+ candidates: &[IndentGuideLayout],
+) -> Option<usize> {
+ let SelectedEntry::Valid(_, target_ix) = &outline_panel.selected_entry else {
+ return None;
+ };
+ let target_depth = outline_panel
+ .cached_entries
+ .get(*target_ix)
+ .map(|cached_entry| cached_entry.depth)?;
+
+ let (target_ix, target_depth) = if let Some(target_depth) = outline_panel
+ .cached_entries
+ .get(target_ix + 1)
+ .filter(|cached_entry| cached_entry.depth > target_depth)
+ .map(|entry| entry.depth)
+ {
+ (target_ix + 1, target_depth.saturating_sub(1))
+ } else {
+ (*target_ix, target_depth.saturating_sub(1))
+ };
+
+ candidates
+ .iter()
+ .enumerate()
+ .find(|(_, guide)| {
+ guide.offset.y <= target_ix
+ && target_ix < guide.offset.y + guide.length
+ && guide.offset.x == target_depth
+ })
+ .map(|(ix, _)| ix)
+}
+
fn subscribe_for_editor_events(
editor: &View<Editor>,
cx: &mut ViewContext<OutlinePanel>,
@@ -19,6 +19,7 @@ pub struct OutlinePanelSettings {
pub folder_icons: bool,
pub git_status: bool,
pub indent_size: f32,
+ pub indent_guides: bool,
pub auto_reveal_entries: bool,
pub auto_fold_dirs: bool,
}
@@ -53,6 +54,10 @@ pub struct OutlinePanelSettingsContent {
///
/// Default: 20
pub indent_size: Option<f32>,
+ /// Whether to show indent guides in the outline panel.
+ ///
+ /// Default: true
+ pub indent_guides: Option<bool>,
/// Whether to reveal it in the outline panel automatically,
/// when a corresponding project entry becomes active.
/// Gitignored entries are never auto revealed.
@@ -30,7 +30,6 @@ async-trait.workspace = true
client.workspace = true
clock.workspace = true
collections.workspace = true
-dev_server_projects.workspace = true
fs.workspace = true
futures.workspace = true
fuzzy.workspace = true
@@ -7,10 +7,11 @@ use crate::{
prettier_store::{self, PrettierStore, PrettierStoreEvent},
project_settings::{LspSettings, ProjectSettings},
relativize_path, resolve_path,
+ toolchain_store::{EmptyToolchainStore, ToolchainStoreEvent},
worktree_store::{WorktreeStore, WorktreeStoreEvent},
yarn::YarnPathStore,
CodeAction, Completion, CoreCompletion, Hover, InlayHint, Item as _, ProjectPath,
- ProjectTransaction, ResolveState, Symbol,
+ ProjectTransaction, ResolveState, Symbol, ToolchainStore,
};
use anyhow::{anyhow, Context as _, Result};
use async_trait::async_trait;
@@ -36,9 +37,9 @@ use language::{
proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeLabel, Diagnostic,
DiagnosticEntry, DiagnosticSet, Diff, Documentation, File as _, Language, LanguageName,
- LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName, LocalFile, LspAdapter,
- LspAdapterDelegate, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
- Unclipped,
+ LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName, LanguageToolchainStore,
+ LocalFile, LspAdapter, LspAdapterDelegate, Patch, PointUtf16, TextBufferSnapshot, ToOffset,
+ ToPointUtf16, Transaction, Unclipped,
};
use lsp::{
CodeActionKind, CompletionContext, DiagnosticSeverity, DiagnosticTag,
@@ -707,12 +708,13 @@ pub struct LspStore {
nonce: u128,
buffer_store: Model<BufferStore>,
worktree_store: Model<WorktreeStore>,
+ toolchain_store: Option<Model<ToolchainStore>>,
buffer_snapshots: HashMap<BufferId, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
pub languages: Arc<LanguageRegistry>,
language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
pub language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
active_entry: Option<ProjectEntryId>,
- _maintain_workspace_config: Task<Result<()>>,
+ _maintain_workspace_config: (Task<Result<()>>, watch::Sender<()>),
_maintain_buffer_languages: Task<()>,
next_diagnostic_group_id: usize,
diagnostic_summaries:
@@ -871,6 +873,7 @@ impl LspStore {
buffer_store: Model<BufferStore>,
worktree_store: Model<WorktreeStore>,
prettier_store: Model<PrettierStore>,
+ toolchain_store: Model<ToolchainStore>,
environment: Model<ProjectEnvironment>,
languages: Arc<LanguageRegistry>,
http_client: Arc<dyn HttpClient>,
@@ -884,9 +887,15 @@ impl LspStore {
.detach();
cx.subscribe(&prettier_store, Self::on_prettier_store_event)
.detach();
+ cx.subscribe(&toolchain_store, Self::on_toolchain_store_event)
+ .detach();
cx.observe_global::<SettingsStore>(Self::on_settings_changed)
.detach();
+ let _maintain_workspace_config = {
+ let (sender, receiver) = watch::channel();
+ (Self::maintain_workspace_config(receiver, cx), sender)
+ };
Self {
mode: LspStoreMode::Local(LocalLspStore {
supplementary_language_servers: Default::default(),
@@ -909,6 +918,7 @@ impl LspStore {
downstream_client: None,
buffer_store,
worktree_store,
+ toolchain_store: Some(toolchain_store),
languages: languages.clone(),
language_server_ids: Default::default(),
language_server_statuses: Default::default(),
@@ -919,7 +929,7 @@ impl LspStore {
diagnostics: Default::default(),
active_entry: None,
- _maintain_workspace_config: Self::maintain_workspace_config(cx),
+ _maintain_workspace_config,
_maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
}
}
@@ -942,9 +952,10 @@ impl LspStore {
})
}
- pub fn new_remote(
+ pub(super) fn new_remote(
buffer_store: Model<BufferStore>,
worktree_store: Model<WorktreeStore>,
+ toolchain_store: Option<Model<ToolchainStore>>,
languages: Arc<LanguageRegistry>,
upstream_client: AnyProtoClient,
project_id: u64,
@@ -954,7 +965,10 @@ impl LspStore {
.detach();
cx.subscribe(&worktree_store, Self::on_worktree_store_event)
.detach();
-
+ let _maintain_workspace_config = {
+ let (sender, receiver) = watch::channel();
+ (Self::maintain_workspace_config(receiver, cx), sender)
+ };
Self {
mode: LspStoreMode::Remote(RemoteLspStore {
upstream_client: Some(upstream_client),
@@ -972,7 +986,8 @@ impl LspStore {
diagnostic_summaries: Default::default(),
diagnostics: Default::default(),
active_entry: None,
- _maintain_workspace_config: Self::maintain_workspace_config(cx),
+ toolchain_store,
+ _maintain_workspace_config,
_maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
}
}
@@ -1063,6 +1078,22 @@ impl LspStore {
}
}
+ fn on_toolchain_store_event(
+ &mut self,
+ _: Model<ToolchainStore>,
+ event: &ToolchainStoreEvent,
+ _: &mut ModelContext<Self>,
+ ) {
+ match event {
+ ToolchainStoreEvent::ToolchainActivated { .. } => {
+ self.request_workspace_config_refresh()
+ }
+ }
+ }
+
+ fn request_workspace_config_refresh(&mut self) {
+ *self._maintain_workspace_config.1.borrow_mut() = ();
+ }
// todo!
pub fn prettier_store(&self) -> Option<Model<PrettierStore>> {
self.as_local().map(|local| local.prettier_store.clone())
@@ -3029,17 +3060,13 @@ impl LspStore {
None
}
- fn maintain_workspace_config(cx: &mut ModelContext<Self>) -> Task<Result<()>> {
- let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
- let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
-
- let settings_observation = cx.observe_global::<SettingsStore>(move |_, _| {
- *settings_changed_tx.borrow_mut() = ();
- });
-
- cx.spawn(move |this, mut cx| async move {
- while let Some(()) = settings_changed_rx.next().await {
- let servers = this.update(&mut cx, |this, cx| {
+ pub(crate) async fn refresh_workspace_configurations(
+ this: &WeakModel<Self>,
+ mut cx: AsyncAppContext,
+ ) {
+ maybe!(async move {
+ let servers = this
+ .update(&mut cx, |this, cx| {
this.language_server_ids
.iter()
.filter_map(|((worktree_id, _), server_id)| {
@@ -3061,17 +3088,52 @@ impl LspStore {
}
})
.collect::<Vec<_>>()
- })?;
+ })
+ .ok()?;
+
+ let toolchain_store = this
+ .update(&mut cx, |this, cx| this.toolchain_store(cx))
+ .ok()?;
+ for (adapter, server, delegate) in servers {
+ let settings = adapter
+ .workspace_configuration(&delegate, toolchain_store.clone(), &mut cx)
+ .await
+ .ok()?;
- for (adapter, server, delegate) in servers {
- let settings = adapter.workspace_configuration(&delegate, &mut cx).await?;
+ server
+ .notify::<lsp::notification::DidChangeConfiguration>(
+ lsp::DidChangeConfigurationParams { settings },
+ )
+ .ok();
+ }
+ Some(())
+ })
+ .await;
+ }
- server
- .notify::<lsp::notification::DidChangeConfiguration>(
- lsp::DidChangeConfigurationParams { settings },
- )
- .ok();
- }
+ fn toolchain_store(&self, cx: &AppContext) -> Arc<dyn LanguageToolchainStore> {
+ if let Some(toolchain_store) = self.toolchain_store.as_ref() {
+ toolchain_store.read(cx).as_language_toolchain_store()
+ } else {
+ Arc::new(EmptyToolchainStore)
+ }
+ }
+ fn maintain_workspace_config(
+ external_refresh_requests: watch::Receiver<()>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<()>> {
+ let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
+ let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
+
+ let settings_observation = cx.observe_global::<SettingsStore>(move |_, _| {
+ *settings_changed_tx.borrow_mut() = ();
+ });
+
+ let mut joint_future =
+ futures::stream::select(settings_changed_rx, external_refresh_requests);
+ cx.spawn(move |this, cx| async move {
+ while let Some(()) = joint_future.next().await {
+ Self::refresh_workspace_configurations(&this, cx.clone()).await;
}
drop(settings_observation);
@@ -5517,6 +5579,9 @@ impl LspStore {
let delegate = delegate.clone();
let adapter = adapter.clone();
let this = this.clone();
+ let toolchains = this
+ .update(&mut cx, |this, cx| this.toolchain_store(cx))
+ .ok()?;
let mut cx = cx.clone();
async move {
let language_server = pending_server.await?;
@@ -5524,7 +5589,7 @@ impl LspStore {
let workspace_config = adapter
.adapter
.clone()
- .workspace_configuration(&delegate, &mut cx)
+ .workspace_configuration(&delegate, toolchains.clone(), &mut cx)
.await?;
let mut initialization_options = adapter
@@ -5864,17 +5929,21 @@ impl LspStore {
}
})
.detach();
-
language_server
.on_request::<lsp::request::WorkspaceConfiguration, _, _>({
let adapter = adapter.adapter.clone();
let delegate = delegate.clone();
+ let this = this.clone();
move |params, mut cx| {
let adapter = adapter.clone();
let delegate = delegate.clone();
+ let this = this.clone();
async move {
- let workspace_config =
- adapter.workspace_configuration(&delegate, &mut cx).await?;
+ let toolchains =
+ this.update(&mut cx, |this, cx| this.toolchain_store(cx))?;
+ let workspace_config = adapter
+ .workspace_configuration(&delegate, toolchains, &mut cx)
+ .await?;
Ok(params
.items
.into_iter()
@@ -11,6 +11,7 @@ pub mod search;
mod task_inventory;
pub mod task_store;
pub mod terminals;
+pub mod toolchain_store;
pub mod worktree_store;
#[cfg(test)]
@@ -24,10 +25,7 @@ mod yarn;
use anyhow::{anyhow, Context as _, Result};
use buffer_store::{BufferStore, BufferStoreEvent};
-use client::{
- proto, Client, Collaborator, DevServerProjectId, PendingEntitySubscription, ProjectId,
- TypedEnvelope, UserStore,
-};
+use client::{proto, Client, Collaborator, PendingEntitySubscription, TypedEnvelope, UserStore};
use clock::ReplicaId;
use collections::{BTreeSet, HashMap, HashSet};
use debounced_delay::DebouncedDelay;
@@ -47,8 +45,8 @@ use itertools::Itertools;
use language::{
language_settings::InlayHintKind, proto::split_operations, Buffer, BufferEvent,
CachedLspAdapter, Capability, CodeLabel, DiagnosticEntry, Documentation, File as _, Language,
- LanguageRegistry, LanguageServerName, PointUtf16, ToOffset, ToPointUtf16, Transaction,
- Unclipped,
+ LanguageName, LanguageRegistry, LanguageServerName, PointUtf16, ToOffset, ToPointUtf16,
+ Toolchain, ToolchainList, Transaction, Unclipped,
};
use lsp::{
CompletionContext, CompletionItemKind, DocumentHighlightKind, LanguageServer, LanguageServerId,
@@ -104,7 +102,7 @@ pub use lsp_store::{
LanguageServerStatus, LanguageServerToQuery, LspStore, LspStoreEvent,
SERVER_PROGRESS_THROTTLE_TIMEOUT,
};
-
+pub use toolchain_store::ToolchainStore;
const MAX_PROJECT_SEARCH_HISTORY_SIZE: usize = 500;
const MAX_SEARCH_RESULT_FILES: usize = 5_000;
const MAX_SEARCH_RESULT_RANGES: usize = 10_000;
@@ -155,14 +153,13 @@ pub struct Project {
remotely_created_models: Arc<Mutex<RemotelyCreatedModels>>,
terminals: Terminals,
node: Option<NodeRuntime>,
- hosted_project_id: Option<ProjectId>,
- dev_server_project_id: Option<client::DevServerProjectId>,
search_history: SearchHistory,
search_included_history: SearchHistory,
search_excluded_history: SearchHistory,
snippets: Model<SnippetProvider>,
environment: Model<ProjectEnvironment>,
settings_observer: Model<SettingsObserver>,
+ toolchain_store: Option<Model<ToolchainStore>>,
}
#[derive(Default)]
@@ -217,7 +214,6 @@ enum ProjectClientState {
capability: Capability,
remote_id: u64,
replica_id: ReplicaId,
- in_room: bool,
},
}
@@ -291,6 +287,13 @@ impl ProjectPath {
path: self.path.to_string_lossy().to_string(),
}
}
+
+ pub fn root_path(worktree_id: WorktreeId) -> Self {
+ Self {
+ worktree_id,
+ path: Path::new("").into(),
+ }
+ }
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -578,6 +581,7 @@ impl Project {
LspStore::init(&client);
SettingsObserver::init(&client);
TaskStore::init(Some(&client));
+ ToolchainStore::init(&client);
}
pub fn local(
@@ -634,12 +638,15 @@ impl Project {
});
cx.subscribe(&settings_observer, Self::on_settings_observer_event)
.detach();
-
+ let toolchain_store = cx.new_model(|cx| {
+ ToolchainStore::local(languages.clone(), worktree_store.clone(), cx)
+ });
let lsp_store = cx.new_model(|cx| {
LspStore::new_local(
buffer_store.clone(),
worktree_store.clone(),
prettier_store.clone(),
+ toolchain_store.clone(),
environment.clone(),
languages.clone(),
client.http_client(),
@@ -674,14 +681,14 @@ impl Project {
local_handles: Vec::new(),
},
node: Some(node),
- hosted_project_id: None,
- dev_server_project_id: None,
search_history: Self::new_search_history(),
environment,
remotely_created_models: Default::default(),
search_included_history: Self::new_search_history(),
search_excluded_history: Self::new_search_history(),
+
+ toolchain_store: Some(toolchain_store),
}
})
}
@@ -705,7 +712,7 @@ impl Project {
let ssh_proto = ssh.read(cx).proto_client();
let worktree_store =
- cx.new_model(|_| WorktreeStore::remote(false, ssh_proto.clone(), 0, None));
+ cx.new_model(|_| WorktreeStore::remote(false, ssh_proto.clone(), SSH_PROJECT_ID));
cx.subscribe(&worktree_store, Self::on_worktree_store_event)
.detach();
@@ -738,10 +745,14 @@ impl Project {
.detach();
let environment = ProjectEnvironment::new(&worktree_store, None, cx);
+ let toolchain_store = Some(cx.new_model(|cx| {
+ ToolchainStore::remote(SSH_PROJECT_ID, ssh.read(cx).proto_client(), cx)
+ }));
let lsp_store = cx.new_model(|cx| {
LspStore::new_remote(
buffer_store.clone(),
worktree_store.clone(),
+ toolchain_store.clone(),
languages.clone(),
ssh_proto.clone(),
SSH_PROJECT_ID,
@@ -793,14 +804,14 @@ impl Project {
local_handles: Vec::new(),
},
node: Some(node),
- hosted_project_id: None,
- dev_server_project_id: None,
search_history: Self::new_search_history(),
environment,
remotely_created_models: Default::default(),
search_included_history: Self::new_search_history(),
search_excluded_history: Self::new_search_history(),
+
+ toolchain_store,
};
let ssh = ssh.read(cx);
@@ -821,6 +832,7 @@ impl Project {
LspStore::init(&ssh_proto);
SettingsObserver::init(&ssh_proto);
TaskStore::init(Some(&ssh_proto));
+ ToolchainStore::init(&ssh_proto);
this
})
@@ -898,15 +910,7 @@ impl Project {
let role = response.payload.role();
let worktree_store = cx.new_model(|_| {
- WorktreeStore::remote(
- true,
- client.clone().into(),
- response.payload.project_id,
- response
- .payload
- .dev_server_project_id
- .map(DevServerProjectId),
- )
+ WorktreeStore::remote(true, client.clone().into(), response.payload.project_id)
})?;
let buffer_store = cx.new_model(|cx| {
BufferStore::remote(worktree_store.clone(), client.clone().into(), remote_id, cx)
@@ -916,6 +920,7 @@ impl Project {
let mut lsp_store = LspStore::new_remote(
buffer_store.clone(),
worktree_store.clone(),
+ None,
languages.clone(),
client.clone().into(),
remote_id,
@@ -992,7 +997,6 @@ impl Project {
capability: Capability::ReadWrite,
remote_id,
replica_id,
- in_room: response.payload.dev_server_project_id.is_none(),
},
buffers_needing_diff: Default::default(),
git_diff_debouncer: DebouncedDelay::new(),
@@ -1000,16 +1004,12 @@ impl Project {
local_handles: Vec::new(),
},
node: None,
- hosted_project_id: None,
- dev_server_project_id: response
- .payload
- .dev_server_project_id
- .map(DevServerProjectId),
search_history: Self::new_search_history(),
search_included_history: Self::new_search_history(),
search_excluded_history: Self::new_search_history(),
environment: ProjectEnvironment::new(&worktree_store, None, cx),
remotely_created_models: Arc::new(Mutex::new(RemotelyCreatedModels::default())),
+ toolchain_store: None,
};
this.set_role(role, cx);
for worktree in worktrees {
@@ -1056,47 +1056,6 @@ impl Project {
Ok(this)
}
- pub async fn hosted(
- remote_id: ProjectId,
- user_store: Model<UserStore>,
- client: Arc<Client>,
- languages: Arc<LanguageRegistry>,
- fs: Arc<dyn Fs>,
- cx: AsyncAppContext,
- ) -> Result<Model<Self>> {
- client.authenticate_and_connect(true, &cx).await?;
-
- let subscriptions = [
- EntitySubscription::Project(client.subscribe_to_entity::<Self>(remote_id.0)?),
- EntitySubscription::BufferStore(
- client.subscribe_to_entity::<BufferStore>(remote_id.0)?,
- ),
- EntitySubscription::WorktreeStore(
- client.subscribe_to_entity::<WorktreeStore>(remote_id.0)?,
- ),
- EntitySubscription::LspStore(client.subscribe_to_entity::<LspStore>(remote_id.0)?),
- EntitySubscription::SettingsObserver(
- client.subscribe_to_entity::<SettingsObserver>(remote_id.0)?,
- ),
- ];
- let response = client
- .request_envelope(proto::JoinHostedProject {
- project_id: remote_id.0,
- })
- .await?;
- Self::from_join_project_response(
- response,
- subscriptions,
- client,
- true,
- user_store,
- languages,
- fs,
- cx,
- )
- .await
- }
-
fn new_search_history() -> SearchHistory {
SearchHistory::new(
Some(MAX_PROJECT_SEARCH_HISTORY_SIZE),
@@ -1301,43 +1260,23 @@ impl Project {
}
}
- pub fn hosted_project_id(&self) -> Option<ProjectId> {
- self.hosted_project_id
- }
-
- pub fn dev_server_project_id(&self) -> Option<DevServerProjectId> {
- self.dev_server_project_id
- }
-
- pub fn supports_terminal(&self, cx: &AppContext) -> bool {
+ pub fn supports_terminal(&self, _cx: &AppContext) -> bool {
if self.is_local() {
return true;
}
if self.is_via_ssh() {
return true;
}
- let Some(id) = self.dev_server_project_id else {
- return false;
- };
- let Some(server) = dev_server_projects::Store::global(cx)
- .read(cx)
- .dev_server_for_project(id)
- else {
- return false;
- };
- server.ssh_connection_string.is_some()
+
+ return false;
}
pub fn ssh_connection_string(&self, cx: &AppContext) -> Option<SharedString> {
if let Some(ssh_state) = &self.ssh_client {
return Some(ssh_state.read(cx).connection_string().into());
}
- let dev_server_id = self.dev_server_project_id()?;
- dev_server_projects::Store::global(cx)
- .read(cx)
- .dev_server_for_project(dev_server_id)?
- .ssh_connection_string
- .clone()
+
+ return None;
}
pub fn ssh_connection_state(&self, cx: &AppContext) -> Option<remote::ConnectionState> {
@@ -1549,17 +1488,9 @@ impl Project {
pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
if !matches!(self.client_state, ProjectClientState::Local) {
- if let ProjectClientState::Remote { in_room, .. } = &mut self.client_state {
- if *in_room || self.dev_server_project_id.is_none() {
- return Err(anyhow!("project was already shared"));
- } else {
- *in_room = true;
- return Ok(());
- }
- } else {
- return Err(anyhow!("project was already shared"));
- }
+ return Err(anyhow!("project was already shared"));
}
+
self.client_subscriptions.extend([
self.client
.subscribe_to_entity(project_id)?
@@ -1657,14 +1588,7 @@ impl Project {
fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
if self.is_via_collab() {
- if self.dev_server_project_id().is_some() {
- if let ProjectClientState::Remote { in_room, .. } = &mut self.client_state {
- *in_room = false
- }
- return Ok(());
- } else {
- return Err(anyhow!("attempted to unshare a remote project"));
- }
+ return Err(anyhow!("attempted to unshare a remote project"));
}
if let ProjectClientState::Shared { remote_id, .. } = self.client_state {
@@ -2261,29 +2185,6 @@ impl Project {
}
fn on_worktree_released(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
- if let Some(dev_server_project_id) = self.dev_server_project_id {
- let paths: Vec<String> = self
- .visible_worktrees(cx)
- .filter_map(|worktree| {
- if worktree.read(cx).id() == id_to_remove {
- None
- } else {
- Some(worktree.read(cx).abs_path().to_string_lossy().to_string())
- }
- })
- .collect();
- if !paths.is_empty() {
- let request = self.client.request(proto::UpdateDevServerProject {
- dev_server_project_id: dev_server_project_id.0,
- paths,
- });
- cx.background_executor()
- .spawn(request)
- .detach_and_log_err(cx);
- }
- return;
- }
-
if let Some(ssh) = &self.ssh_client {
ssh.read(cx)
.proto_client()
@@ -2462,6 +2363,46 @@ impl Project {
.map_err(|e| anyhow!(e))
}
+ pub fn available_toolchains(
+ &self,
+ worktree_id: WorktreeId,
+ language_name: LanguageName,
+ cx: &AppContext,
+ ) -> Task<Option<ToolchainList>> {
+ if let Some(toolchain_store) = self.toolchain_store.as_ref() {
+ toolchain_store
+ .read(cx)
+ .list_toolchains(worktree_id, language_name, cx)
+ } else {
+ Task::ready(None)
+ }
+ }
+ pub fn activate_toolchain(
+ &self,
+ worktree_id: WorktreeId,
+ toolchain: Toolchain,
+ cx: &mut AppContext,
+ ) -> Task<Option<()>> {
+ let Some(toolchain_store) = self.toolchain_store.clone() else {
+ return Task::ready(None);
+ };
+ toolchain_store.update(cx, |this, cx| {
+ this.activate_toolchain(worktree_id, toolchain, cx)
+ })
+ }
+ pub fn active_toolchain(
+ &self,
+ worktree_id: WorktreeId,
+ language_name: LanguageName,
+ cx: &AppContext,
+ ) -> Task<Option<Toolchain>> {
+ let Some(toolchain_store) = self.toolchain_store.clone() else {
+ return Task::ready(None);
+ };
+ toolchain_store
+ .read(cx)
+ .active_toolchain(worktree_id, language_name, cx)
+ }
pub fn language_server_statuses<'a>(
&'a self,
cx: &'a AppContext,
@@ -3148,12 +3089,12 @@ impl Project {
match &self.client_state {
ProjectClientState::Shared { .. } => true,
ProjectClientState::Local => false,
- ProjectClientState::Remote { in_room, .. } => *in_room,
+ ProjectClientState::Remote { .. } => true,
}
}
/// Returns the resolved version of `path`, that was found in `buffer`, if it exists.
- pub fn resolve_existing_file_path(
+ pub fn resolve_path_in_buffer(
&self,
path: &str,
buffer: &Model<Buffer>,
@@ -3161,47 +3102,56 @@ impl Project {
) -> Task<Option<ResolvedPath>> {
let path_buf = PathBuf::from(path);
if path_buf.is_absolute() || path.starts_with("~") {
- self.resolve_abs_file_path(path, cx)
+ self.resolve_abs_path(path, cx)
} else {
self.resolve_path_in_worktrees(path_buf, buffer, cx)
}
}
- pub fn abs_file_path_exists(&self, path: &str, cx: &mut ModelContext<Self>) -> Task<bool> {
- let resolve_task = self.resolve_abs_file_path(path, cx);
+ pub fn resolve_abs_file_path(
+ &self,
+ path: &str,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Option<ResolvedPath>> {
+ let resolve_task = self.resolve_abs_path(path, cx);
cx.background_executor().spawn(async move {
let resolved_path = resolve_task.await;
- resolved_path.is_some()
+ resolved_path.filter(|path| path.is_file())
})
}
- fn resolve_abs_file_path(
+ pub fn resolve_abs_path(
&self,
path: &str,
cx: &mut ModelContext<Self>,
) -> Task<Option<ResolvedPath>> {
if self.is_local() {
let expanded = PathBuf::from(shellexpand::tilde(&path).into_owned());
-
let fs = self.fs.clone();
cx.background_executor().spawn(async move {
let path = expanded.as_path();
- let exists = fs.is_file(path).await;
+ let metadata = fs.metadata(path).await.ok().flatten();
- exists.then(|| ResolvedPath::AbsPath(expanded))
+ metadata.map(|metadata| ResolvedPath::AbsPath {
+ path: expanded,
+ is_dir: metadata.is_dir,
+ })
})
} else if let Some(ssh_client) = self.ssh_client.as_ref() {
let request = ssh_client
.read(cx)
.proto_client()
- .request(proto::CheckFileExists {
+ .request(proto::GetPathMetadata {
project_id: SSH_PROJECT_ID,
path: path.to_string(),
});
cx.background_executor().spawn(async move {
let response = request.await.log_err()?;
if response.exists {
- Some(ResolvedPath::AbsPath(PathBuf::from(response.path)))
+ Some(ResolvedPath::AbsPath {
+ path: PathBuf::from(response.path),
+ is_dir: response.is_dir,
+ })
} else {
None
}
@@ -3240,10 +3190,14 @@ impl Project {
resolved.strip_prefix(root_entry_path).unwrap_or(&resolved);
worktree.entry_for_path(stripped).map(|entry| {
- ResolvedPath::ProjectPath(ProjectPath {
+ let project_path = ProjectPath {
worktree_id: worktree.id(),
path: entry.path.clone(),
- })
+ };
+ ResolvedPath::ProjectPath {
+ project_path,
+ is_dir: entry.is_dir(),
+ }
})
})
.ok()?;
@@ -3275,20 +3229,6 @@ impl Project {
let response = response.await?;
Ok(response.entries.into_iter().map(PathBuf::from).collect())
})
- } else if let Some(dev_server) = self.dev_server_project_id().and_then(|id| {
- dev_server_projects::Store::global(cx)
- .read(cx)
- .dev_server_for_project(id)
- }) {
- let request = proto::ListRemoteDirectory {
- dev_server_id: dev_server.id.0,
- path: query,
- };
- let response = self.client.request(request);
- cx.background_executor().spawn(async move {
- let response = response.await?;
- Ok(response.entries.into_iter().map(PathBuf::from).collect())
- })
} else {
Task::ready(Err(anyhow!("cannot list directory in remote project")))
}
@@ -3456,6 +3396,25 @@ impl Project {
worktree.get_local_repo(&root_entry)?.repo().clone().into()
}
+ pub fn branches(
+ &self,
+ project_path: ProjectPath,
+ cx: &AppContext,
+ ) -> Task<Result<Vec<git::repository::Branch>>> {
+ self.worktree_store().read(cx).branches(project_path, cx)
+ }
+
+ pub fn update_or_create_branch(
+ &self,
+ repository: ProjectPath,
+ new_branch: String,
+ cx: &AppContext,
+ ) -> Task<Result<()>> {
+ self.worktree_store()
+ .read(cx)
+ .update_or_create_branch(repository, new_branch, cx)
+ }
+
pub fn blame_buffer(
&self,
buffer: &Model<Buffer>,
@@ -3644,6 +3603,13 @@ impl Project {
anyhow::Ok(())
})??;
+ // We drop `this` to avoid holding a reference in this future for too
+ // long.
+ // If we keep the reference, we might not drop the `Project` early
+ // enough when closing a window and it will only get releases on the
+ // next `flush_effects()` call.
+ drop(this);
+
let answer = rx.next().await;
Ok(LanguageServerPromptResponse {
@@ -4203,24 +4169,41 @@ fn resolve_path(base: &Path, path: &Path) -> PathBuf {
/// or an AbsPath and that *exists*.
#[derive(Debug, Clone)]
pub enum ResolvedPath {
- ProjectPath(ProjectPath),
- AbsPath(PathBuf),
+ ProjectPath {
+ project_path: ProjectPath,
+ is_dir: bool,
+ },
+ AbsPath {
+ path: PathBuf,
+ is_dir: bool,
+ },
}
impl ResolvedPath {
pub fn abs_path(&self) -> Option<&Path> {
match self {
- Self::AbsPath(path) => Some(path.as_path()),
+ Self::AbsPath { path, .. } => Some(path.as_path()),
_ => None,
}
}
pub fn project_path(&self) -> Option<&ProjectPath> {
match self {
- Self::ProjectPath(path) => Some(&path),
+ Self::ProjectPath { project_path, .. } => Some(&project_path),
_ => None,
}
}
+
+ pub fn is_file(&self) -> bool {
+ !self.is_dir()
+ }
+
+ pub fn is_dir(&self) -> bool {
+ match self {
+ Self::ProjectPath { is_dir, .. } => *is_dir,
+ Self::AbsPath { is_dir, .. } => *is_dir,
+ }
+ }
}
impl Item for Buffer {
@@ -37,11 +37,8 @@ pub enum TerminalKind {
/// SshCommand describes how to connect to a remote server
#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum SshCommand {
- /// DevServers give a string from the user
- DevServer(String),
- /// Direct ssh has a list of arguments to pass to ssh
- Direct(Vec<String>),
+pub struct SshCommand {
+ arguments: Vec<String>,
}
impl Project {
@@ -73,19 +70,12 @@ impl Project {
if let Some(args) = ssh_client.ssh_args() {
return Some((
ssh_client.connection_options().host.clone(),
- SshCommand::Direct(args),
+ SshCommand { arguments: args },
));
}
}
- let dev_server_project_id = self.dev_server_project_id()?;
- let projects_store = dev_server_projects::Store::global(cx).read(cx);
- let ssh_command = projects_store
- .dev_server_for_project(dev_server_project_id)?
- .ssh_connection_string
- .as_ref()?
- .to_string();
- Some(("".to_string(), SshCommand::DevServer(ssh_command)))
+ return None;
}
pub fn create_terminal(
@@ -399,14 +389,8 @@ pub fn wrap_for_ssh(
};
let shell_invocation = format!("sh -c {}", shlex::try_quote(&commands).unwrap());
- let (program, mut args) = match ssh_command {
- SshCommand::DevServer(ssh_command) => {
- let mut args = shlex::split(ssh_command).unwrap_or_default();
- let program = args.drain(0..1).next().unwrap_or("ssh".to_string());
- (program, args)
- }
- SshCommand::Direct(ssh_args) => ("ssh".to_string(), ssh_args.clone()),
- };
+ let program = "ssh".to_string();
+ let mut args = ssh_command.arguments.clone();
args.push("-t".to_string());
args.push(shell_invocation);
@@ -0,0 +1,416 @@
+use std::sync::Arc;
+
+use anyhow::{bail, Result};
+
+use async_trait::async_trait;
+use collections::BTreeMap;
+use gpui::{
+ AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Subscription, Task,
+ WeakModel,
+};
+use language::{LanguageName, LanguageRegistry, LanguageToolchainStore, Toolchain, ToolchainList};
+use rpc::{proto, AnyProtoClient, TypedEnvelope};
+use settings::WorktreeId;
+use util::ResultExt as _;
+
+use crate::worktree_store::WorktreeStore;
+
+pub struct ToolchainStore(ToolchainStoreInner);
+enum ToolchainStoreInner {
+ Local(Model<LocalToolchainStore>, #[allow(dead_code)] Subscription),
+ Remote(Model<RemoteToolchainStore>),
+}
+
+impl EventEmitter<ToolchainStoreEvent> for ToolchainStore {}
+impl ToolchainStore {
+ pub fn init(client: &AnyProtoClient) {
+ client.add_model_request_handler(Self::handle_activate_toolchain);
+ client.add_model_request_handler(Self::handle_list_toolchains);
+ client.add_model_request_handler(Self::handle_active_toolchain);
+ }
+
+ pub fn local(
+ languages: Arc<LanguageRegistry>,
+ worktree_store: Model<WorktreeStore>,
+ cx: &mut ModelContext<Self>,
+ ) -> Self {
+ let model = cx.new_model(|_| LocalToolchainStore {
+ languages,
+ worktree_store,
+ active_toolchains: Default::default(),
+ });
+ let subscription = cx.subscribe(&model, |_, _, e: &ToolchainStoreEvent, cx| {
+ cx.emit(e.clone())
+ });
+ Self(ToolchainStoreInner::Local(model, subscription))
+ }
+ pub(super) fn remote(project_id: u64, client: AnyProtoClient, cx: &mut AppContext) -> Self {
+ Self(ToolchainStoreInner::Remote(
+ cx.new_model(|_| RemoteToolchainStore { client, project_id }),
+ ))
+ }
+ pub(crate) fn activate_toolchain(
+ &self,
+ worktree_id: WorktreeId,
+ toolchain: Toolchain,
+ cx: &mut AppContext,
+ ) -> Task<Option<()>> {
+ match &self.0 {
+ ToolchainStoreInner::Local(local, _) => local.update(cx, |this, cx| {
+ this.activate_toolchain(worktree_id, toolchain, cx)
+ }),
+ ToolchainStoreInner::Remote(remote) => {
+ remote
+ .read(cx)
+ .activate_toolchain(worktree_id, toolchain, cx)
+ }
+ }
+ }
+ pub(crate) fn list_toolchains(
+ &self,
+ worktree_id: WorktreeId,
+ language_name: LanguageName,
+ cx: &AppContext,
+ ) -> Task<Option<ToolchainList>> {
+ match &self.0 {
+ ToolchainStoreInner::Local(local, _) => {
+ local
+ .read(cx)
+ .list_toolchains(worktree_id, language_name, cx)
+ }
+ ToolchainStoreInner::Remote(remote) => {
+ remote
+ .read(cx)
+ .list_toolchains(worktree_id, language_name, cx)
+ }
+ }
+ }
+ pub(crate) fn active_toolchain(
+ &self,
+ worktree_id: WorktreeId,
+ language_name: LanguageName,
+ cx: &AppContext,
+ ) -> Task<Option<Toolchain>> {
+ match &self.0 {
+ ToolchainStoreInner::Local(local, _) => {
+ local
+ .read(cx)
+ .active_toolchain(worktree_id, language_name, cx)
+ }
+ ToolchainStoreInner::Remote(remote) => {
+ remote
+ .read(cx)
+ .active_toolchain(worktree_id, language_name, cx)
+ }
+ }
+ }
+ async fn handle_activate_toolchain(
+ this: Model<Self>,
+ envelope: TypedEnvelope<proto::ActivateToolchain>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::Ack> {
+ this.update(&mut cx, |this, cx| {
+ let language_name = LanguageName::from_proto(envelope.payload.language_name);
+ let Some(toolchain) = envelope.payload.toolchain else {
+ bail!("Missing `toolchain` in payload");
+ };
+ let toolchain = Toolchain {
+ name: toolchain.name.into(),
+ path: toolchain.path.into(),
+ language_name,
+ };
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ Ok(this.activate_toolchain(worktree_id, toolchain, cx))
+ })??
+ .await;
+ Ok(proto::Ack {})
+ }
+ async fn handle_active_toolchain(
+ this: Model<Self>,
+ envelope: TypedEnvelope<proto::ActiveToolchain>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::ActiveToolchainResponse> {
+ let toolchain = this
+ .update(&mut cx, |this, cx| {
+ let language_name = LanguageName::from_proto(envelope.payload.language_name);
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ this.active_toolchain(worktree_id, language_name, cx)
+ })?
+ .await;
+
+ Ok(proto::ActiveToolchainResponse {
+ toolchain: toolchain.map(|toolchain| proto::Toolchain {
+ name: toolchain.name.into(),
+ path: toolchain.path.into(),
+ }),
+ })
+ }
+
+ async fn handle_list_toolchains(
+ this: Model<Self>,
+ envelope: TypedEnvelope<proto::ListToolchains>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::ListToolchainsResponse> {
+ let toolchains = this
+ .update(&mut cx, |this, cx| {
+ let language_name = LanguageName::from_proto(envelope.payload.language_name);
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ this.list_toolchains(worktree_id, language_name, cx)
+ })?
+ .await;
+ let has_values = toolchains.is_some();
+ let groups = if let Some(toolchains) = &toolchains {
+ toolchains
+ .groups
+ .iter()
+ .filter_map(|group| {
+ Some(proto::ToolchainGroup {
+ start_index: u64::try_from(group.0).ok()?,
+ name: String::from(group.1.as_ref()),
+ })
+ })
+ .collect()
+ } else {
+ vec![]
+ };
+ let toolchains = if let Some(toolchains) = toolchains {
+ toolchains
+ .toolchains
+ .into_iter()
+ .map(|toolchain| proto::Toolchain {
+ name: toolchain.name.to_string(),
+ path: toolchain.path.to_string(),
+ })
+ .collect::<Vec<_>>()
+ } else {
+ vec![]
+ };
+
+ Ok(proto::ListToolchainsResponse {
+ has_values,
+ toolchains,
+ groups,
+ })
+ }
+ pub(crate) fn as_language_toolchain_store(&self) -> Arc<dyn LanguageToolchainStore> {
+ match &self.0 {
+ ToolchainStoreInner::Local(local, _) => Arc::new(LocalStore(local.downgrade())),
+ ToolchainStoreInner::Remote(remote) => Arc::new(RemoteStore(remote.downgrade())),
+ }
+ }
+}
+
+struct LocalToolchainStore {
+ languages: Arc<LanguageRegistry>,
+ worktree_store: Model<WorktreeStore>,
+ active_toolchains: BTreeMap<(WorktreeId, LanguageName), Toolchain>,
+}
+
+#[async_trait(?Send)]
+impl language::LanguageToolchainStore for LocalStore {
+ async fn active_toolchain(
+ self: Arc<Self>,
+ worktree_id: WorktreeId,
+ language_name: LanguageName,
+ cx: &mut AsyncAppContext,
+ ) -> Option<Toolchain> {
+ self.0
+ .update(cx, |this, cx| {
+ this.active_toolchain(worktree_id, language_name, cx)
+ })
+ .ok()?
+ .await
+ }
+}
+
+#[async_trait(?Send)]
+impl language::LanguageToolchainStore for RemoteStore {
+ async fn active_toolchain(
+ self: Arc<Self>,
+ worktree_id: WorktreeId,
+ language_name: LanguageName,
+ cx: &mut AsyncAppContext,
+ ) -> Option<Toolchain> {
+ self.0
+ .update(cx, |this, cx| {
+ this.active_toolchain(worktree_id, language_name, cx)
+ })
+ .ok()?
+ .await
+ }
+}
+
+pub(crate) struct EmptyToolchainStore;
+#[async_trait(?Send)]
+impl language::LanguageToolchainStore for EmptyToolchainStore {
+ async fn active_toolchain(
+ self: Arc<Self>,
+ _: WorktreeId,
+ _: LanguageName,
+ _: &mut AsyncAppContext,
+ ) -> Option<Toolchain> {
+ None
+ }
+}
+struct LocalStore(WeakModel<LocalToolchainStore>);
+struct RemoteStore(WeakModel<RemoteToolchainStore>);
+
+#[derive(Clone)]
+pub(crate) enum ToolchainStoreEvent {
+ ToolchainActivated,
+}
+
+impl EventEmitter<ToolchainStoreEvent> for LocalToolchainStore {}
+
+impl LocalToolchainStore {
+ pub(crate) fn activate_toolchain(
+ &self,
+ worktree_id: WorktreeId,
+ toolchain: Toolchain,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Option<()>> {
+ cx.spawn(move |this, mut cx| async move {
+ this.update(&mut cx, |this, cx| {
+ this.active_toolchains.insert(
+ (worktree_id, toolchain.language_name.clone()),
+ toolchain.clone(),
+ );
+ cx.emit(ToolchainStoreEvent::ToolchainActivated);
+ })
+ .ok();
+ Some(())
+ })
+ }
+ pub(crate) fn list_toolchains(
+ &self,
+ worktree_id: WorktreeId,
+ language_name: LanguageName,
+ cx: &AppContext,
+ ) -> Task<Option<ToolchainList>> {
+ let registry = self.languages.clone();
+ let Some(root) = self
+ .worktree_store
+ .read(cx)
+ .worktree_for_id(worktree_id, cx)
+ .map(|worktree| worktree.read(cx).abs_path())
+ else {
+ return Task::ready(None);
+ };
+ cx.spawn(|_| async move {
+ let language = registry.language_for_name(&language_name.0).await.ok()?;
+ let toolchains = language.toolchain_lister()?.list(root.to_path_buf()).await;
+ Some(toolchains)
+ })
+ }
+ pub(crate) fn active_toolchain(
+ &self,
+ worktree_id: WorktreeId,
+ language_name: LanguageName,
+ _: &AppContext,
+ ) -> Task<Option<Toolchain>> {
+ Task::ready(
+ self.active_toolchains
+ .get(&(worktree_id, language_name))
+ .cloned(),
+ )
+ }
+}
+struct RemoteToolchainStore {
+ client: AnyProtoClient,
+ project_id: u64,
+}
+
+impl RemoteToolchainStore {
+ pub(crate) fn activate_toolchain(
+ &self,
+ worktree_id: WorktreeId,
+ toolchain: Toolchain,
+ cx: &AppContext,
+ ) -> Task<Option<()>> {
+ let project_id = self.project_id;
+ let client = self.client.clone();
+ cx.spawn(move |_| async move {
+ let _ = client
+ .request(proto::ActivateToolchain {
+ project_id,
+ worktree_id: worktree_id.to_proto(),
+ language_name: toolchain.language_name.into(),
+ toolchain: Some(proto::Toolchain {
+ name: toolchain.name.into(),
+ path: toolchain.path.into(),
+ }),
+ })
+ .await
+ .log_err()?;
+ Some(())
+ })
+ }
+ pub(crate) fn list_toolchains(
+ &self,
+ worktree_id: WorktreeId,
+ language_name: LanguageName,
+ cx: &AppContext,
+ ) -> Task<Option<ToolchainList>> {
+ let project_id = self.project_id;
+ let client = self.client.clone();
+ cx.spawn(move |_| async move {
+ let response = client
+ .request(proto::ListToolchains {
+ project_id,
+ worktree_id: worktree_id.to_proto(),
+ language_name: language_name.clone().into(),
+ })
+ .await
+ .log_err()?;
+ if !response.has_values {
+ return None;
+ }
+ let toolchains = response
+ .toolchains
+ .into_iter()
+ .map(|toolchain| Toolchain {
+ language_name: language_name.clone(),
+ name: toolchain.name.into(),
+ path: toolchain.path.into(),
+ })
+ .collect();
+ let groups = response
+ .groups
+ .into_iter()
+ .filter_map(|group| {
+ Some((usize::try_from(group.start_index).ok()?, group.name.into()))
+ })
+ .collect();
+ Some(ToolchainList {
+ toolchains,
+ default: None,
+ groups,
+ })
+ })
+ }
+ pub(crate) fn active_toolchain(
+ &self,
+ worktree_id: WorktreeId,
+ language_name: LanguageName,
+ cx: &AppContext,
+ ) -> Task<Option<Toolchain>> {
+ let project_id = self.project_id;
+ let client = self.client.clone();
+ cx.spawn(move |_| async move {
+ let response = client
+ .request(proto::ActiveToolchain {
+ project_id,
+ worktree_id: worktree_id.to_proto(),
+ language_name: language_name.clone().into(),
+ })
+ .await
+ .log_err()?;
+
+ response.toolchain.map(|toolchain| Toolchain {
+ language_name: language_name.clone(),
+ name: toolchain.name.into(),
+ path: toolchain.path.into(),
+ })
+ })
+ }
+}
@@ -1,11 +1,9 @@
use std::{
- cell::RefCell,
path::{Path, PathBuf},
sync::{atomic::AtomicUsize, Arc},
};
use anyhow::{anyhow, Context as _, Result};
-use client::DevServerProjectId;
use collections::{HashMap, HashSet};
use fs::Fs;
use futures::{
@@ -41,7 +39,6 @@ enum WorktreeStoreState {
fs: Arc<dyn Fs>,
},
Remote {
- dev_server_project_id: Option<DevServerProjectId>,
upstream_client: AnyProtoClient,
upstream_project_id: u64,
},
@@ -76,6 +73,8 @@ impl WorktreeStore {
client.add_model_request_handler(Self::handle_copy_project_entry);
client.add_model_request_handler(Self::handle_delete_project_entry);
client.add_model_request_handler(Self::handle_expand_project_entry);
+ client.add_model_request_handler(Self::handle_git_branches);
+ client.add_model_request_handler(Self::handle_update_branch);
}
pub fn local(retain_worktrees: bool, fs: Arc<dyn Fs>) -> Self {
@@ -94,7 +93,6 @@ impl WorktreeStore {
retain_worktrees: bool,
upstream_client: AnyProtoClient,
upstream_project_id: u64,
- dev_server_project_id: Option<DevServerProjectId>,
) -> Self {
Self {
next_entry_id: Default::default(),
@@ -106,7 +104,6 @@ impl WorktreeStore {
state: WorktreeStoreState::Remote {
upstream_client,
upstream_project_id,
- dev_server_project_id,
},
}
}
@@ -132,6 +129,13 @@ impl WorktreeStore {
.find(|worktree| worktree.read(cx).id() == id)
}
+ pub fn current_branch(&self, repository: ProjectPath, cx: &AppContext) -> Option<Arc<str>> {
+ self.worktree_for_id(repository.worktree_id, cx)?
+ .read(cx)
+ .git_entry(repository.path)?
+ .branch()
+ }
+
pub fn worktree_for_entry(
&self,
entry_id: ProjectEntryId,
@@ -196,18 +200,9 @@ impl WorktreeStore {
if !self.loading_worktrees.contains_key(&path) {
let task = match &self.state {
WorktreeStoreState::Remote {
- upstream_client,
- dev_server_project_id,
- ..
+ upstream_client, ..
} => {
- if let Some(dev_server_project_id) = dev_server_project_id {
- self.create_dev_server_worktree(
- upstream_client.clone(),
- *dev_server_project_id,
- abs_path,
- cx,
- )
- } else if upstream_client.is_via_collab() {
+ if upstream_client.is_via_collab() {
Task::ready(Err(Arc::new(anyhow!("cannot create worktrees via collab"))))
} else {
self.create_ssh_worktree(upstream_client.clone(), abs_path, visible, cx)
@@ -322,51 +317,6 @@ impl WorktreeStore {
})
}
- fn create_dev_server_worktree(
- &mut self,
- client: AnyProtoClient,
- dev_server_project_id: DevServerProjectId,
- abs_path: impl AsRef<Path>,
- cx: &mut ModelContext<Self>,
- ) -> Task<Result<Model<Worktree>, Arc<anyhow::Error>>> {
- let path: Arc<Path> = abs_path.as_ref().into();
- let mut paths: Vec<String> = self
- .visible_worktrees(cx)
- .map(|worktree| worktree.read(cx).abs_path().to_string_lossy().to_string())
- .collect();
- paths.push(path.to_string_lossy().to_string());
- let request = client.request(proto::UpdateDevServerProject {
- dev_server_project_id: dev_server_project_id.0,
- paths,
- });
-
- let abs_path = abs_path.as_ref().to_path_buf();
- cx.spawn(move |project, cx| async move {
- let (tx, rx) = futures::channel::oneshot::channel();
- let tx = RefCell::new(Some(tx));
- let Some(project) = project.upgrade() else {
- return Err(anyhow!("project dropped"))?;
- };
- let observer = cx.update(|cx| {
- cx.observe(&project, move |project, cx| {
- let abs_path = abs_path.clone();
- project.update(cx, |project, cx| {
- if let Some((worktree, _)) = project.find_worktree(&abs_path, cx) {
- if let Some(tx) = tx.borrow_mut().take() {
- tx.send(worktree).ok();
- }
- }
- })
- })
- })?;
-
- request.await?;
- let worktree = rx.await.map_err(|e| anyhow!(e))?;
- drop(observer);
- Ok(worktree)
- })
- }
-
pub fn add(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
let worktree_id = worktree.read(cx).id();
debug_assert!(self.worktrees().all(|w| w.read(cx).id() != worktree_id));
@@ -895,6 +845,131 @@ impl WorktreeStore {
Ok(())
}
+ pub fn branches(
+ &self,
+ project_path: ProjectPath,
+ cx: &AppContext,
+ ) -> Task<Result<Vec<git::repository::Branch>>> {
+ let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
+ return Task::ready(Err(anyhow!("No worktree found for ProjectPath")));
+ };
+
+ match worktree.read(cx) {
+ Worktree::Local(local_worktree) => {
+ let branches = util::maybe!({
+ let worktree_error = |error| {
+ format!(
+ "{} for worktree {}",
+ error,
+ local_worktree.abs_path().to_string_lossy()
+ )
+ };
+
+ let entry = local_worktree
+ .git_entry(project_path.path)
+ .with_context(|| worktree_error("No git entry found"))?;
+
+ let repo = local_worktree
+ .get_local_repo(&entry)
+ .with_context(|| worktree_error("No repository found"))?
+ .repo()
+ .clone();
+
+ repo.branches()
+ });
+
+ Task::ready(branches)
+ }
+ Worktree::Remote(remote_worktree) => {
+ let request = remote_worktree.client().request(proto::GitBranches {
+ project_id: remote_worktree.project_id(),
+ repository: Some(proto::ProjectPath {
+ worktree_id: project_path.worktree_id.to_proto(),
+ path: project_path.path.to_string_lossy().to_string(), // Root path
+ }),
+ });
+
+ cx.background_executor().spawn(async move {
+ let response = request.await?;
+
+ let branches = response
+ .branches
+ .into_iter()
+ .map(|proto_branch| git::repository::Branch {
+ is_head: proto_branch.is_head,
+ name: proto_branch.name.into(),
+ unix_timestamp: proto_branch
+ .unix_timestamp
+ .map(|timestamp| timestamp as i64),
+ })
+ .collect();
+
+ Ok(branches)
+ })
+ }
+ }
+ }
+
+ pub fn update_or_create_branch(
+ &self,
+ repository: ProjectPath,
+ new_branch: String,
+ cx: &AppContext,
+ ) -> Task<Result<()>> {
+ let Some(worktree) = self.worktree_for_id(repository.worktree_id, cx) else {
+ return Task::ready(Err(anyhow!("No worktree found for ProjectPath")));
+ };
+
+ match worktree.read(cx) {
+ Worktree::Local(local_worktree) => {
+ let result = util::maybe!({
+ let worktree_error = |error| {
+ format!(
+ "{} for worktree {}",
+ error,
+ local_worktree.abs_path().to_string_lossy()
+ )
+ };
+
+ let entry = local_worktree
+ .git_entry(repository.path)
+ .with_context(|| worktree_error("No git entry found"))?;
+
+ let repo = local_worktree
+ .get_local_repo(&entry)
+ .with_context(|| worktree_error("No repository found"))?
+ .repo()
+ .clone();
+
+ if !repo.branch_exits(&new_branch)? {
+ repo.create_branch(&new_branch)?;
+ }
+
+ repo.change_branch(&new_branch)?;
+
+ Ok(())
+ });
+
+ Task::ready(result)
+ }
+ Worktree::Remote(remote_worktree) => {
+ let request = remote_worktree.client().request(proto::UpdateGitBranch {
+ project_id: remote_worktree.project_id(),
+ repository: Some(proto::ProjectPath {
+ worktree_id: repository.worktree_id.to_proto(),
+ path: repository.path.to_string_lossy().to_string(), // Root path
+ }),
+ branch_name: new_branch,
+ });
+
+ cx.background_executor().spawn(async move {
+ request.await?;
+ Ok(())
+ })
+ }
+ }
+ }
+
async fn filter_paths(
fs: &Arc<dyn Fs>,
mut input: Receiver<MatchingEntry>,
@@ -976,6 +1051,61 @@ impl WorktreeStore {
.ok_or_else(|| anyhow!("invalid request"))?;
Worktree::handle_expand_entry(worktree, envelope.payload, cx).await
}
+
+ pub async fn handle_git_branches(
+ this: Model<Self>,
+ branches: TypedEnvelope<proto::GitBranches>,
+ cx: AsyncAppContext,
+ ) -> Result<proto::GitBranchesResponse> {
+ let project_path = branches
+ .payload
+ .repository
+ .clone()
+ .context("Invalid GitBranches call")?;
+ let project_path = ProjectPath {
+ worktree_id: WorktreeId::from_proto(project_path.worktree_id),
+ path: Path::new(&project_path.path).into(),
+ };
+
+ let branches = this
+ .read_with(&cx, |this, cx| this.branches(project_path, cx))?
+ .await?;
+
+ Ok(proto::GitBranchesResponse {
+ branches: branches
+ .into_iter()
+ .map(|branch| proto::Branch {
+ is_head: branch.is_head,
+ name: branch.name.to_string(),
+ unix_timestamp: branch.unix_timestamp.map(|timestamp| timestamp as u64),
+ })
+ .collect(),
+ })
+ }
+
+ pub async fn handle_update_branch(
+ this: Model<Self>,
+ update_branch: TypedEnvelope<proto::UpdateGitBranch>,
+ cx: AsyncAppContext,
+ ) -> Result<proto::Ack> {
+ let project_path = update_branch
+ .payload
+ .repository
+ .clone()
+ .context("Invalid GitBranches call")?;
+ let project_path = ProjectPath {
+ worktree_id: WorktreeId::from_proto(project_path.worktree_id),
+ path: Path::new(&project_path.path).into(),
+ };
+ let new_branch = update_branch.payload.branch_name;
+
+ this.read_with(&cx, |this, cx| {
+ this.update_or_create_branch(project_path, new_branch, cx)
+ })?
+ .await?;
+
+ Ok(proto::Ack {})
+ }
}
#[derive(Clone, Debug)]
@@ -30,6 +30,7 @@ serde.workspace = true
serde_derive.workspace = true
serde_json.workspace = true
settings.workspace = true
+smallvec.workspace = true
theme.workspace = true
ui.workspace = true
util.workspace = true
@@ -16,12 +16,13 @@ use anyhow::{anyhow, Context as _, Result};
use collections::{hash_map, BTreeSet, HashMap};
use git::repository::GitFileStatus;
use gpui::{
- actions, anchored, deferred, div, impl_actions, px, uniform_list, Action, AnyElement,
- AppContext, AssetSource, AsyncWindowContext, ClipboardItem, DismissEvent, Div, DragMoveEvent,
- EventEmitter, ExternalPaths, FocusHandle, FocusableView, InteractiveElement, KeyContext,
- ListHorizontalSizingBehavior, ListSizingBehavior, Model, MouseButton, MouseDownEvent,
- ParentElement, Pixels, Point, PromptLevel, Render, Stateful, Styled, Subscription, Task,
- UniformListScrollHandle, View, ViewContext, VisualContext as _, WeakView, WindowContext,
+ actions, anchored, deferred, div, impl_actions, point, px, size, uniform_list, Action,
+ AnyElement, AppContext, AssetSource, AsyncWindowContext, Bounds, ClipboardItem, DismissEvent,
+ Div, DragMoveEvent, EventEmitter, ExternalPaths, FocusHandle, FocusableView,
+ InteractiveElement, KeyContext, ListHorizontalSizingBehavior, ListSizingBehavior, Model,
+ MouseButton, MouseDownEvent, ParentElement, Pixels, Point, PromptLevel, Render, Stateful,
+ Styled, Subscription, Task, UniformListScrollHandle, View, ViewContext, VisualContext as _,
+ WeakView, WindowContext,
};
use indexmap::IndexMap;
use menu::{Confirm, SelectFirst, SelectLast, SelectNext, SelectPrev};
@@ -31,6 +32,7 @@ use project::{
};
use project_panel_settings::{ProjectPanelDockPosition, ProjectPanelSettings};
use serde::{Deserialize, Serialize};
+use smallvec::SmallVec;
use std::{
cell::OnceCell,
collections::HashSet,
@@ -41,7 +43,10 @@ use std::{
time::Duration,
};
use theme::ThemeSettings;
-use ui::{prelude::*, v_flex, ContextMenu, Icon, KeyBinding, Label, ListItem, Tooltip};
+use ui::{
+ prelude::*, v_flex, ContextMenu, Icon, IndentGuideColors, IndentGuideLayout, KeyBinding, Label,
+ ListItem, Tooltip,
+};
use util::{maybe, ResultExt, TryFutureExt};
use workspace::{
dock::{DockPosition, Panel, PanelEvent},
@@ -89,12 +94,18 @@ pub struct ProjectPanel {
struct EditState {
worktree_id: WorktreeId,
entry_id: ProjectEntryId,
- is_new_entry: bool,
+ leaf_entry_id: Option<ProjectEntryId>,
is_dir: bool,
depth: usize,
processing_filename: Option<String>,
}
+impl EditState {
+ fn is_new_entry(&self) -> bool {
+ self.leaf_entry_id.is_none()
+ }
+}
+
#[derive(Clone, Debug)]
enum ClipboardEntry {
Copied(BTreeSet<SelectedEntry>),
@@ -492,13 +503,14 @@ impl ProjectPanel {
if let Some((worktree, entry)) = self.selected_sub_entry(cx) {
let auto_fold_dirs = ProjectPanelSettings::get_global(cx).auto_fold_dirs;
+ let worktree = worktree.read(cx);
let is_root = Some(entry) == worktree.root_entry();
let is_dir = entry.is_dir();
let is_foldable = auto_fold_dirs && self.is_foldable(entry, worktree);
let is_unfoldable = auto_fold_dirs && self.is_unfoldable(entry, worktree);
let worktree_id = worktree.id();
let is_read_only = project.is_read_only(cx);
- let is_remote = project.is_via_collab() && project.dev_server_project_id().is_none();
+ let is_remote = project.is_via_collab();
let is_local = project.is_local();
let context_menu = ContextMenu::build(cx, |menu, cx| {
@@ -654,42 +666,52 @@ impl ProjectPanel {
}
fn collapse_selected_entry(&mut self, _: &CollapseSelectedEntry, cx: &mut ViewContext<Self>) {
- if let Some((worktree, mut entry)) = self.selected_entry(cx) {
- if let Some(folded_ancestors) = self.ancestors.get_mut(&entry.id) {
- if folded_ancestors.current_ancestor_depth + 1
- < folded_ancestors.max_ancestor_depth()
- {
- folded_ancestors.current_ancestor_depth += 1;
- cx.notify();
- return;
- }
+ let Some((worktree, entry)) = self.selected_entry_handle(cx) else {
+ return;
+ };
+ self.collapse_entry(entry.clone(), worktree, cx)
+ }
+
+ fn collapse_entry(
+ &mut self,
+ entry: Entry,
+ worktree: Model<Worktree>,
+ cx: &mut ViewContext<Self>,
+ ) {
+ let worktree = worktree.read(cx);
+ if let Some(folded_ancestors) = self.ancestors.get_mut(&entry.id) {
+ if folded_ancestors.current_ancestor_depth + 1 < folded_ancestors.max_ancestor_depth() {
+ folded_ancestors.current_ancestor_depth += 1;
+ cx.notify();
+ return;
}
- let worktree_id = worktree.id();
- let expanded_dir_ids =
- if let Some(expanded_dir_ids) = self.expanded_dir_ids.get_mut(&worktree_id) {
- expanded_dir_ids
- } else {
- return;
- };
+ }
+ let worktree_id = worktree.id();
+ let expanded_dir_ids =
+ if let Some(expanded_dir_ids) = self.expanded_dir_ids.get_mut(&worktree_id) {
+ expanded_dir_ids
+ } else {
+ return;
+ };
- loop {
- let entry_id = entry.id;
- match expanded_dir_ids.binary_search(&entry_id) {
- Ok(ix) => {
- expanded_dir_ids.remove(ix);
- self.update_visible_entries(Some((worktree_id, entry_id)), cx);
- cx.notify();
+ let mut entry = &entry;
+ loop {
+ let entry_id = entry.id;
+ match expanded_dir_ids.binary_search(&entry_id) {
+ Ok(ix) => {
+ expanded_dir_ids.remove(ix);
+ self.update_visible_entries(Some((worktree_id, entry_id)), cx);
+ cx.notify();
+ break;
+ }
+ Err(_) => {
+ if let Some(parent_entry) =
+ entry.path.parent().and_then(|p| worktree.entry_for_path(p))
+ {
+ entry = parent_entry;
+ } else {
break;
}
- Err(_) => {
- if let Some(parent_entry) =
- entry.path.parent().and_then(|p| worktree.entry_for_path(p))
- {
- entry = parent_entry;
- } else {
- break;
- }
- }
}
}
}
@@ -726,6 +748,19 @@ impl ProjectPanel {
}
fn select_prev(&mut self, _: &SelectPrev, cx: &mut ViewContext<Self>) {
+ if let Some(edit_state) = &self.edit_state {
+ if edit_state.processing_filename.is_none() {
+ self.filename_editor.update(cx, |editor, cx| {
+ editor.move_to_beginning_of_line(
+ &editor::actions::MoveToBeginningOfLine {
+ stop_at_soft_wraps: false,
+ },
+ cx,
+ );
+ });
+ return;
+ }
+ }
if let Some(selection) = self.selection {
let (mut worktree_ix, mut entry_ix, _) =
self.index_for_selection(selection).unwrap_or_default();
@@ -795,10 +830,10 @@ impl ProjectPanel {
cx.focus(&self.focus_handle);
let worktree_id = edit_state.worktree_id;
- let is_new_entry = edit_state.is_new_entry;
+ let is_new_entry = edit_state.is_new_entry();
let filename = self.filename_editor.read(cx).text(cx);
edit_state.is_dir = edit_state.is_dir
- || (edit_state.is_new_entry && filename.ends_with(std::path::MAIN_SEPARATOR));
+ || (edit_state.is_new_entry() && filename.ends_with(std::path::MAIN_SEPARATOR));
let is_dir = edit_state.is_dir;
let worktree = self.project.read(cx).worktree_for_id(worktree_id, cx)?;
let entry = worktree.read(cx).entry_for_id(edit_state.entry_id)?.clone();
@@ -829,7 +864,6 @@ impl ProjectPanel {
if path_already_exists(new_path.as_path()) {
return None;
}
-
edited_entry_id = entry.id;
edit_task = self.project.update(cx, |project, cx| {
project.rename_entry(entry.id, new_path.as_path(), cx)
@@ -948,6 +982,7 @@ impl ProjectPanel {
}) = self.selection
{
let directory_id;
+ let new_entry_id = self.resolve_entry(entry_id);
if let Some((worktree, expanded_dir_ids)) = self
.project
.read(cx)
@@ -955,7 +990,7 @@ impl ProjectPanel {
.zip(self.expanded_dir_ids.get_mut(&worktree_id))
{
let worktree = worktree.read(cx);
- if let Some(mut entry) = worktree.entry_for_id(entry_id) {
+ if let Some(mut entry) = worktree.entry_for_id(new_entry_id) {
loop {
if entry.is_dir() {
if let Err(ix) = expanded_dir_ids.binary_search(&entry.id) {
@@ -983,7 +1018,7 @@ impl ProjectPanel {
self.edit_state = Some(EditState {
worktree_id,
entry_id: directory_id,
- is_new_entry: true,
+ leaf_entry_id: None,
is_dir,
processing_filename: None,
depth: 0,
@@ -1017,12 +1052,12 @@ impl ProjectPanel {
}) = self.selection
{
if let Some(worktree) = self.project.read(cx).worktree_for_id(worktree_id, cx) {
- let entry_id = self.unflatten_entry_id(entry_id);
- if let Some(entry) = worktree.read(cx).entry_for_id(entry_id) {
+ let sub_entry_id = self.unflatten_entry_id(entry_id);
+ if let Some(entry) = worktree.read(cx).entry_for_id(sub_entry_id) {
self.edit_state = Some(EditState {
worktree_id,
- entry_id,
- is_new_entry: false,
+ entry_id: sub_entry_id,
+ leaf_entry_id: Some(entry_id),
is_dir: entry.is_dir(),
processing_filename: None,
depth: 0,
@@ -1196,6 +1231,19 @@ impl ProjectPanel {
}
fn select_next(&mut self, _: &SelectNext, cx: &mut ViewContext<Self>) {
+ if let Some(edit_state) = &self.edit_state {
+ if edit_state.processing_filename.is_none() {
+ self.filename_editor.update(cx, |editor, cx| {
+ editor.move_to_end_of_line(
+ &editor::actions::MoveToEndOfLine {
+ stop_at_soft_wraps: false,
+ },
+ cx,
+ );
+ });
+ return;
+ }
+ }
if let Some(selection) = self.selection {
let (mut worktree_ix, mut entry_ix, _) =
self.index_for_selection(selection).unwrap_or_default();
@@ -1232,6 +1280,7 @@ impl ProjectPanel {
fn select_parent(&mut self, _: &SelectParent, cx: &mut ViewContext<Self>) {
if let Some((worktree, entry)) = self.selected_sub_entry(cx) {
if let Some(parent) = entry.path.parent() {
+ let worktree = worktree.read(cx);
if let Some(parent_entry) = worktree.entry_for_path(parent) {
self.selection = Some(SelectedEntry {
worktree_id: worktree.id(),
@@ -1365,7 +1414,6 @@ impl ProjectPanel {
.clipboard
.as_ref()
.filter(|clipboard| !clipboard.items().is_empty())?;
-
enum PasteTask {
Rename(Task<Result<CreatedEntry>>),
Copy(Task<Result<Option<Entry>>>),
@@ -1375,7 +1423,7 @@ impl ProjectPanel {
let clip_is_cut = clipboard_entries.is_cut();
for clipboard_entry in clipboard_entries.items() {
let new_path =
- self.create_paste_path(clipboard_entry, self.selected_entry_handle(cx)?, cx)?;
+ self.create_paste_path(clipboard_entry, self.selected_sub_entry(cx)?, cx)?;
let clip_entry_id = clipboard_entry.entry_id;
let is_same_worktree = clipboard_entry.worktree_id == worktree_id;
let relative_worktree_source_path = if !is_same_worktree {
@@ -1517,7 +1565,7 @@ impl ProjectPanel {
fn reveal_in_finder(&mut self, _: &RevealInFileManager, cx: &mut ViewContext<Self>) {
if let Some((worktree, entry)) = self.selected_sub_entry(cx) {
- cx.reveal_path(&worktree.abs_path().join(&entry.path));
+ cx.reveal_path(&worktree.read(cx).abs_path().join(&entry.path));
}
}
@@ -1532,7 +1580,7 @@ impl ProjectPanel {
if let Some((worktree, entry)) = self.selected_sub_entry(cx) {
let abs_path = match &entry.canonical_path {
Some(canonical_path) => Some(canonical_path.to_path_buf()),
- None => worktree.absolutize(&entry.path).ok(),
+ None => worktree.read(cx).absolutize(&entry.path).ok(),
};
let working_directory = if entry.is_dir() {
@@ -1555,7 +1603,7 @@ impl ProjectPanel {
if entry.is_dir() {
let include_root = self.project.read(cx).visible_worktrees(cx).count() > 1;
let dir_path = if include_root {
- let mut full_path = PathBuf::from(worktree.root_name());
+ let mut full_path = PathBuf::from(worktree.read(cx).root_name());
full_path.push(&entry.path);
Arc::from(full_path)
} else {
@@ -1689,6 +1737,8 @@ impl ProjectPanel {
}
}
+ /// Finds the currently selected subentry for a given leaf entry id. If a given entry
+ /// has no ancestors, the project entry ID that's passed in is returned as-is.
fn resolve_entry(&self, id: ProjectEntryId) -> ProjectEntryId {
self.ancestors
.get(&id)
@@ -1701,6 +1751,7 @@ impl ProjectPanel {
.copied()
.unwrap_or(id)
}
+
pub fn selected_entry<'a>(
&self,
cx: &'a AppContext,
@@ -1714,12 +1765,12 @@ impl ProjectPanel {
fn selected_sub_entry<'a>(
&self,
cx: &'a AppContext,
- ) -> Option<(&'a Worktree, &'a project::Entry)> {
+ ) -> Option<(Model<Worktree>, &'a project::Entry)> {
let (worktree, mut entry) = self.selected_entry_handle(cx)?;
- let worktree = worktree.read(cx);
let resolved_id = self.resolve_entry(entry.id);
if resolved_id != entry.id {
+ let worktree = worktree.read(cx);
entry = worktree.entry_for_id(resolved_id)?;
}
Some((worktree, entry))
@@ -1789,7 +1840,7 @@ impl ProjectPanel {
let mut new_entry_parent_id = None;
let mut new_entry_kind = EntryKind::Dir;
if let Some(edit_state) = &self.edit_state {
- if edit_state.worktree_id == worktree_id && edit_state.is_new_entry {
+ if edit_state.worktree_id == worktree_id && edit_state.is_new_entry() {
new_entry_parent_id = Some(edit_state.entry_id);
new_entry_kind = if edit_state.is_dir {
EntryKind::Dir
@@ -1843,7 +1894,19 @@ impl ProjectPanel {
}
auto_folded_ancestors.clear();
visible_worktree_entries.push(entry.clone());
- if Some(entry.id) == new_entry_parent_id {
+ let precedes_new_entry = if let Some(new_entry_id) = new_entry_parent_id {
+ entry.id == new_entry_id || {
+ self.ancestors.get(&entry.id).map_or(false, |entries| {
+ entries
+ .ancestors
+ .iter()
+ .any(|entry_id| *entry_id == new_entry_id)
+ })
+ }
+ } else {
+ false
+ };
+ if precedes_new_entry {
visible_worktree_entries.push(Entry {
id: NEW_ENTRY_ID,
kind: new_entry_kind,
@@ -2118,6 +2181,74 @@ impl ProjectPanel {
}
}
+ fn index_for_entry(
+ &self,
+ entry_id: ProjectEntryId,
+ worktree_id: WorktreeId,
+ ) -> Option<(usize, usize, usize)> {
+ let mut worktree_ix = 0;
+ let mut total_ix = 0;
+ for (current_worktree_id, visible_worktree_entries, _) in &self.visible_entries {
+ if worktree_id != *current_worktree_id {
+ total_ix += visible_worktree_entries.len();
+ worktree_ix += 1;
+ continue;
+ }
+
+ return visible_worktree_entries
+ .iter()
+ .enumerate()
+ .find(|(_, entry)| entry.id == entry_id)
+ .map(|(ix, _)| (worktree_ix, ix, total_ix + ix));
+ }
+ None
+ }
+
+ fn entry_at_index(&self, index: usize) -> Option<(WorktreeId, &Entry)> {
+ let mut offset = 0;
+ for (worktree_id, visible_worktree_entries, _) in &self.visible_entries {
+ if visible_worktree_entries.len() > offset + index {
+ return visible_worktree_entries
+ .get(index)
+ .map(|entry| (*worktree_id, entry));
+ }
+ offset += visible_worktree_entries.len();
+ }
+ None
+ }
+
+ fn iter_visible_entries(
+ &self,
+ range: Range<usize>,
+ cx: &mut ViewContext<ProjectPanel>,
+ mut callback: impl FnMut(&Entry, &HashSet<Arc<Path>>, &mut ViewContext<ProjectPanel>),
+ ) {
+ let mut ix = 0;
+ for (_, visible_worktree_entries, entries_paths) in &self.visible_entries {
+ if ix >= range.end {
+ return;
+ }
+
+ if ix + visible_worktree_entries.len() <= range.start {
+ ix += visible_worktree_entries.len();
+ continue;
+ }
+
+ let end_ix = range.end.min(ix + visible_worktree_entries.len());
+ let entry_range = range.start.saturating_sub(ix)..end_ix - ix;
+ let entries = entries_paths.get_or_init(|| {
+ visible_worktree_entries
+ .iter()
+ .map(|e| (e.path.clone()))
+ .collect()
+ });
+ for entry in visible_worktree_entries[entry_range].iter() {
+ callback(entry, entries, cx);
+ }
+ ix = end_ix;
+ }
+ }
+
fn for_each_visible_entry(
&self,
range: Range<usize>,
@@ -2225,7 +2356,7 @@ impl ProjectPanel {
};
if let Some(edit_state) = &self.edit_state {
- let is_edited_entry = if edit_state.is_new_entry {
+ let is_edited_entry = if edit_state.is_new_entry() {
entry.id == NEW_ENTRY_ID
} else {
entry.id == edit_state.entry_id
@@ -2243,10 +2374,41 @@ impl ProjectPanel {
if is_edited_entry {
if let Some(processing_filename) = &edit_state.processing_filename {
details.is_processing = true;
- details.filename.clear();
- details.filename.push_str(processing_filename);
+ if let Some(ancestors) = edit_state
+ .leaf_entry_id
+ .and_then(|entry| self.ancestors.get(&entry))
+ {
+ let position = ancestors.ancestors.iter().position(|entry_id| *entry_id == edit_state.entry_id).expect("Edited sub-entry should be an ancestor of selected leaf entry") + 1;
+ let all_components = ancestors.ancestors.len();
+
+ let prefix_components = all_components - position;
+ let suffix_components = position.checked_sub(1);
+ let mut previous_components =
+ Path::new(&details.filename).components();
+ let mut new_path = previous_components
+ .by_ref()
+ .take(prefix_components)
+ .collect::<PathBuf>();
+ if let Some(last_component) =
+ Path::new(processing_filename).components().last()
+ {
+ new_path.push(last_component);
+ previous_components.next();
+ }
+
+ if let Some(_) = suffix_components {
+ new_path.push(previous_components);
+ }
+ if let Some(str) = new_path.to_str() {
+ details.filename.clear();
+ details.filename.push_str(str);
+ }
+ } else {
+ details.filename.clear();
+ details.filename.push_str(processing_filename);
+ }
} else {
- if edit_state.is_new_entry {
+ if edit_state.is_new_entry() {
details.filename.clear();
}
details.is_editing = true;
@@ -2436,9 +2598,7 @@ impl ProjectPanel {
h_flex().h_6().w_full().child(editor.clone())
} else {
h_flex().h_6().map(|mut this| {
- if let Some(folded_ancestors) =
- is_active.then(|| self.ancestors.get(&entry_id)).flatten()
- {
+ if let Some(folded_ancestors) = self.ancestors.get(&entry_id) {
let components = Path::new(&file_name)
.components()
.map(|comp| {
@@ -2447,6 +2607,7 @@ impl ProjectPanel {
comp_str
})
.collect::<Vec<_>>();
+
let components_len = components.len();
let active_index = components_len
- 1
@@ -2482,9 +2643,10 @@ impl ProjectPanel {
Label::new(component)
.single_line()
.color(filename_text_color)
- .when(index == active_index, |this| {
- this.underline(true)
- }),
+ .when(
+ is_active && index == active_index,
+ |this| this.underline(true),
+ ),
);
this = this.child(label);
@@ -2801,6 +2963,70 @@ impl ProjectPanel {
cx.notify();
}
}
+
+ fn find_active_indent_guide(
+ &self,
+ indent_guides: &[IndentGuideLayout],
+ cx: &AppContext,
+ ) -> Option<usize> {
+ let (worktree, entry) = self.selected_entry(cx)?;
+
+ // Find the parent entry of the indent guide, this will either be the
+ // expanded folder we have selected, or the parent of the currently
+ // selected file/collapsed directory
+ let mut entry = entry;
+ loop {
+ let is_expanded_dir = entry.is_dir()
+ && self
+ .expanded_dir_ids
+ .get(&worktree.id())
+ .map(|ids| ids.binary_search(&entry.id).is_ok())
+ .unwrap_or(false);
+ if is_expanded_dir {
+ break;
+ }
+ entry = worktree.entry_for_path(&entry.path.parent()?)?;
+ }
+
+ let (active_indent_range, depth) = {
+ let (worktree_ix, child_offset, ix) = self.index_for_entry(entry.id, worktree.id())?;
+ let child_paths = &self.visible_entries[worktree_ix].1;
+ let mut child_count = 0;
+ let depth = entry.path.ancestors().count();
+ while let Some(entry) = child_paths.get(child_offset + child_count + 1) {
+ if entry.path.ancestors().count() <= depth {
+ break;
+ }
+ child_count += 1;
+ }
+
+ let start = ix + 1;
+ let end = start + child_count;
+
+ let (_, entries, paths) = &self.visible_entries[worktree_ix];
+ let visible_worktree_entries =
+ paths.get_or_init(|| entries.iter().map(|e| (e.path.clone())).collect());
+
+ // Calculate the actual depth of the entry, taking into account that directories can be auto-folded.
+ let (depth, _) = Self::calculate_depth_and_difference(entry, visible_worktree_entries);
+ (start..end, depth)
+ };
+
+ let candidates = indent_guides
+ .iter()
+ .enumerate()
+ .filter(|(_, indent_guide)| indent_guide.offset.x == depth);
+
+ for (i, indent) in candidates {
+ // Find matches that are either an exact match, partially on screen, or inside the enclosing indent
+ if active_indent_range.start <= indent.offset.y + indent.length
+ && indent.offset.y <= active_indent_range.end
+ {
+ return Some(i);
+ }
+ }
+ None
+ }
}
fn item_width_estimate(depth: usize, item_text_chars: usize, is_symlink: bool) -> usize {
@@ -2816,6 +3042,8 @@ impl Render for ProjectPanel {
fn render(&mut self, cx: &mut gpui::ViewContext<Self>) -> impl IntoElement {
let has_worktree = !self.visible_entries.is_empty();
let project = self.project.read(cx);
+ let indent_size = ProjectPanelSettings::get_global(cx).indent_size;
+ let indent_guides = ProjectPanelSettings::get_global(cx).indent_guides;
let is_local = project.is_local();
if has_worktree {
@@ -2919,6 +3147,103 @@ impl Render for ProjectPanel {
items
}
})
+ .when(indent_guides, |list| {
+ list.with_decoration(
+ ui::indent_guides(
+ cx.view().clone(),
+ px(indent_size),
+ IndentGuideColors::panel(cx),
+ |this, range, cx| {
+ let mut items =
+ SmallVec::with_capacity(range.end - range.start);
+ this.iter_visible_entries(range, cx, |entry, entries, _| {
+ let (depth, _) =
+ Self::calculate_depth_and_difference(entry, entries);
+ items.push(depth);
+ });
+ items
+ },
+ )
+ .on_click(cx.listener(
+ |this, active_indent_guide: &IndentGuideLayout, cx| {
+ if cx.modifiers().secondary() {
+ let ix = active_indent_guide.offset.y;
+ let Some((target_entry, worktree)) = maybe!({
+ let (worktree_id, entry) = this.entry_at_index(ix)?;
+ let worktree = this
+ .project
+ .read(cx)
+ .worktree_for_id(worktree_id, cx)?;
+ let target_entry = worktree
+ .read(cx)
+ .entry_for_path(&entry.path.parent()?)?;
+ Some((target_entry, worktree))
+ }) else {
+ return;
+ };
+
+ this.collapse_entry(target_entry.clone(), worktree, cx);
+ }
+ },
+ ))
+ .with_render_fn(
+ cx.view().clone(),
+ move |this, params, cx| {
+ const LEFT_OFFSET: f32 = 14.;
+ const PADDING_Y: f32 = 4.;
+ const HITBOX_OVERDRAW: f32 = 3.;
+
+ let active_indent_guide_index =
+ this.find_active_indent_guide(¶ms.indent_guides, cx);
+
+ let indent_size = params.indent_size;
+ let item_height = params.item_height;
+
+ params
+ .indent_guides
+ .into_iter()
+ .enumerate()
+ .map(|(idx, layout)| {
+ let offset = if layout.continues_offscreen {
+ px(0.)
+ } else {
+ px(PADDING_Y)
+ };
+ let bounds = Bounds::new(
+ point(
+ px(layout.offset.x as f32) * indent_size
+ + px(LEFT_OFFSET),
+ px(layout.offset.y as f32) * item_height
+ + offset,
+ ),
+ size(
+ px(1.),
+ px(layout.length as f32) * item_height
+ - px(offset.0 * 2.),
+ ),
+ );
+ ui::RenderedIndentGuide {
+ bounds,
+ layout,
+ is_active: Some(idx) == active_indent_guide_index,
+ hitbox: Some(Bounds::new(
+ point(
+ bounds.origin.x - px(HITBOX_OVERDRAW),
+ bounds.origin.y,
+ ),
+ size(
+ bounds.size.width
+ + px(2. * HITBOX_OVERDRAW),
+ bounds.size.height,
+ ),
+ )),
+ }
+ })
+ .collect()
+ },
+ ),
+ )
+ })
.size_full()
.with_sizing_behavior(ListSizingBehavior::Infer)
.with_horizontal_sizing_behavior(ListHorizontalSizingBehavior::Unconstrained)
@@ -3072,12 +3397,11 @@ impl Panel for ProjectPanel {
fn starts_open(&self, cx: &WindowContext) -> bool {
let project = &self.project.read(cx);
- project.dev_server_project_id().is_some()
- || project.visible_worktrees(cx).any(|tree| {
- tree.read(cx)
- .root_entry()
- .map_or(false, |entry| entry.is_dir())
- })
+ project.visible_worktrees(cx).any(|tree| {
+ tree.read(cx)
+ .root_entry()
+ .map_or(false, |entry| entry.is_dir())
+ })
}
}
@@ -20,6 +20,7 @@ pub struct ProjectPanelSettings {
pub folder_icons: bool,
pub git_status: bool,
pub indent_size: f32,
+ pub indent_guides: bool,
pub auto_reveal_entries: bool,
pub auto_fold_dirs: bool,
pub scrollbar: ScrollbarSettings,
@@ -71,6 +72,10 @@ pub struct ProjectPanelSettingsContent {
///
/// Default: 20
pub indent_size: Option<f32>,
+ /// Whether to show indent guides in the project panel.
+ ///
+ /// Default: true
+ pub indent_guides: Option<bool>,
/// Whether to reveal it in the project panel automatically,
/// when a corresponding project entry becomes active.
/// Gitignored entries are never auto revealed.
@@ -196,8 +196,6 @@ message Envelope {
GetImplementation get_implementation = 162;
GetImplementationResponse get_implementation_response = 163;
- JoinHostedProject join_hosted_project = 164;
-
CountLanguageModelTokens count_language_model_tokens = 230;
CountLanguageModelTokensResponse count_language_model_tokens_response = 231;
GetCachedEmbeddings get_cached_embeddings = 189;
@@ -217,33 +215,14 @@ message Envelope {
MultiLspQueryResponse multi_lsp_query_response = 176;
RestartLanguageServers restart_language_servers = 208;
- CreateDevServerProject create_dev_server_project = 177;
- CreateDevServerProjectResponse create_dev_server_project_response = 188;
- CreateDevServer create_dev_server = 178;
- CreateDevServerResponse create_dev_server_response = 179;
- ShutdownDevServer shutdown_dev_server = 180;
- DevServerInstructions dev_server_instructions = 181;
- ReconnectDevServer reconnect_dev_server = 182;
- ReconnectDevServerResponse reconnect_dev_server_response = 183;
-
- ShareDevServerProject share_dev_server_project = 184;
- JoinDevServerProject join_dev_server_project = 185;
RejoinRemoteProjects rejoin_remote_projects = 186;
RejoinRemoteProjectsResponse rejoin_remote_projects_response = 187;
- DevServerProjectsUpdate dev_server_projects_update = 193;
- ValidateDevServerProjectRequest validate_dev_server_project_request = 194;
- DeleteDevServer delete_dev_server = 195;
OpenNewBuffer open_new_buffer = 196;
- DeleteDevServerProject delete_dev_server_project = 197;
GetSupermavenApiKey get_supermaven_api_key = 198;
GetSupermavenApiKeyResponse get_supermaven_api_key_response = 199;
- RegenerateDevServerToken regenerate_dev_server_token = 200;
- RegenerateDevServerTokenResponse regenerate_dev_server_token_response = 201;
- RenameDevServer rename_dev_server = 202;
-
TaskContextForLocation task_context_for_location = 203;
TaskContext task_context = 204;
@@ -264,7 +243,6 @@ message Envelope {
ListRemoteDirectory list_remote_directory = 219;
ListRemoteDirectoryResponse list_remote_directory_response = 220;
- UpdateDevServerProject update_dev_server_project = 221;
AddWorktree add_worktree = 222;
AddWorktreeResponse add_worktree_response = 223;
@@ -281,9 +259,6 @@ message Envelope {
CloseBuffer close_buffer = 245;
UpdateUserSettings update_user_settings = 246;
- CheckFileExists check_file_exists = 255;
- CheckFileExistsResponse check_file_exists_response = 256;
-
ShutdownRemoteServer shutdown_remote_server = 257;
RemoveWorktree remove_worktree = 258;
@@ -301,15 +276,36 @@ message Envelope {
FlushBufferedMessages flush_buffered_messages = 267;
LanguageServerPromptRequest language_server_prompt_request = 268;
- LanguageServerPromptResponse language_server_prompt_response = 269; // current max
+ LanguageServerPromptResponse language_server_prompt_response = 269;
+ GitBranches git_branches = 270;
+ GitBranchesResponse git_branches_response = 271;
+
+ UpdateGitBranch update_git_branch = 272;
+
+ ListToolchains list_toolchains = 273;
+ ListToolchainsResponse list_toolchains_response = 274;
+ ActivateToolchain activate_toolchain = 275;
+ ActiveToolchain active_toolchain = 276;
+ ActiveToolchainResponse active_toolchain_response = 277;
+
+ GetPathMetadata get_path_metadata = 278;
+ GetPathMetadataResponse get_path_metadata_response = 279; // current max
}
reserved 87 to 88;
reserved 158 to 161;
+ reserved 164;
reserved 166 to 169;
+ reserved 177 to 185;
+ reserved 188;
+ reserved 193 to 195;
+ reserved 197;
+ reserved 200 to 202;
reserved 205 to 206;
+ reserved 221;
reserved 224 to 229;
reserved 247 to 254;
+ reserved 255 to 256;
}
// Messages
@@ -342,12 +338,11 @@ enum ErrorCode {
WrongMoveTarget = 11;
UnsharedItem = 12;
NoSuchProject = 13;
- DevServerAlreadyOnline = 14;
- DevServerOffline = 15;
DevServerProjectPathDoesNotExist = 16;
RemoteUpgradeRequired = 17;
RateLimitExceeded = 18;
reserved 6;
+ reserved 14 to 15;
}
message EndStream {}
@@ -511,7 +506,7 @@ message LiveKitConnectionInfo {
message ShareProject {
uint64 room_id = 1;
repeated WorktreeMetadata worktrees = 2;
- optional uint64 dev_server_project_id = 3;
+ reserved 3;
bool is_ssh_project = 4;
}
@@ -532,24 +527,6 @@ message JoinProject {
uint64 project_id = 1;
}
-message JoinHostedProject {
- uint64 project_id = 1;
-}
-
-message CreateDevServerProject {
- reserved 1;
- reserved 2;
- uint64 dev_server_id = 3;
- string path = 4;
-}
-message CreateDevServerProjectResponse {
- DevServerProject dev_server_project = 1;
-}
-
-message ValidateDevServerProjectRequest {
- string path = 1;
-}
-
message ListRemoteDirectory {
uint64 dev_server_id = 1;
string path = 2;
@@ -559,77 +536,6 @@ message ListRemoteDirectoryResponse {
repeated string entries = 1;
}
-message UpdateDevServerProject {
- uint64 dev_server_project_id = 1;
- repeated string paths = 2;
-}
-
-message CreateDevServer {
- reserved 1;
- string name = 2;
- optional string ssh_connection_string = 3;
-}
-
-message RegenerateDevServerToken {
- uint64 dev_server_id = 1;
-}
-
-message RegenerateDevServerTokenResponse {
- uint64 dev_server_id = 1;
- string access_token = 2;
-}
-
-message CreateDevServerResponse {
- uint64 dev_server_id = 1;
- reserved 2;
- string access_token = 3;
- string name = 4;
-}
-
-message ShutdownDevServer {
- optional string reason = 1;
-}
-
-message RenameDevServer {
- uint64 dev_server_id = 1;
- string name = 2;
- optional string ssh_connection_string = 3;
-}
-
-message DeleteDevServer {
- uint64 dev_server_id = 1;
-}
-
-message DeleteDevServerProject {
- uint64 dev_server_project_id = 1;
-}
-
-message ReconnectDevServer {
- repeated UpdateProject reshared_projects = 1;
-}
-
-message ReconnectDevServerResponse {
- repeated ResharedProject reshared_projects = 1;
-}
-
-message DevServerInstructions {
- repeated DevServerProject projects = 1;
-}
-
-message DevServerProjectsUpdate {
- repeated DevServer dev_servers = 1;
- repeated DevServerProject dev_server_projects = 2;
-}
-
-message ShareDevServerProject {
- uint64 dev_server_project_id = 1;
- repeated WorktreeMetadata worktrees = 2;
-}
-
-message JoinDevServerProject {
- uint64 dev_server_project_id = 1;
-}
-
message JoinProjectResponse {
uint64 project_id = 5;
uint32 replica_id = 1;
@@ -637,7 +543,7 @@ message JoinProjectResponse {
repeated Collaborator collaborators = 3;
repeated LanguageServer language_servers = 4;
ChannelRole role = 6;
- optional uint64 dev_server_project_id = 7;
+ reserved 7;
}
message LeaveProject {
@@ -1387,13 +1293,7 @@ message UpdateChannels {
repeated ChannelMessageId latest_channel_message_ids = 8;
repeated ChannelBufferVersion latest_channel_buffer_versions = 9;
- repeated HostedProject hosted_projects = 10;
- repeated uint64 deleted_hosted_projects = 11;
-
- reserved 12;
- reserved 13;
- reserved 14;
- reserved 15;
+ reserved 10 to 15;
}
message UpdateUserChannels {
@@ -1422,36 +1322,6 @@ message ChannelParticipants {
repeated uint64 participant_user_ids = 2;
}
-message HostedProject {
- uint64 project_id = 1;
- uint64 channel_id = 2;
- string name = 3;
- ChannelVisibility visibility = 4;
-}
-
-message DevServerProject {
- uint64 id = 1;
- optional uint64 project_id = 2;
- reserved 3;
- reserved 4;
- uint64 dev_server_id = 5;
- string path = 6;
- repeated string paths = 7;
-}
-
-message DevServer {
- reserved 1;
- uint64 dev_server_id = 2;
- string name = 3;
- DevServerStatus status = 4;
- optional string ssh_connection_string = 5;
-}
-
-enum DevServerStatus {
- Offline = 0;
- Online = 1;
-}
-
message JoinChannel {
uint64 channel_id = 1;
}
@@ -2488,14 +2358,15 @@ message UpdateUserSettings {
}
}
-message CheckFileExists {
+message GetPathMetadata {
uint64 project_id = 1;
string path = 2;
}
-message CheckFileExistsResponse {
+message GetPathMetadataResponse {
bool exists = 1;
string path = 2;
+ bool is_dir = 3;
}
message ShutdownRemoteServer {}
@@ -2528,7 +2399,6 @@ message GetPermalinkToLine {
message GetPermalinkToLineResponse {
string permalink = 1;
}
-
message FlushBufferedMessages {}
message FlushBufferedMessagesResponse {}
@@ -2553,3 +2423,64 @@ message LanguageServerPromptRequest {
message LanguageServerPromptResponse {
optional uint64 action_response = 1;
}
+
+message ListToolchains {
+ uint64 project_id = 1;
+ uint64 worktree_id = 2;
+ string language_name = 3;
+}
+
+message Toolchain {
+ string name = 1;
+ string path = 2;
+}
+
+message ToolchainGroup {
+ uint64 start_index = 1;
+ string name = 2;
+}
+
+message ListToolchainsResponse {
+ repeated Toolchain toolchains = 1;
+ bool has_values = 2;
+ repeated ToolchainGroup groups = 3;
+}
+
+message ActivateToolchain {
+ uint64 project_id = 1;
+ uint64 worktree_id = 2;
+ Toolchain toolchain = 3;
+ string language_name = 4;
+}
+
+message ActiveToolchain {
+ uint64 project_id = 1;
+ uint64 worktree_id = 2;
+ string language_name = 3;
+}
+
+message ActiveToolchainResponse {
+ optional Toolchain toolchain = 1;
+}
+
+message Branch {
+ bool is_head = 1;
+ string name = 2;
+ optional uint64 unix_timestamp = 3;
+}
+
+message GitBranches {
+ uint64 project_id = 1;
+ ProjectPath repository = 2;
+}
+
+message GitBranchesResponse {
+ repeated Branch branches = 1;
+}
+
+message UpdateGitBranch {
+ uint64 project_id = 1;
+ string branch_name = 2;
+ ProjectPath repository = 3;
+
+}
@@ -228,7 +228,6 @@ messages!(
(JoinChannelChat, Foreground),
(JoinChannelChatResponse, Foreground),
(JoinProject, Foreground),
- (JoinHostedProject, Foreground),
(JoinProjectResponse, Foreground),
(JoinRoom, Foreground),
(JoinRoomResponse, Foreground),
@@ -318,30 +317,12 @@ messages!(
(SetRoomParticipantRole, Foreground),
(BlameBuffer, Foreground),
(BlameBufferResponse, Foreground),
- (CreateDevServerProject, Background),
- (CreateDevServerProjectResponse, Foreground),
- (CreateDevServer, Foreground),
- (CreateDevServerResponse, Foreground),
- (DevServerInstructions, Foreground),
- (ShutdownDevServer, Foreground),
- (ReconnectDevServer, Foreground),
- (ReconnectDevServerResponse, Foreground),
- (ShareDevServerProject, Foreground),
- (JoinDevServerProject, Foreground),
(RejoinRemoteProjects, Foreground),
(RejoinRemoteProjectsResponse, Foreground),
(MultiLspQuery, Background),
(MultiLspQueryResponse, Background),
- (DevServerProjectsUpdate, Foreground),
- (ValidateDevServerProjectRequest, Background),
(ListRemoteDirectory, Background),
(ListRemoteDirectoryResponse, Background),
- (UpdateDevServerProject, Background),
- (DeleteDevServer, Foreground),
- (DeleteDevServerProject, Foreground),
- (RegenerateDevServerToken, Foreground),
- (RegenerateDevServerTokenResponse, Foreground),
- (RenameDevServer, Foreground),
(OpenNewBuffer, Foreground),
(RestartLanguageServers, Foreground),
(LinkedEditingRange, Background),
@@ -362,8 +343,6 @@ messages!(
(FindSearchCandidatesResponse, Background),
(CloseBuffer, Foreground),
(UpdateUserSettings, Foreground),
- (CheckFileExists, Background),
- (CheckFileExistsResponse, Background),
(ShutdownRemoteServer, Foreground),
(RemoveWorktree, Foreground),
(LanguageServerLog, Foreground),
@@ -375,6 +354,16 @@ messages!(
(FlushBufferedMessages, Foreground),
(LanguageServerPromptRequest, Foreground),
(LanguageServerPromptResponse, Foreground),
+ (GitBranches, Background),
+ (GitBranchesResponse, Background),
+ (UpdateGitBranch, Background),
+ (ListToolchains, Foreground),
+ (ListToolchainsResponse, Foreground),
+ (ActivateToolchain, Foreground),
+ (ActiveToolchain, Foreground),
+ (ActiveToolchainResponse, Foreground),
+ (GetPathMetadata, Background),
+ (GetPathMetadataResponse, Background)
);
request_messages!(
@@ -419,7 +408,6 @@ request_messages!(
(GetTypeDefinition, GetTypeDefinitionResponse),
(LinkedEditingRange, LinkedEditingRangeResponse),
(ListRemoteDirectory, ListRemoteDirectoryResponse),
- (UpdateDevServerProject, Ack),
(GetUsers, UsersResponse),
(IncomingCall, Ack),
(InlayHints, InlayHintsResponse),
@@ -427,7 +415,6 @@ request_messages!(
(JoinChannel, JoinRoomResponse),
(JoinChannelBuffer, JoinChannelBufferResponse),
(JoinChannelChat, JoinChannelChatResponse),
- (JoinHostedProject, JoinProjectResponse),
(JoinProject, JoinProjectResponse),
(JoinRoom, JoinRoomResponse),
(LeaveChannelBuffer, Ack),
@@ -477,32 +464,26 @@ request_messages!(
(LspExtExpandMacro, LspExtExpandMacroResponse),
(SetRoomParticipantRole, Ack),
(BlameBuffer, BlameBufferResponse),
- (CreateDevServerProject, CreateDevServerProjectResponse),
- (CreateDevServer, CreateDevServerResponse),
- (ShutdownDevServer, Ack),
- (ShareDevServerProject, ShareProjectResponse),
- (JoinDevServerProject, JoinProjectResponse),
(RejoinRemoteProjects, RejoinRemoteProjectsResponse),
- (ReconnectDevServer, ReconnectDevServerResponse),
- (ValidateDevServerProjectRequest, Ack),
(MultiLspQuery, MultiLspQueryResponse),
- (DeleteDevServer, Ack),
- (DeleteDevServerProject, Ack),
- (RegenerateDevServerToken, RegenerateDevServerTokenResponse),
- (RenameDevServer, Ack),
(RestartLanguageServers, Ack),
(OpenContext, OpenContextResponse),
(CreateContext, CreateContextResponse),
(SynchronizeContexts, SynchronizeContextsResponse),
(LspExtSwitchSourceHeader, LspExtSwitchSourceHeaderResponse),
(AddWorktree, AddWorktreeResponse),
- (CheckFileExists, CheckFileExistsResponse),
(ShutdownRemoteServer, Ack),
(RemoveWorktree, Ack),
(OpenServerSettings, OpenBufferResponse),
(GetPermalinkToLine, GetPermalinkToLineResponse),
(FlushBufferedMessages, Ack),
(LanguageServerPromptRequest, LanguageServerPromptResponse),
+ (GitBranches, GitBranchesResponse),
+ (UpdateGitBranch, Ack),
+ (ListToolchains, ListToolchainsResponse),
+ (ActivateToolchain, Ack),
+ (ActiveToolchain, ActiveToolchainResponse),
+ (GetPathMetadata, GetPathMetadataResponse)
);
entity_messages!(
@@ -574,13 +555,18 @@ entity_messages!(
SynchronizeContexts,
LspExtSwitchSourceHeader,
UpdateUserSettings,
- CheckFileExists,
LanguageServerLog,
Toast,
HideToast,
OpenServerSettings,
GetPermalinkToLine,
- LanguageServerPromptRequest
+ LanguageServerPromptRequest,
+ GitBranches,
+ UpdateGitBranch,
+ ListToolchains,
+ ActivateToolchain,
+ ActiveToolchain,
+ GetPathMetadata
);
entity_messages!(
@@ -16,7 +16,6 @@ doctest = false
anyhow.workspace = true
auto_update.workspace = true
release_channel.workspace = true
-client.workspace = true
editor.workspace = true
file_finder.workspace = true
futures.workspace = true
@@ -30,15 +29,12 @@ menu.workspace = true
ordered-float.workspace = true
picker.workspace = true
project.workspace = true
-dev_server_projects.workspace = true
remote.workspace = true
-rpc.workspace = true
schemars.workspace = true
serde.workspace = true
settings.workspace = true
smol.workspace = true
task.workspace = true
-terminal_view.workspace = true
theme.workspace = true
ui.workspace = true
util.workspace = true
@@ -1,6 +1,5 @@
use std::path::PathBuf;
-use dev_server_projects::DevServer;
use gpui::{ClickEvent, DismissEvent, EventEmitter, FocusHandle, FocusableView, Render, WeakView};
use project::project_settings::ProjectSettings;
use remote::SshConnectionOptions;
@@ -12,14 +11,10 @@ use ui::{
};
use workspace::{notifications::DetachAndPromptErr, ModalView, OpenOptions, Workspace};
-use crate::{
- open_dev_server_project, open_ssh_project, remote_servers::reconnect_to_dev_server_project,
- RemoteServerProjects, SshSettings,
-};
+use crate::open_ssh_project;
enum Host {
RemoteProject,
- DevServerProject(DevServer),
SshRemoteProject(SshConnectionOptions),
}
@@ -55,20 +50,9 @@ impl DisconnectedOverlay {
return;
}
let handle = cx.view().downgrade();
- let dev_server = project
- .read(cx)
- .dev_server_project_id()
- .and_then(|id| {
- dev_server_projects::Store::global(cx)
- .read(cx)
- .dev_server_for_project(id)
- })
- .cloned();
let ssh_connection_options = project.read(cx).ssh_connection_options(cx);
- let host = if let Some(dev_server) = dev_server {
- Host::DevServerProject(dev_server)
- } else if let Some(ssh_connection_options) = ssh_connection_options {
+ let host = if let Some(ssh_connection_options) = ssh_connection_options {
Host::SshRemoteProject(ssh_connection_options)
} else {
Host::RemoteProject
@@ -89,9 +73,6 @@ impl DisconnectedOverlay {
cx.emit(DismissEvent);
match &self.host {
- Host::DevServerProject(dev_server) => {
- self.reconnect_to_dev_server(dev_server.clone(), cx);
- }
Host::SshRemoteProject(ssh_connection_options) => {
self.reconnect_to_ssh_remote(ssh_connection_options.clone(), cx);
}
@@ -99,50 +80,6 @@ impl DisconnectedOverlay {
}
}
- fn reconnect_to_dev_server(&self, dev_server: DevServer, cx: &mut ViewContext<Self>) {
- let Some(workspace) = self.workspace.upgrade() else {
- return;
- };
- let Some(dev_server_project_id) = workspace
- .read(cx)
- .project()
- .read(cx)
- .dev_server_project_id()
- else {
- return;
- };
-
- if let Some(project_id) = dev_server_projects::Store::global(cx)
- .read(cx)
- .dev_server_project(dev_server_project_id)
- .and_then(|project| project.project_id)
- {
- return workspace.update(cx, move |_, cx| {
- open_dev_server_project(true, dev_server_project_id, project_id, cx)
- .detach_and_prompt_err("Failed to reconnect", cx, |_, _| None)
- });
- }
-
- if dev_server.ssh_connection_string.is_some() {
- let task = workspace.update(cx, |_, cx| {
- reconnect_to_dev_server_project(
- cx.view().clone(),
- dev_server,
- dev_server_project_id,
- true,
- cx,
- )
- });
-
- task.detach_and_prompt_err("Failed to reconnect", cx, |_, _| None);
- } else {
- return workspace.update(cx, |workspace, cx| {
- let handle = cx.view().downgrade();
- workspace.toggle_modal(cx, |cx| RemoteServerProjects::new(cx, handle))
- });
- }
- }
-
fn reconnect_to_ssh_remote(
&self,
connection_options: SshConnectionOptions,
@@ -165,16 +102,6 @@ impl DisconnectedOverlay {
let paths = ssh_project.paths.iter().map(PathBuf::from).collect();
cx.spawn(move |_, mut cx| async move {
- let nickname = cx
- .update(|cx| {
- SshSettings::get_global(cx).nickname_for(
- &connection_options.host,
- connection_options.port,
- &connection_options.username,
- )
- })
- .ok()
- .flatten();
open_ssh_project(
connection_options,
paths,
@@ -183,7 +110,6 @@ impl DisconnectedOverlay {
replace_window: Some(window),
..Default::default()
},
- nickname,
&mut cx,
)
.await?;
@@ -200,13 +126,10 @@ impl DisconnectedOverlay {
impl Render for DisconnectedOverlay {
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
- let can_reconnect = matches!(
- self.host,
- Host::DevServerProject(_) | Host::SshRemoteProject(_)
- );
+ let can_reconnect = matches!(self.host, Host::SshRemoteProject(_));
let message = match &self.host {
- Host::RemoteProject | Host::DevServerProject(_) => {
+ Host::RemoteProject => {
"Your connection to the remote project has been lost.".to_string()
}
Host::SshRemoteProject(options) => {
@@ -1,10 +1,8 @@
pub mod disconnected_overlay;
mod remote_servers;
mod ssh_connections;
-use remote::SshConnectionOptions;
pub use ssh_connections::open_ssh_project;
-use client::{DevServerProjectId, ProjectId};
use disconnected_overlay::DisconnectedOverlay;
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{
@@ -17,9 +15,7 @@ use picker::{
highlighted_match_with_paths::{HighlightedMatchWithPaths, HighlightedText},
Picker, PickerDelegate,
};
-use remote_servers::reconnect_to_dev_server_project;
pub use remote_servers::RemoteServerProjects;
-use rpc::proto::DevServerStatus;
use serde::Deserialize;
use settings::Settings;
pub use ssh_connections::SshSettings;
@@ -28,13 +24,12 @@ use std::{
sync::Arc,
};
use ui::{
- prelude::*, tooltip_container, ButtonLike, IconWithIndicator, Indicator, KeyBinding, ListItem,
- ListItemSpacing, Tooltip,
+ prelude::*, tooltip_container, ButtonLike, KeyBinding, ListItem, ListItemSpacing, Tooltip,
};
use util::{paths::PathExt, ResultExt};
use workspace::{
- AppState, CloseIntent, ModalView, OpenOptions, SerializedWorkspaceLocation, Workspace,
- WorkspaceId, WORKSPACE_DB,
+ CloseIntent, ModalView, OpenOptions, SerializedWorkspaceLocation, Workspace, WorkspaceId,
+ WORKSPACE_DB,
};
#[derive(PartialEq, Clone, Deserialize, Default)]
@@ -101,7 +96,7 @@ impl RecentProjects {
}
}
- fn register(workspace: &mut Workspace, cx: &mut ViewContext<Workspace>) {
+ fn register(workspace: &mut Workspace, _cx: &mut ViewContext<Workspace>) {
workspace.register_action(|workspace, open_recent: &OpenRecent, cx| {
let Some(recent_projects) = workspace.active_modal::<Self>(cx) else {
Self::open(workspace, open_recent.create_new_window, cx);
@@ -114,20 +109,6 @@ impl RecentProjects {
.update(cx, |picker, cx| picker.cycle_selection(cx))
});
});
- if workspace
- .project()
- .read(cx)
- .dev_server_project_id()
- .is_some()
- {
- workspace.register_action(|workspace, _: &workspace::Open, cx| {
- if workspace.active_modal::<Self>(cx).is_some() {
- cx.propagate();
- } else {
- Self::open(workspace, true, cx);
- }
- });
- }
}
pub fn open(
@@ -254,13 +235,6 @@ impl PickerDelegate for RecentProjectsDelegate {
.map(|(_, path)| path.compact().to_string_lossy().into_owned())
.collect::<Vec<_>>()
.join(""),
- SerializedWorkspaceLocation::DevServer(dev_server_project) => {
- format!(
- "{}{}",
- dev_server_project.dev_server_name,
- dev_server_project.paths.join("")
- )
- }
SerializedWorkspaceLocation::Ssh(ssh_project) => ssh_project
.ssh_urls()
.iter()
@@ -321,7 +295,10 @@ impl PickerDelegate for RecentProjectsDelegate {
cx.spawn(move |workspace, mut cx| async move {
let continue_replacing = workspace
.update(&mut cx, |workspace, cx| {
- workspace.prepare_to_close(CloseIntent::ReplaceWindow, cx)
+ workspace.prepare_to_close(
+ CloseIntent::ReplaceWindow,
+ cx,
+ )
})?
.await?;
if continue_replacing {
@@ -339,74 +316,44 @@ impl PickerDelegate for RecentProjectsDelegate {
workspace.open_workspace_for_paths(false, paths, cx)
}
}
- SerializedWorkspaceLocation::DevServer(dev_server_project) => {
- let store = dev_server_projects::Store::global(cx);
- let Some(project_id) = store.read(cx)
- .dev_server_project(dev_server_project.id)
- .and_then(|p| p.project_id)
- else {
- let server = store.read(cx).dev_server_for_project(dev_server_project.id);
- if server.is_some_and(|server| server.ssh_connection_string.is_some()) {
- return reconnect_to_dev_server_project(cx.view().clone(), server.unwrap().clone(), dev_server_project.id, replace_current_window, cx);
- } else {
- let dev_server_name = dev_server_project.dev_server_name.clone();
- return cx.spawn(|workspace, mut cx| async move {
- let response =
- cx.prompt(gpui::PromptLevel::Warning,
- "Dev Server is offline",
- Some(format!("Cannot connect to {}. To debug open the remote project settings.", dev_server_name).as_str()),
- &["Ok", "Open Settings"]
- ).await?;
- if response == 1 {
- workspace.update(&mut cx, |workspace, cx| {
- let handle = cx.view().downgrade();
- workspace.toggle_modal(cx, |cx| RemoteServerProjects::new(cx, handle))
- })?;
- } else {
- workspace.update(&mut cx, |workspace, cx| {
- RecentProjects::open(workspace, true, cx);
- })?;
- }
- Ok(())
- })
- }
+ SerializedWorkspaceLocation::Ssh(ssh_project) => {
+ let app_state = workspace.app_state().clone();
+
+ let replace_window = if replace_current_window {
+ cx.window_handle().downcast::<Workspace>()
+ } else {
+ None
};
- open_dev_server_project(replace_current_window, dev_server_project.id, project_id, cx)
- }
- SerializedWorkspaceLocation::Ssh(ssh_project) => {
- let app_state = workspace.app_state().clone();
-
- let replace_window = if replace_current_window {
- cx.window_handle().downcast::<Workspace>()
- } else {
- None
- };
-
- let open_options = OpenOptions {
- replace_window,
- ..Default::default()
- };
-
- let args = SshSettings::get_global(cx).args_for(&ssh_project.host, ssh_project.port, &ssh_project.user);
- let nickname = SshSettings::get_global(cx).nickname_for(&ssh_project.host, ssh_project.port, &ssh_project.user);
- let connection_options = SshConnectionOptions {
- host: ssh_project.host.clone(),
- username: ssh_project.user.clone(),
- port: ssh_project.port,
- password: None,
- args,
- };
-
- let paths = ssh_project.paths.iter().map(PathBuf::from).collect();
-
- cx.spawn(|_, mut cx| async move {
- open_ssh_project(connection_options, paths, app_state, open_options, nickname, &mut cx).await
- })
+
+ let open_options = OpenOptions {
+ replace_window,
+ ..Default::default()
+ };
+
+ let connection_options = SshSettings::get_global(cx)
+ .connection_options_for(
+ ssh_project.host.clone(),
+ ssh_project.port,
+ ssh_project.user.clone(),
+ );
+
+ let paths = ssh_project.paths.iter().map(PathBuf::from).collect();
+
+ cx.spawn(|_, mut cx| async move {
+ open_ssh_project(
+ connection_options,
+ paths,
+ app_state,
+ open_options,
+ &mut cx,
+ )
+ .await
+ })
+ }
}
}
- }
})
- .detach_and_log_err(cx);
+ .detach_and_log_err(cx);
cx.emit(DismissEvent);
}
}
@@ -431,20 +378,6 @@ impl PickerDelegate for RecentProjectsDelegate {
let (_, location) = self.workspaces.get(hit.candidate_id)?;
- let dev_server_status =
- if let SerializedWorkspaceLocation::DevServer(dev_server_project) = location {
- let store = dev_server_projects::Store::global(cx).read(cx);
- Some(
- store
- .dev_server_project(dev_server_project.id)
- .and_then(|p| store.dev_server(p.dev_server_id))
- .map(|s| s.status)
- .unwrap_or_default(),
- )
- } else {
- None
- };
-
let mut path_start_offset = 0;
let paths = match location {
SerializedWorkspaceLocation::Local(paths, order) => Arc::new(
@@ -457,13 +390,6 @@ impl PickerDelegate for RecentProjectsDelegate {
.collect(),
),
SerializedWorkspaceLocation::Ssh(ssh_project) => Arc::new(ssh_project.ssh_urls()),
- SerializedWorkspaceLocation::DevServer(dev_server_project) => {
- Arc::new(vec![PathBuf::from(format!(
- "{}:{}",
- dev_server_project.dev_server_name,
- dev_server_project.paths.join(", ")
- ))])
- }
};
let (match_labels, paths): (Vec<_>, Vec<_>) = paths
@@ -478,13 +404,7 @@ impl PickerDelegate for RecentProjectsDelegate {
.unzip();
let highlighted_match = HighlightedMatchWithPaths {
- match_label: HighlightedText::join(match_labels.into_iter().flatten(), ", ").color(
- if matches!(dev_server_status, Some(DevServerStatus::Offline)) {
- Color::Disabled
- } else {
- Color::Default
- },
- ),
+ match_label: HighlightedText::join(match_labels.into_iter().flatten(), ", "),
paths,
};
@@ -507,24 +427,6 @@ impl PickerDelegate for RecentProjectsDelegate {
SerializedWorkspaceLocation::Ssh(_) => Icon::new(IconName::Server)
.color(Color::Muted)
.into_any_element(),
- SerializedWorkspaceLocation::DevServer(_) => {
- let indicator_color = match dev_server_status {
- Some(DevServerStatus::Online) => Color::Created,
- Some(DevServerStatus::Offline) => Color::Hidden,
- _ => unreachable!(),
- };
- IconWithIndicator::new(
- Icon::new(IconName::Server).color(Color::Muted),
- Some(Indicator::dot()),
- )
- .indicator_color(indicator_color)
- .indicator_border_color(if selected {
- Some(cx.theme().colors().element_selected)
- } else {
- None
- })
- .into_any_element()
- }
})
})
.child({
@@ -597,59 +499,6 @@ impl PickerDelegate for RecentProjectsDelegate {
}
}
-fn open_dev_server_project(
- replace_current_window: bool,
- dev_server_project_id: DevServerProjectId,
- project_id: ProjectId,
- cx: &mut ViewContext<Workspace>,
-) -> Task<anyhow::Result<()>> {
- if let Some(app_state) = AppState::global(cx).upgrade() {
- let handle = if replace_current_window {
- cx.window_handle().downcast::<Workspace>()
- } else {
- None
- };
-
- if let Some(handle) = handle {
- cx.spawn(move |workspace, mut cx| async move {
- let continue_replacing = workspace
- .update(&mut cx, |workspace, cx| {
- workspace.prepare_to_close(CloseIntent::ReplaceWindow, cx)
- })?
- .await?;
- if continue_replacing {
- workspace
- .update(&mut cx, |_workspace, cx| {
- workspace::join_dev_server_project(
- dev_server_project_id,
- project_id,
- app_state,
- Some(handle),
- cx,
- )
- })?
- .await?;
- }
- Ok(())
- })
- } else {
- let task = workspace::join_dev_server_project(
- dev_server_project_id,
- project_id,
- app_state,
- None,
- cx,
- );
- cx.spawn(|_, _| async move {
- task.await?;
- Ok(())
- })
- }
- } else {
- Task::ready(Err(anyhow::anyhow!("App state not found")))
- }
-}
-
// Compute the highlighted text for the name and path
fn highlights_for_path(
path: &Path,
@@ -1,19 +1,12 @@
-use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
-use std::time::Duration;
-use anyhow::anyhow;
-use anyhow::Context;
-use anyhow::Result;
-use dev_server_projects::{DevServer, DevServerId, DevServerProjectId};
use editor::Editor;
use file_finder::OpenPathDelegate;
use futures::channel::oneshot;
use futures::future::Shared;
use futures::FutureExt;
use gpui::canvas;
-use gpui::AsyncWindowContext;
use gpui::ClipboardItem;
use gpui::Task;
use gpui::WeakView;
@@ -22,17 +15,11 @@ use gpui::{
PromptLevel, ScrollHandle, View, ViewContext,
};
use picker::Picker;
-use project::terminals::wrap_for_ssh;
-use project::terminals::SshCommand;
use project::Project;
use remote::SshConnectionOptions;
-use rpc::proto::DevServerStatus;
+use remote::SshRemoteClient;
use settings::update_settings_file;
use settings::Settings;
-use task::HideStrategy;
-use task::RevealStrategy;
-use task::SpawnInTerminal;
-use terminal_view::terminal_panel::TerminalPanel;
use ui::{
prelude::*, IconButtonShape, List, ListItem, ListSeparator, Modal, ModalHeader, Scrollbar,
ScrollbarState, Section, Tooltip,
@@ -43,7 +30,6 @@ use workspace::OpenOptions;
use workspace::Toast;
use workspace::{notifications::DetachAndPromptErr, ModalView, Workspace};
-use crate::open_dev_server_project;
use crate::ssh_connections::connect_over_ssh;
use crate::ssh_connections::open_ssh_project;
use crate::ssh_connections::RemoteSettingsContent;
@@ -61,6 +47,7 @@ pub struct RemoteServerProjects {
scroll_handle: ScrollHandle,
workspace: WeakView<Workspace>,
selectable_items: SelectableItemList,
+ retained_connections: Vec<Model<SshRemoteClient>>,
}
struct CreateRemoteServer {
@@ -210,11 +197,7 @@ impl ProjectPicker {
picker
});
let connection_string = connection.connection_string().into();
- let nickname = SshSettings::get_global(cx).nickname_for(
- &connection.host,
- connection.port,
- &connection.username,
- );
+ let nickname = connection.nickname.clone().map(|nick| nick.into());
let _path_task = cx
.spawn({
let workspace = workspace.clone();
@@ -370,6 +353,7 @@ impl RemoteServerProjects {
scroll_handle: ScrollHandle::new(),
workspace,
selectable_items: Default::default(),
+ retained_connections: Vec::new(),
}
}
@@ -426,7 +410,7 @@ impl RemoteServerProjects {
return;
}
};
- let ssh_prompt = cx.new_view(|cx| SshPrompt::new(&connection_options, None, cx));
+ let ssh_prompt = cx.new_view(|cx| SshPrompt::new(&connection_options, cx));
let connection = connect_over_ssh(
connection_options.remote_server_identifier(),
@@ -439,7 +423,7 @@ impl RemoteServerProjects {
let address_editor = editor.clone();
let creating = cx.spawn(move |this, mut cx| async move {
match connection.await {
- Some(_) => this
+ Some(Some(client)) => this
.update(&mut cx, |this, cx| {
let _ = this.workspace.update(cx, |workspace, _| {
workspace
@@ -447,14 +431,14 @@ impl RemoteServerProjects {
.telemetry()
.report_app_event("create ssh server".to_string())
});
-
+ this.retained_connections.push(client);
this.add_ssh_server(connection_options, cx);
this.mode = Mode::default_mode();
this.selectable_items.reset_selection();
cx.notify()
})
.log_err(),
- None => this
+ _ => this
.update(&mut cx, |this, cx| {
address_editor.update(cx, |this, _| {
this.set_read_only(false);
@@ -503,12 +487,11 @@ impl RemoteServerProjects {
return;
};
- let nickname = ssh_connection.nickname.clone();
let connection_options = ssh_connection.into();
workspace.update(cx, |_, cx| {
cx.defer(move |workspace, cx| {
workspace.toggle_modal(cx, |cx| {
- SshConnectionModal::new(&connection_options, Vec::new(), nickname, cx)
+ SshConnectionModal::new(&connection_options, Vec::new(), cx)
});
let prompt = workspace
.active_modal::<SshConnectionModal>(cx)
@@ -596,9 +579,7 @@ impl RemoteServerProjects {
self.create_ssh_server(state.address_editor.clone(), cx);
}
Mode::EditNickname(state) => {
- let text = Some(state.editor.read(cx).text(cx))
- .filter(|text| !text.is_empty())
- .map(SharedString::from);
+ let text = Some(state.editor.read(cx).text(cx)).filter(|text| !text.is_empty());
let index = state.index;
self.update_settings_file(cx, move |setting, _| {
if let Some(connections) = setting.ssh_connections.as_mut() {
@@ -645,7 +626,7 @@ impl RemoteServerProjects {
) -> impl IntoElement {
let (main_label, aux_label) = if let Some(nickname) = ssh_connection.nickname.clone() {
let aux_label = SharedString::from(format!("({})", ssh_connection.host));
- (nickname, Some(aux_label))
+ (nickname.into(), Some(aux_label))
} else {
(ssh_connection.host.clone(), None)
};
@@ -757,14 +738,13 @@ impl RemoteServerProjects {
};
let project = project.clone();
let server = server.clone();
- cx.spawn(|remote_server_projects, mut cx| async move {
- let nickname = server.nickname.clone();
+ cx.emit(DismissEvent);
+ cx.spawn(|_, mut cx| async move {
let result = open_ssh_project(
server.into(),
project.paths.into_iter().map(PathBuf::from).collect(),
app_state,
OpenOptions::default(),
- nickname,
&mut cx,
)
.await;
@@ -778,10 +758,6 @@ impl RemoteServerProjects {
)
.await
.ok();
- } else {
- remote_server_projects
- .update(&mut cx, |_, cx| cx.emit(DismissEvent))
- .ok();
}
})
.detach();
@@ -873,6 +849,7 @@ impl RemoteServerProjects {
projects: vec![],
nickname: None,
args: connection_options.args.unwrap_or_default(),
+ upload_binary_over_ssh: None,
})
});
}
@@ -965,7 +942,7 @@ impl RemoteServerProjects {
SshConnectionHeader {
connection_string: connection_string.clone(),
paths: Default::default(),
- nickname: connection.nickname.clone(),
+ nickname: connection.nickname.clone().map(|s| s.into()),
}
.render(cx),
)
@@ -1071,7 +1048,7 @@ impl RemoteServerProjects {
);
cx.spawn(|mut cx| async move {
- if confirmation.await.ok() == Some(1) {
+ if confirmation.await.ok() == Some(0) {
remote_servers
.update(&mut cx, |this, cx| {
this.delete_ssh_server(index, cx);
@@ -1147,13 +1124,14 @@ impl RemoteServerProjects {
};
let connection_string = connection.host.clone();
+ let nickname = connection.nickname.clone().map(|s| s.into());
v_flex()
.child(
SshConnectionHeader {
connection_string,
paths: Default::default(),
- nickname: connection.nickname.clone(),
+ nickname,
}
.render(cx),
)
@@ -1319,146 +1297,3 @@ impl Render for RemoteServerProjects {
})
}
}
-
-pub fn reconnect_to_dev_server_project(
- workspace: View<Workspace>,
- dev_server: DevServer,
- dev_server_project_id: DevServerProjectId,
- replace_current_window: bool,
- cx: &mut WindowContext,
-) -> Task<Result<()>> {
- let store = dev_server_projects::Store::global(cx);
- let reconnect = reconnect_to_dev_server(workspace.clone(), dev_server, cx);
- cx.spawn(|mut cx| async move {
- reconnect.await?;
-
- cx.background_executor()
- .timer(Duration::from_millis(1000))
- .await;
-
- if let Some(project_id) = store.update(&mut cx, |store, _| {
- store
- .dev_server_project(dev_server_project_id)
- .and_then(|p| p.project_id)
- })? {
- workspace
- .update(&mut cx, move |_, cx| {
- open_dev_server_project(
- replace_current_window,
- dev_server_project_id,
- project_id,
- cx,
- )
- })?
- .await?;
- }
-
- Ok(())
- })
-}
-
-pub fn reconnect_to_dev_server(
- workspace: View<Workspace>,
- dev_server: DevServer,
- cx: &mut WindowContext,
-) -> Task<Result<()>> {
- let Some(ssh_connection_string) = dev_server.ssh_connection_string else {
- return Task::ready(Err(anyhow!("Can't reconnect, no ssh_connection_string")));
- };
- let dev_server_store = dev_server_projects::Store::global(cx);
- let get_access_token = dev_server_store.update(cx, |store, cx| {
- store.regenerate_dev_server_token(dev_server.id, cx)
- });
-
- cx.spawn(|mut cx| async move {
- let access_token = get_access_token.await?.access_token;
-
- spawn_ssh_task(
- workspace,
- dev_server_store,
- dev_server.id,
- ssh_connection_string.to_string(),
- access_token,
- &mut cx,
- )
- .await
- })
-}
-
-pub async fn spawn_ssh_task(
- workspace: View<Workspace>,
- dev_server_store: Model<dev_server_projects::Store>,
- dev_server_id: DevServerId,
- ssh_connection_string: String,
- access_token: String,
- cx: &mut AsyncWindowContext,
-) -> Result<()> {
- let terminal_panel = workspace
- .update(cx, |workspace, cx| workspace.panel::<TerminalPanel>(cx))
- .ok()
- .flatten()
- .with_context(|| anyhow!("No terminal panel"))?;
-
- let command = "sh".to_string();
- let args = vec![
- "-x".to_string(),
- "-c".to_string(),
- format!(
- r#"~/.local/bin/zed -v >/dev/stderr || (curl -f https://zed.dev/install.sh || wget -qO- https://zed.dev/install.sh) | sh && ZED_HEADLESS=1 ~/.local/bin/zed --dev-server-token {}"#,
- access_token
- ),
- ];
-
- let ssh_connection_string = ssh_connection_string.to_string();
- let (command, args) = wrap_for_ssh(
- &SshCommand::DevServer(ssh_connection_string.clone()),
- Some((&command, &args)),
- None,
- HashMap::default(),
- None,
- );
-
- let terminal = terminal_panel
- .update(cx, |terminal_panel, cx| {
- terminal_panel.spawn_in_new_terminal(
- SpawnInTerminal {
- id: task::TaskId("ssh-remote".into()),
- full_label: "Install zed over ssh".into(),
- label: "Install zed over ssh".into(),
- command,
- args,
- command_label: ssh_connection_string.clone(),
- cwd: None,
- use_new_terminal: true,
- allow_concurrent_runs: false,
- reveal: RevealStrategy::Always,
- hide: HideStrategy::Never,
- env: Default::default(),
- shell: Default::default(),
- },
- cx,
- )
- })?
- .await?;
-
- terminal
- .update(cx, |terminal, cx| terminal.wait_for_completed_task(cx))?
- .await;
-
- // There's a race-condition between the task completing successfully, and the server sending us the online status. Make it less likely we'll show the error state.
- if dev_server_store.update(cx, |this, _| this.dev_server_status(dev_server_id))?
- == DevServerStatus::Offline
- {
- cx.background_executor()
- .timer(Duration::from_millis(200))
- .await
- }
-
- if dev_server_store.update(cx, |this, _| this.dev_server_status(dev_server_id))?
- == DevServerStatus::Offline
- {
- return Err(anyhow!("couldn't reconnect"))?;
- }
-
- Ok(())
-}
@@ -26,15 +26,9 @@ use ui::{
};
use workspace::{AppState, ModalView, Workspace};
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
-pub struct RemoteServerSettings {
- pub download_on_host: Option<bool>,
-}
-
#[derive(Deserialize)]
pub struct SshSettings {
pub ssh_connections: Option<Vec<SshConnection>>,
- pub remote_server: Option<RemoteServerSettings>,
}
impl SshSettings {
@@ -42,39 +36,31 @@ impl SshSettings {
self.ssh_connections.clone().into_iter().flatten()
}
- pub fn args_for(
- &self,
- host: &str,
- port: Option<u16>,
- user: &Option<String>,
- ) -> Option<Vec<String>> {
- self.ssh_connections()
- .filter_map(|conn| {
- if conn.host == host && &conn.username == user && conn.port == port {
- Some(conn.args)
- } else {
- None
- }
- })
- .next()
- }
-
- pub fn nickname_for(
+ pub fn connection_options_for(
&self,
- host: &str,
+ host: String,
port: Option<u16>,
- user: &Option<String>,
- ) -> Option<SharedString> {
- self.ssh_connections()
- .filter_map(|conn| {
- if conn.host == host && &conn.username == user && conn.port == port {
- Some(conn.nickname)
- } else {
- None
- }
- })
- .next()
- .flatten()
+ username: Option<String>,
+ ) -> SshConnectionOptions {
+ for conn in self.ssh_connections() {
+ if conn.host == host && conn.username == username && conn.port == port {
+ return SshConnectionOptions {
+ nickname: conn.nickname,
+ upload_binary_over_ssh: conn.upload_binary_over_ssh.unwrap_or_default(),
+ args: Some(conn.args),
+ host,
+ port,
+ username,
+ password: None,
+ };
+ }
+ }
+ SshConnectionOptions {
+ host,
+ port,
+ username,
+ ..Default::default()
+ }
}
}
@@ -85,13 +71,20 @@ pub struct SshConnection {
pub username: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub port: Option<u16>,
- pub projects: Vec<SshProject>,
- /// Name to use for this server in UI.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub nickname: Option<SharedString>,
#[serde(skip_serializing_if = "Vec::is_empty")]
#[serde(default)]
pub args: Vec<String>,
+ #[serde(default)]
+ pub projects: Vec<SshProject>,
+ /// Name to use for this server in UI.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub nickname: Option<String>,
+ // By default Zed will download the binary to the host directly.
+ // If this is set to true, Zed will download the binary to your local machine,
+ // and then upload it over the SSH connection. Useful if your SSH server has
+ // limited outbound internet access.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub upload_binary_over_ssh: Option<bool>,
}
impl From<SshConnection> for SshConnectionOptions {
@@ -102,6 +95,8 @@ impl From<SshConnection> for SshConnectionOptions {
port: val.port,
password: None,
args: Some(val.args),
+ nickname: val.nickname,
+ upload_binary_over_ssh: val.upload_binary_over_ssh.unwrap_or_default(),
}
}
}
@@ -114,7 +109,6 @@ pub struct SshProject {
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct RemoteSettingsContent {
pub ssh_connections: Option<Vec<SshConnection>>,
- pub remote_server: Option<RemoteServerSettings>,
}
impl Settings for SshSettings {
@@ -153,10 +147,10 @@ pub struct SshConnectionModal {
impl SshPrompt {
pub(crate) fn new(
connection_options: &SshConnectionOptions,
- nickname: Option<SharedString>,
cx: &mut ViewContext<Self>,
) -> Self {
let connection_string = connection_options.connection_string().into();
+ let nickname = connection_options.nickname.clone().map(|s| s.into());
Self {
connection_string,
@@ -276,11 +270,10 @@ impl SshConnectionModal {
pub(crate) fn new(
connection_options: &SshConnectionOptions,
paths: Vec<PathBuf>,
- nickname: Option<SharedString>,
cx: &mut ViewContext<Self>,
) -> Self {
Self {
- prompt: cx.new_view(|cx| SshPrompt::new(connection_options, nickname, cx)),
+ prompt: cx.new_view(|cx| SshPrompt::new(connection_options, cx)),
finished: false,
paths,
}
@@ -451,13 +444,17 @@ impl remote::SshClientDelegate for SshClientDelegate {
fn get_server_binary(
&self,
platform: SshPlatform,
+ upload_binary_over_ssh: bool,
cx: &mut AsyncAppContext,
) -> oneshot::Receiver<Result<(ServerBinary, SemanticVersion)>> {
let (tx, rx) = oneshot::channel();
let this = self.clone();
cx.spawn(|mut cx| async move {
- tx.send(this.get_server_binary_impl(platform, &mut cx).await)
- .ok();
+ tx.send(
+ this.get_server_binary_impl(platform, upload_binary_over_ssh, &mut cx)
+ .await,
+ )
+ .ok();
})
.detach();
rx
@@ -492,19 +489,14 @@ impl SshClientDelegate {
async fn get_server_binary_impl(
&self,
platform: SshPlatform,
+ upload_binary_via_ssh: bool,
cx: &mut AsyncAppContext,
) -> Result<(ServerBinary, SemanticVersion)> {
- let (version, release_channel, download_binary_on_host) = cx.update(|cx| {
+ let (version, release_channel) = cx.update(|cx| {
let version = AppVersion::global(cx);
let channel = ReleaseChannel::global(cx);
- let ssh_settings = SshSettings::get_global(cx);
- let download_binary_on_host = ssh_settings
- .remote_server
- .as_ref()
- .and_then(|server| server.download_on_host)
- .unwrap_or(false);
- (version, channel, download_binary_on_host)
+ (version, channel)
})?;
// In dev mode, build the remote server binary from source
@@ -517,23 +509,57 @@ impl SshClientDelegate {
}
}
- if download_binary_on_host {
- let (request_url, request_body) = AutoUpdater::get_latest_remote_server_release_url(
+ // For nightly channel, always get latest
+ let current_version = if release_channel == ReleaseChannel::Nightly {
+ None
+ } else {
+ Some(version)
+ };
+
+ self.update_status(
+ Some(&format!("Checking remote server release {}", version)),
+ cx,
+ );
+
+ if upload_binary_via_ssh {
+ let binary_path = AutoUpdater::download_remote_server_release(
platform.os,
platform.arch,
release_channel,
+ current_version,
cx,
)
.await
.map_err(|e| {
anyhow!(
- "Failed to get remote server binary download url (os: {}, arch: {}): {}",
+ "Failed to download remote server binary (version: {}, os: {}, arch: {}): {}",
+ version,
platform.os,
platform.arch,
e
)
})?;
+ Ok((ServerBinary::LocalBinary(binary_path), version))
+ } else {
+ let (request_url, request_body) = AutoUpdater::get_remote_server_release_url(
+ platform.os,
+ platform.arch,
+ release_channel,
+ current_version,
+ cx,
+ )
+ .await
+ .map_err(|e| {
+ anyhow!(
+ "Failed to get remote server binary download url (version: {}, os: {}, arch: {}): {}",
+ version,
+ platform.os,
+ platform.arch,
+ e
+ )
+ })?;
+
Ok((
ServerBinary::ReleaseUrl {
url: request_url,
@@ -541,25 +567,6 @@ impl SshClientDelegate {
},
version,
))
- } else {
- self.update_status(Some("Checking for latest version of remote server"), cx);
- let binary_path = AutoUpdater::get_latest_remote_server_release(
- platform.os,
- platform.arch,
- release_channel,
- cx,
- )
- .await
- .map_err(|e| {
- anyhow!(
- "Failed to download remote server binary (os: {}, arch: {}): {}",
- platform.os,
- platform.arch,
- e
- )
- })?;
-
- Ok((ServerBinary::LocalBinary(binary_path), version))
}
}
@@ -624,7 +631,7 @@ impl SshClientDelegate {
self.update_status(
Some(&format!(
- "Building remote server binary from source for {}",
+ "Building remote server binary from source for {} with Docker",
&triple
)),
cx,
@@ -700,7 +707,6 @@ pub async fn open_ssh_project(
paths: Vec<PathBuf>,
app_state: Arc<AppState>,
open_options: workspace::OpenOptions,
- nickname: Option<SharedString>,
cx: &mut AsyncAppContext,
) -> Result<()> {
let window = if let Some(window) = open_options.replace_window {
@@ -725,12 +731,11 @@ pub async fn open_ssh_project(
let (cancel_tx, cancel_rx) = oneshot::channel();
let delegate = window.update(cx, {
let connection_options = connection_options.clone();
- let nickname = nickname.clone();
let paths = paths.clone();
move |workspace, cx| {
cx.activate_window();
workspace.toggle_modal(cx, |cx| {
- SshConnectionModal::new(&connection_options, paths, nickname.clone(), cx)
+ SshConnectionModal::new(&connection_options, paths, cx)
});
let ui = workspace
@@ -13,17 +13,18 @@ use futures::{
mpsc::{self, Sender, UnboundedReceiver, UnboundedSender},
oneshot,
},
- future::BoxFuture,
+ future::{BoxFuture, Shared},
select, select_biased, AsyncReadExt as _, Future, FutureExt as _, StreamExt as _,
};
use gpui::{
- AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, SemanticVersion, Task,
- WeakModel,
+ AppContext, AsyncAppContext, BorrowAppContext, Context, EventEmitter, Global, Model,
+ ModelContext, SemanticVersion, Task, WeakModel,
};
use parking_lot::Mutex;
use rpc::{
proto::{self, build_typed_envelope, Envelope, EnvelopedMessage, PeerId, RequestMessage},
- AnyProtoClient, EntityMessageSubscriber, ProtoClient, ProtoMessageHandlerSet, RpcError,
+ AnyProtoClient, EntityMessageSubscriber, ErrorExt, ProtoClient, ProtoMessageHandlerSet,
+ RpcError,
};
use smol::{
fs,
@@ -56,13 +57,16 @@ pub struct SshSocket {
socket_path: PathBuf,
}
-#[derive(Debug, Default, Clone, PartialEq, Eq)]
+#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
pub struct SshConnectionOptions {
pub host: String,
pub username: Option<String>,
pub port: Option<u16>,
pub password: Option<String>,
pub args: Option<Vec<String>>,
+
+ pub nickname: Option<String>,
+ pub upload_binary_over_ssh: bool,
}
impl SshConnectionOptions {
@@ -140,8 +144,10 @@ impl SshConnectionOptions {
host: hostname.to_string(),
username: username.clone(),
port,
- password: None,
args: Some(args),
+ password: None,
+ nickname: None,
+ upload_binary_over_ssh: false,
})
}
@@ -235,6 +241,7 @@ pub trait SshClientDelegate: Send + Sync {
fn get_server_binary(
&self,
platform: SshPlatform,
+ upload_binary_over_ssh: bool,
cx: &mut AsyncAppContext,
) -> oneshot::Receiver<Result<(ServerBinary, SemanticVersion)>>;
fn set_status(&self, status: Option<&str>, cx: &mut AsyncAppContext);
@@ -290,7 +297,7 @@ const MAX_RECONNECT_ATTEMPTS: usize = 3;
enum State {
Connecting,
Connected {
- ssh_connection: Box<dyn SshRemoteProcess>,
+ ssh_connection: Arc<dyn RemoteConnection>,
delegate: Arc<dyn SshClientDelegate>,
multiplex_task: Task<Result<()>>,
@@ -299,7 +306,7 @@ enum State {
HeartbeatMissed {
missed_heartbeats: usize,
- ssh_connection: Box<dyn SshRemoteProcess>,
+ ssh_connection: Arc<dyn RemoteConnection>,
delegate: Arc<dyn SshClientDelegate>,
multiplex_task: Task<Result<()>>,
@@ -307,7 +314,7 @@ enum State {
},
Reconnecting,
ReconnectFailed {
- ssh_connection: Box<dyn SshRemoteProcess>,
+ ssh_connection: Arc<dyn RemoteConnection>,
delegate: Arc<dyn SshClientDelegate>,
error: anyhow::Error,
@@ -332,7 +339,7 @@ impl fmt::Display for State {
}
impl State {
- fn ssh_connection(&self) -> Option<&dyn SshRemoteProcess> {
+ fn ssh_connection(&self) -> Option<&dyn RemoteConnection> {
match self {
Self::Connected { ssh_connection, .. } => Some(ssh_connection.as_ref()),
Self::HeartbeatMissed { ssh_connection, .. } => Some(ssh_connection.as_ref()),
@@ -462,7 +469,7 @@ impl SshRemoteClient {
connection_options: SshConnectionOptions,
cancellation: oneshot::Receiver<()>,
delegate: Arc<dyn SshClientDelegate>,
- cx: &AppContext,
+ cx: &mut AppContext,
) -> Task<Result<Option<Model<Self>>>> {
cx.spawn(|mut cx| async move {
let success = Box::pin(async move {
@@ -479,17 +486,28 @@ impl SshRemoteClient {
state: Arc::new(Mutex::new(Some(State::Connecting))),
})?;
- let (ssh_connection, io_task) = Self::establish_connection(
+ let ssh_connection = cx
+ .update(|cx| {
+ cx.update_default_global(|pool: &mut ConnectionPool, cx| {
+ pool.connect(connection_options, &delegate, cx)
+ })
+ })?
+ .await
+ .map_err(|e| e.cloned())?;
+ let remote_binary_path = ssh_connection
+ .get_remote_binary_path(&delegate, false, &mut cx)
+ .await?;
+
+ let io_task = ssh_connection.start_proxy(
+ remote_binary_path,
unique_identifier,
false,
- connection_options,
incoming_tx,
outgoing_rx,
connection_activity_tx,
delegate.clone(),
&mut cx,
- )
- .await?;
+ );
let multiplex_task = Self::monitor(this.downgrade(), io_task, &cx);
@@ -578,7 +596,7 @@ impl SshRemoteClient {
}
let state = lock.take().unwrap();
- let (attempts, mut ssh_connection, delegate) = match state {
+ let (attempts, ssh_connection, delegate) = match state {
State::Connected {
ssh_connection,
delegate,
@@ -624,7 +642,7 @@ impl SshRemoteClient {
log::info!("Trying to reconnect to ssh server... Attempt {}", attempts);
- let identifier = self.unique_identifier.clone();
+ let unique_identifier = self.unique_identifier.clone();
let client = self.client.clone();
let reconnect_task = cx.spawn(|this, mut cx| async move {
macro_rules! failed {
@@ -652,19 +670,33 @@ impl SshRemoteClient {
let (incoming_tx, incoming_rx) = mpsc::unbounded::<Envelope>();
let (connection_activity_tx, connection_activity_rx) = mpsc::channel::<()>(1);
- let (ssh_connection, io_task) = match Self::establish_connection(
- identifier,
- true,
- connection_options,
- incoming_tx,
- outgoing_rx,
- connection_activity_tx,
- delegate.clone(),
- &mut cx,
- )
+ let (ssh_connection, io_task) = match async {
+ let ssh_connection = cx
+ .update_global(|pool: &mut ConnectionPool, cx| {
+ pool.connect(connection_options, &delegate, cx)
+ })?
+ .await
+ .map_err(|error| error.cloned())?;
+
+ let remote_binary_path = ssh_connection
+ .get_remote_binary_path(&delegate, true, &mut cx)
+ .await?;
+
+ let io_task = ssh_connection.start_proxy(
+ remote_binary_path,
+ unique_identifier,
+ true,
+ incoming_tx,
+ outgoing_rx,
+ connection_activity_tx,
+ delegate.clone(),
+ &mut cx,
+ );
+ anyhow::Ok((ssh_connection, io_task))
+ }
.await
{
- Ok((ssh_connection, ssh_process)) => (ssh_connection, ssh_process),
+ Ok((ssh_connection, io_task)) => (ssh_connection, io_task),
Err(error) => {
failed!(error, attempts, ssh_connection, delegate);
}
@@ -834,108 +866,6 @@ impl SshRemoteClient {
}
}
- fn multiplex(
- mut ssh_proxy_process: Child,
- incoming_tx: UnboundedSender<Envelope>,
- mut outgoing_rx: UnboundedReceiver<Envelope>,
- mut connection_activity_tx: Sender<()>,
- cx: &AsyncAppContext,
- ) -> Task<Result<i32>> {
- let mut child_stderr = ssh_proxy_process.stderr.take().unwrap();
- let mut child_stdout = ssh_proxy_process.stdout.take().unwrap();
- let mut child_stdin = ssh_proxy_process.stdin.take().unwrap();
-
- let mut stdin_buffer = Vec::new();
- let mut stdout_buffer = Vec::new();
- let mut stderr_buffer = Vec::new();
- let mut stderr_offset = 0;
-
- let stdin_task = cx.background_executor().spawn(async move {
- while let Some(outgoing) = outgoing_rx.next().await {
- write_message(&mut child_stdin, &mut stdin_buffer, outgoing).await?;
- }
- anyhow::Ok(())
- });
-
- let stdout_task = cx.background_executor().spawn({
- let mut connection_activity_tx = connection_activity_tx.clone();
- async move {
- loop {
- stdout_buffer.resize(MESSAGE_LEN_SIZE, 0);
- let len = child_stdout.read(&mut stdout_buffer).await?;
-
- if len == 0 {
- return anyhow::Ok(());
- }
-
- if len < MESSAGE_LEN_SIZE {
- child_stdout.read_exact(&mut stdout_buffer[len..]).await?;
- }
-
- let message_len = message_len_from_buffer(&stdout_buffer);
- let envelope =
- read_message_with_len(&mut child_stdout, &mut stdout_buffer, message_len)
- .await?;
- connection_activity_tx.try_send(()).ok();
- incoming_tx.unbounded_send(envelope).ok();
- }
- }
- });
-
- let stderr_task: Task<anyhow::Result<()>> = cx.background_executor().spawn(async move {
- loop {
- stderr_buffer.resize(stderr_offset + 1024, 0);
-
- let len = child_stderr
- .read(&mut stderr_buffer[stderr_offset..])
- .await?;
- if len == 0 {
- return anyhow::Ok(());
- }
-
- stderr_offset += len;
- let mut start_ix = 0;
- while let Some(ix) = stderr_buffer[start_ix..stderr_offset]
- .iter()
- .position(|b| b == &b'\n')
- {
- let line_ix = start_ix + ix;
- let content = &stderr_buffer[start_ix..line_ix];
- start_ix = line_ix + 1;
- if let Ok(record) = serde_json::from_slice::<LogRecord>(content) {
- record.log(log::logger())
- } else {
- eprintln!("(remote) {}", String::from_utf8_lossy(content));
- }
- }
- stderr_buffer.drain(0..start_ix);
- stderr_offset -= start_ix;
-
- connection_activity_tx.try_send(()).ok();
- }
- });
-
- cx.spawn(|_| async move {
- let result = futures::select! {
- result = stdin_task.fuse() => {
- result.context("stdin")
- }
- result = stdout_task.fuse() => {
- result.context("stdout")
- }
- result = stderr_task.fuse() => {
- result.context("stderr")
- }
- };
-
- let status = ssh_proxy_process.status().await?.code().unwrap_or(1);
- match result {
- Ok(_) => Ok(status),
- Err(error) => Err(error),
- }
- })
- }
-
fn monitor(
this: WeakModel<Self>,
io_task: Task<Result<i32>>,
@@ -1005,75 +935,6 @@ impl SshRemoteClient {
cx.notify();
}
- #[allow(clippy::too_many_arguments)]
- async fn establish_connection(
- unique_identifier: String,
- reconnect: bool,
- connection_options: SshConnectionOptions,
- incoming_tx: UnboundedSender<Envelope>,
- outgoing_rx: UnboundedReceiver<Envelope>,
- connection_activity_tx: Sender<()>,
- delegate: Arc<dyn SshClientDelegate>,
- cx: &mut AsyncAppContext,
- ) -> Result<(Box<dyn SshRemoteProcess>, Task<Result<i32>>)> {
- #[cfg(any(test, feature = "test-support"))]
- if let Some(fake) = fake::SshRemoteConnection::new(&connection_options) {
- let io_task = fake::SshRemoteConnection::multiplex(
- fake.connection_options(),
- incoming_tx,
- outgoing_rx,
- connection_activity_tx,
- cx,
- )
- .await;
- return Ok((fake, io_task));
- }
-
- let ssh_connection =
- SshRemoteConnection::new(connection_options, delegate.clone(), cx).await?;
-
- let platform = ssh_connection.query_platform().await?;
- let remote_binary_path = delegate.remote_server_binary_path(platform, cx)?;
- if !reconnect {
- ssh_connection
- .ensure_server_binary(&delegate, &remote_binary_path, platform, cx)
- .await?;
- }
-
- let socket = ssh_connection.socket.clone();
- run_cmd(socket.ssh_command(&remote_binary_path).arg("version")).await?;
-
- delegate.set_status(Some("Starting proxy"), cx);
-
- let mut start_proxy_command = format!(
- "RUST_LOG={} RUST_BACKTRACE={} {:?} proxy --identifier {}",
- std::env::var("RUST_LOG").unwrap_or_default(),
- std::env::var("RUST_BACKTRACE").unwrap_or_default(),
- remote_binary_path,
- unique_identifier,
- );
- if reconnect {
- start_proxy_command.push_str(" --reconnect");
- }
-
- let ssh_proxy_process = socket
- .ssh_command(start_proxy_command)
- // IMPORTANT: we kill this process when we drop the task that uses it.
- .kill_on_drop(true)
- .spawn()
- .context("failed to spawn remote server")?;
-
- let io_task = Self::multiplex(
- ssh_proxy_process,
- incoming_tx,
- outgoing_rx,
- connection_activity_tx,
- &cx,
- );
-
- Ok((Box::new(ssh_connection), io_task))
- }
-
pub fn subscribe_to_entity<E: 'static>(&self, remote_id: u64, entity: &Model<E>) {
self.client.subscribe_to_entity(remote_id, entity);
}
@@ -1112,15 +973,21 @@ impl SshRemoteClient {
#[cfg(any(test, feature = "test-support"))]
pub fn simulate_disconnect(&self, client_cx: &mut AppContext) -> Task<()> {
- let port = self.connection_options().port.unwrap();
+ let opts = self.connection_options();
client_cx.spawn(|cx| async move {
- let (channel, server_cx) = cx
- .update_global(|c: &mut fake::ServerConnections, _| c.get(port))
+ let connection = cx
+ .update_global(|c: &mut ConnectionPool, _| {
+ if let Some(ConnectionPoolEntry::Connecting(c)) = c.connections.get(&opts) {
+ c.clone()
+ } else {
+ panic!("missing test connection")
+ }
+ })
+ .unwrap()
+ .await
.unwrap();
- let (outgoing_tx, _) = mpsc::unbounded::<Envelope>();
- let (_, incoming_rx) = mpsc::unbounded::<Envelope>();
- channel.reconnect(incoming_rx, outgoing_tx, &server_cx);
+ connection.simulate_disconnect(&cx);
})
}
@@ -1128,78 +995,190 @@ impl SshRemoteClient {
pub fn fake_server(
client_cx: &mut gpui::TestAppContext,
server_cx: &mut gpui::TestAppContext,
- ) -> (u16, Arc<ChannelClient>) {
- use gpui::BorrowAppContext;
+ ) -> (SshConnectionOptions, Arc<ChannelClient>) {
+ let port = client_cx
+ .update(|cx| cx.default_global::<ConnectionPool>().connections.len() as u16 + 1);
+ let opts = SshConnectionOptions {
+ host: "<fake>".to_string(),
+ port: Some(port),
+ ..Default::default()
+ };
let (outgoing_tx, _) = mpsc::unbounded::<Envelope>();
let (_, incoming_rx) = mpsc::unbounded::<Envelope>();
let server_client =
server_cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "fake-server"));
- let port = client_cx.update(|cx| {
- cx.update_default_global(|c: &mut fake::ServerConnections, _| {
- c.push(server_client.clone(), server_cx.to_async())
+ let connection: Arc<dyn RemoteConnection> = Arc::new(fake::FakeRemoteConnection {
+ connection_options: opts.clone(),
+ server_cx: fake::SendableCx::new(server_cx.to_async()),
+ server_channel: server_client.clone(),
+ });
+
+ client_cx.update(|cx| {
+ cx.update_default_global(|c: &mut ConnectionPool, cx| {
+ c.connections.insert(
+ opts.clone(),
+ ConnectionPoolEntry::Connecting(
+ cx.foreground_executor()
+ .spawn({
+ let connection = connection.clone();
+ async move { Ok(connection.clone()) }
+ })
+ .shared(),
+ ),
+ );
})
});
- (port, server_client)
+
+ (opts, server_client)
}
#[cfg(any(test, feature = "test-support"))]
- pub async fn fake_client(port: u16, client_cx: &mut gpui::TestAppContext) -> Model<Self> {
+ pub async fn fake_client(
+ opts: SshConnectionOptions,
+ client_cx: &mut gpui::TestAppContext,
+ ) -> Model<Self> {
let (_tx, rx) = oneshot::channel();
client_cx
- .update(|cx| {
- Self::new(
- "fake".to_string(),
- SshConnectionOptions {
- host: "<fake>".to_string(),
- port: Some(port),
- ..Default::default()
- },
- rx,
- Arc::new(fake::Delegate),
- cx,
- )
- })
+ .update(|cx| Self::new("fake".to_string(), opts, rx, Arc::new(fake::Delegate), cx))
.await
.unwrap()
.unwrap()
}
}
+enum ConnectionPoolEntry {
+ Connecting(Shared<Task<Result<Arc<dyn RemoteConnection>, Arc<anyhow::Error>>>>),
+ Connected(Weak<dyn RemoteConnection>),
+}
+
+#[derive(Default)]
+struct ConnectionPool {
+ connections: HashMap<SshConnectionOptions, ConnectionPoolEntry>,
+}
+
+impl Global for ConnectionPool {}
+
+impl ConnectionPool {
+ pub fn connect(
+ &mut self,
+ opts: SshConnectionOptions,
+ delegate: &Arc<dyn SshClientDelegate>,
+ cx: &mut AppContext,
+ ) -> Shared<Task<Result<Arc<dyn RemoteConnection>, Arc<anyhow::Error>>>> {
+ let connection = self.connections.get(&opts);
+ match connection {
+ Some(ConnectionPoolEntry::Connecting(task)) => {
+ let delegate = delegate.clone();
+ cx.spawn(|mut cx| async move {
+ delegate.set_status(Some("Waiting for existing connection attempt"), &mut cx);
+ })
+ .detach();
+ return task.clone();
+ }
+ Some(ConnectionPoolEntry::Connected(ssh)) => {
+ if let Some(ssh) = ssh.upgrade() {
+ if !ssh.has_been_killed() {
+ return Task::ready(Ok(ssh)).shared();
+ }
+ }
+ self.connections.remove(&opts);
+ }
+ None => {}
+ }
+
+ let task = cx
+ .spawn({
+ let opts = opts.clone();
+ let delegate = delegate.clone();
+ |mut cx| async move {
+ let connection = SshRemoteConnection::new(opts.clone(), delegate, &mut cx)
+ .await
+ .map(|connection| Arc::new(connection) as Arc<dyn RemoteConnection>);
+
+ cx.update_global(|pool: &mut Self, _| {
+ debug_assert!(matches!(
+ pool.connections.get(&opts),
+ Some(ConnectionPoolEntry::Connecting(_))
+ ));
+ match connection {
+ Ok(connection) => {
+ pool.connections.insert(
+ opts.clone(),
+ ConnectionPoolEntry::Connected(Arc::downgrade(&connection)),
+ );
+ Ok(connection)
+ }
+ Err(error) => {
+ pool.connections.remove(&opts);
+ Err(Arc::new(error))
+ }
+ }
+ })?
+ }
+ })
+ .shared();
+
+ self.connections
+ .insert(opts.clone(), ConnectionPoolEntry::Connecting(task.clone()));
+ task
+ }
+}
+
impl From<SshRemoteClient> for AnyProtoClient {
fn from(client: SshRemoteClient) -> Self {
AnyProtoClient::new(client.client.clone())
}
}
-#[async_trait]
-trait SshRemoteProcess: Send + Sync {
- async fn kill(&mut self) -> Result<()>;
+#[async_trait(?Send)]
+trait RemoteConnection: Send + Sync {
+ #[allow(clippy::too_many_arguments)]
+ fn start_proxy(
+ &self,
+ remote_binary_path: PathBuf,
+ unique_identifier: String,
+ reconnect: bool,
+ incoming_tx: UnboundedSender<Envelope>,
+ outgoing_rx: UnboundedReceiver<Envelope>,
+ connection_activity_tx: Sender<()>,
+ delegate: Arc<dyn SshClientDelegate>,
+ cx: &mut AsyncAppContext,
+ ) -> Task<Result<i32>>;
+ async fn get_remote_binary_path(
+ &self,
+ delegate: &Arc<dyn SshClientDelegate>,
+ reconnect: bool,
+ cx: &mut AsyncAppContext,
+ ) -> Result<PathBuf>;
+ async fn kill(&self) -> Result<()>;
+ fn has_been_killed(&self) -> bool;
fn ssh_args(&self) -> Vec<String>;
fn connection_options(&self) -> SshConnectionOptions;
+
+ #[cfg(any(test, feature = "test-support"))]
+ fn simulate_disconnect(&self, _: &AsyncAppContext) {}
}
struct SshRemoteConnection {
socket: SshSocket,
- master_process: process::Child,
+ master_process: Mutex<Option<process::Child>>,
+ platform: SshPlatform,
_temp_dir: TempDir,
}
-impl Drop for SshRemoteConnection {
- fn drop(&mut self) {
- if let Err(error) = self.master_process.kill() {
- log::error!("failed to kill SSH master process: {}", error);
- }
+#[async_trait(?Send)]
+impl RemoteConnection for SshRemoteConnection {
+ async fn kill(&self) -> Result<()> {
+ let Some(mut process) = self.master_process.lock().take() else {
+ return Ok(());
+ };
+ process.kill().ok();
+ process.status().await?;
+ Ok(())
}
-}
-
-#[async_trait]
-impl SshRemoteProcess for SshRemoteConnection {
- async fn kill(&mut self) -> Result<()> {
- self.master_process.kill()?;
-
- self.master_process.status().await?;
- Ok(())
+ fn has_been_killed(&self) -> bool {
+ self.master_process.lock().is_none()
}
fn ssh_args(&self) -> Vec<String> {
@@ -1209,28 +1188,94 @@ impl SshRemoteProcess for SshRemoteConnection {
fn connection_options(&self) -> SshConnectionOptions {
self.socket.connection_options.clone()
}
-}
-
-impl SshRemoteConnection {
- #[cfg(not(unix))]
- async fn new(
- _connection_options: SshConnectionOptions,
- _delegate: Arc<dyn SshClientDelegate>,
- _cx: &mut AsyncAppContext,
- ) -> Result<Self> {
- Err(anyhow!("ssh is not supported on this platform"))
- }
- #[cfg(unix)]
- async fn new(
- connection_options: SshConnectionOptions,
- delegate: Arc<dyn SshClientDelegate>,
+ async fn get_remote_binary_path(
+ &self,
+ delegate: &Arc<dyn SshClientDelegate>,
+ reconnect: bool,
cx: &mut AsyncAppContext,
- ) -> Result<Self> {
- use futures::AsyncWriteExt as _;
- use futures::{io::BufReader, AsyncBufReadExt as _};
- use smol::{fs::unix::PermissionsExt as _, net::unix::UnixListener};
- use util::ResultExt as _;
+ ) -> Result<PathBuf> {
+ let platform = self.platform;
+ let remote_binary_path = delegate.remote_server_binary_path(platform, cx)?;
+ if !reconnect {
+ self.ensure_server_binary(&delegate, &remote_binary_path, platform, cx)
+ .await?;
+ }
+
+ let socket = self.socket.clone();
+ run_cmd(socket.ssh_command(&remote_binary_path).arg("version")).await?;
+ Ok(remote_binary_path)
+ }
+
+ fn start_proxy(
+ &self,
+ remote_binary_path: PathBuf,
+ unique_identifier: String,
+ reconnect: bool,
+ incoming_tx: UnboundedSender<Envelope>,
+ outgoing_rx: UnboundedReceiver<Envelope>,
+ connection_activity_tx: Sender<()>,
+ delegate: Arc<dyn SshClientDelegate>,
+ cx: &mut AsyncAppContext,
+ ) -> Task<Result<i32>> {
+ delegate.set_status(Some("Starting proxy"), cx);
+
+ let mut start_proxy_command = format!(
+ "RUST_LOG={} {} {:?} proxy --identifier {}",
+ std::env::var("RUST_LOG").unwrap_or_default(),
+ std::env::var("RUST_BACKTRACE")
+ .map(|b| { format!("RUST_BACKTRACE={}", b) })
+ .unwrap_or_default(),
+ remote_binary_path,
+ unique_identifier,
+ );
+ if reconnect {
+ start_proxy_command.push_str(" --reconnect");
+ }
+
+ let ssh_proxy_process = match self
+ .socket
+ .ssh_command(start_proxy_command)
+ // IMPORTANT: we kill this process when we drop the task that uses it.
+ .kill_on_drop(true)
+ .spawn()
+ {
+ Ok(process) => process,
+ Err(error) => {
+ return Task::ready(Err(anyhow!("failed to spawn remote server: {}", error)))
+ }
+ };
+
+ Self::multiplex(
+ ssh_proxy_process,
+ incoming_tx,
+ outgoing_rx,
+ connection_activity_tx,
+ &cx,
+ )
+ }
+}
+
+impl SshRemoteConnection {
+ #[cfg(not(unix))]
+ async fn new(
+ _connection_options: SshConnectionOptions,
+ _delegate: Arc<dyn SshClientDelegate>,
+ _cx: &mut AsyncAppContext,
+ ) -> Result<Self> {
+ Err(anyhow!("ssh is not supported on this platform"))
+ }
+
+ #[cfg(unix)]
+ async fn new(
+ connection_options: SshConnectionOptions,
+ delegate: Arc<dyn SshClientDelegate>,
+ cx: &mut AsyncAppContext,
+ ) -> Result<Self> {
+ use futures::AsyncWriteExt as _;
+ use futures::{io::BufReader, AsyncBufReadExt as _};
+ use smol::{fs::unix::PermissionsExt as _, net::unix::UnixListener};
+ use util::ResultExt as _;
delegate.set_status(Some("Connecting"), cx);
@@ -1305,6 +1350,7 @@ impl SshRemoteConnection {
])
.arg(format!("ControlPath={}", socket_path.display()))
.arg(&url)
+ .kill_on_drop(true)
.spawn()?;
// Wait for this ssh process to close its stdout, indicating that authentication
@@ -1348,16 +1394,139 @@ impl SshRemoteConnection {
Err(anyhow!(error_message))?;
}
+ let socket = SshSocket {
+ connection_options,
+ socket_path,
+ };
+
+ let os = run_cmd(socket.ssh_command("uname").arg("-s")).await?;
+ let arch = run_cmd(socket.ssh_command("uname").arg("-m")).await?;
+
+ let os = match os.trim() {
+ "Darwin" => "macos",
+ "Linux" => "linux",
+ _ => Err(anyhow!("unknown uname os {os:?}"))?,
+ };
+ let arch = if arch.starts_with("arm") || arch.starts_with("aarch64") {
+ "aarch64"
+ } else if arch.starts_with("x86") || arch.starts_with("i686") {
+ "x86_64"
+ } else {
+ Err(anyhow!("unknown uname architecture {arch:?}"))?
+ };
+
+ let platform = SshPlatform { os, arch };
+
Ok(Self {
- socket: SshSocket {
- connection_options,
- socket_path,
- },
- master_process,
+ socket,
+ master_process: Mutex::new(Some(master_process)),
+ platform,
_temp_dir: temp_dir,
})
}
+ fn multiplex(
+ mut ssh_proxy_process: Child,
+ incoming_tx: UnboundedSender<Envelope>,
+ mut outgoing_rx: UnboundedReceiver<Envelope>,
+ mut connection_activity_tx: Sender<()>,
+ cx: &AsyncAppContext,
+ ) -> Task<Result<i32>> {
+ let mut child_stderr = ssh_proxy_process.stderr.take().unwrap();
+ let mut child_stdout = ssh_proxy_process.stdout.take().unwrap();
+ let mut child_stdin = ssh_proxy_process.stdin.take().unwrap();
+
+ let mut stdin_buffer = Vec::new();
+ let mut stdout_buffer = Vec::new();
+ let mut stderr_buffer = Vec::new();
+ let mut stderr_offset = 0;
+
+ let stdin_task = cx.background_executor().spawn(async move {
+ while let Some(outgoing) = outgoing_rx.next().await {
+ write_message(&mut child_stdin, &mut stdin_buffer, outgoing).await?;
+ }
+ anyhow::Ok(())
+ });
+
+ let stdout_task = cx.background_executor().spawn({
+ let mut connection_activity_tx = connection_activity_tx.clone();
+ async move {
+ loop {
+ stdout_buffer.resize(MESSAGE_LEN_SIZE, 0);
+ let len = child_stdout.read(&mut stdout_buffer).await?;
+
+ if len == 0 {
+ return anyhow::Ok(());
+ }
+
+ if len < MESSAGE_LEN_SIZE {
+ child_stdout.read_exact(&mut stdout_buffer[len..]).await?;
+ }
+
+ let message_len = message_len_from_buffer(&stdout_buffer);
+ let envelope =
+ read_message_with_len(&mut child_stdout, &mut stdout_buffer, message_len)
+ .await?;
+ connection_activity_tx.try_send(()).ok();
+ incoming_tx.unbounded_send(envelope).ok();
+ }
+ }
+ });
+
+ let stderr_task: Task<anyhow::Result<()>> = cx.background_executor().spawn(async move {
+ loop {
+ stderr_buffer.resize(stderr_offset + 1024, 0);
+
+ let len = child_stderr
+ .read(&mut stderr_buffer[stderr_offset..])
+ .await?;
+ if len == 0 {
+ return anyhow::Ok(());
+ }
+
+ stderr_offset += len;
+ let mut start_ix = 0;
+ while let Some(ix) = stderr_buffer[start_ix..stderr_offset]
+ .iter()
+ .position(|b| b == &b'\n')
+ {
+ let line_ix = start_ix + ix;
+ let content = &stderr_buffer[start_ix..line_ix];
+ start_ix = line_ix + 1;
+ if let Ok(record) = serde_json::from_slice::<LogRecord>(content) {
+ record.log(log::logger())
+ } else {
+ eprintln!("(remote) {}", String::from_utf8_lossy(content));
+ }
+ }
+ stderr_buffer.drain(0..start_ix);
+ stderr_offset -= start_ix;
+
+ connection_activity_tx.try_send(()).ok();
+ }
+ });
+
+ cx.spawn(|_| async move {
+ let result = futures::select! {
+ result = stdin_task.fuse() => {
+ result.context("stdin")
+ }
+ result = stdout_task.fuse() => {
+ result.context("stdout")
+ }
+ result = stderr_task.fuse() => {
+ result.context("stderr")
+ }
+ };
+
+ let status = ssh_proxy_process.status().await?.code().unwrap_or(1);
+ match result {
+ Ok(_) => Ok(status),
+ Err(error) => Err(error),
+ }
+ })
+ }
+
async fn ensure_server_binary(
&self,
delegate: &Arc<dyn SshClientDelegate>,
@@ -1366,11 +1535,14 @@ impl SshRemoteConnection {
cx: &mut AsyncAppContext,
) -> Result<()> {
let lock_file = dst_path.with_extension("lock");
- let timestamp = SystemTime::now()
- .duration_since(UNIX_EPOCH)
- .unwrap()
- .as_secs();
- let lock_content = timestamp.to_string();
+ let lock_content = {
+ let timestamp = SystemTime::now()
+ .duration_since(UNIX_EPOCH)
+ .context("failed to get timestamp")?
+ .as_secs();
+ let source_port = self.get_ssh_source_port().await?;
+ format!("{} {}", source_port, timestamp)
+ };
let lock_stale_age = Duration::from_secs(10 * 60);
let max_wait_time = Duration::from_secs(10 * 60);
@@ -1380,6 +1552,7 @@ impl SshRemoteConnection {
loop {
let lock_acquired = self.create_lock_file(&lock_file, &lock_content).await?;
if lock_acquired {
+ delegate.set_status(Some("Acquired lock file on host"), cx);
let result = self
.update_server_binary_if_needed(delegate, dst_path, platform, cx)
.await;
@@ -1390,6 +1563,10 @@ impl SshRemoteConnection {
} else {
if let Ok(is_stale) = self.is_lock_stale(&lock_file, &lock_stale_age).await {
if is_stale {
+ delegate.set_status(
+ Some("Detected lock file on host being stale. Removing"),
+ cx,
+ );
self.remove_lock_file(&lock_file).await?;
continue;
} else {
@@ -1420,18 +1597,29 @@ impl SshRemoteConnection {
}
}
+ async fn get_ssh_source_port(&self) -> Result<String> {
+ let output = run_cmd(
+ self.socket
+ .ssh_command("sh")
+ .arg("-c")
+ .arg(r#""echo $SSH_CLIENT | cut -d' ' -f2""#),
+ )
+ .await
+ .context("failed to get source port from SSH_CLIENT on host")?;
+
+ Ok(output.trim().to_string())
+ }
+
async fn create_lock_file(&self, lock_file: &Path, content: &str) -> Result<bool> {
let parent_dir = lock_file
.parent()
.ok_or_else(|| anyhow!("Lock file path has no parent directory"))?;
- // Be mindful of the escaping here: we need to make sure that we have quotes
- // inside the string, so that `sh -c` gets a quoted string passed to it.
let script = format!(
- "\"mkdir -p '{0}' && [ ! -f '{1}' ] && echo '{2}' > '{1}' && echo 'created' || echo 'exists'\"",
- parent_dir.display(),
- lock_file.display(),
- content
+ r#"'mkdir -p "{parent_dir}" && [ ! -f "{lock_file}" ] && echo "{content}" > "{lock_file}" && echo "created" || echo "exists"'"#,
+ parent_dir = parent_dir.display(),
+ lock_file = lock_file.display(),
+ content = content,
);
let output = run_cmd(self.socket.ssh_command("sh").arg("-c").arg(&script))
@@ -1441,24 +1629,56 @@ impl SshRemoteConnection {
Ok(output.trim() == "created")
}
- async fn is_lock_stale(&self, lock_file: &Path, max_age: &Duration) -> Result<bool> {
- let threshold = max_age.as_secs();
+ fn generate_stale_check_script(lock_file: &Path, max_age: u64) -> String {
+ format!(
+ r#"
+ if [ ! -f "{lock_file}" ]; then
+ echo "lock file does not exist"
+ exit 0
+ fi
+
+ read -r port timestamp < "{lock_file}"
+
+ # Check if port is still active
+ if command -v ss >/dev/null 2>&1; then
+ if ! ss -n | grep -q ":$port[[:space:]]"; then
+ echo "ss reports port $port is not open"
+ exit 0
+ fi
+ elif command -v netstat >/dev/null 2>&1; then
+ if ! netstat -n | grep -q ":$port[[:space:]]"; then
+ echo "netstat reports port $port is not open"
+ exit 0
+ fi
+ fi
+
+ # Check timestamp
+ if [ $(( $(date +%s) - timestamp )) -gt {max_age} ]; then
+ echo "timestamp in lockfile is too old"
+ else
+ echo "recent"
+ fi"#,
+ lock_file = lock_file.display(),
+ max_age = max_age
+ )
+ }
- // Be mindful of the escaping here: we need to make sure that we have quotes
- // inside the string, so that `sh -c` gets a quoted string passed to it.
+ async fn is_lock_stale(&self, lock_file: &Path, max_age: &Duration) -> Result<bool> {
let script = format!(
- "\"[ -f '{0}' ] && [ $(( $(date +%s) - $(date -r '{0}' +%s) )) -gt {1} ] && echo 'stale' || echo 'recent'\"",
- lock_file.display(),
- threshold
+ "'{}'",
+ Self::generate_stale_check_script(lock_file, max_age.as_secs())
);
- let output = run_cmd(self.socket.ssh_command("sh").arg("-c").arg(script))
+ let output = run_cmd(self.socket.ssh_command("sh").arg("-c").arg(&script))
.await
.with_context(|| {
format!("failed to check whether lock file {:?} is stale", lock_file)
})?;
- Ok(output.trim() == "stale")
+ let trimmed = output.trim();
+ let is_stale = trimmed != "recent";
+ log::info!("checked lockfile for staleness. stale: {is_stale}, output: {trimmed:?}");
+ Ok(is_stale)
}
async fn remove_lock_file(&self, lock_file: &Path) -> Result<()> {
@@ -1,6 +1,6 @@
use anyhow::{anyhow, Result};
use fs::Fs;
-use gpui::{AppContext, AsyncAppContext, Context, Model, ModelContext, PromptLevel};
+use gpui::{AppContext, AsyncAppContext, Context as _, Model, ModelContext, PromptLevel};
use http_client::HttpClient;
use language::{proto::serialize_operation, Buffer, BufferEvent, LanguageRegistry};
use node_runtime::NodeRuntime;
@@ -10,7 +10,7 @@ use project::{
search::SearchQuery,
task_store::TaskStore,
worktree_store::WorktreeStore,
- LspStore, LspStoreEvent, PrettierStore, ProjectPath, WorktreeId,
+ LspStore, LspStoreEvent, PrettierStore, ProjectPath, ToolchainStore, WorktreeId,
};
use remote::ssh_session::ChannelClient;
use rpc::{
@@ -108,11 +108,14 @@ impl HeadlessProject {
observer.shared(SSH_PROJECT_ID, session.clone().into(), cx);
observer
});
+ let toolchain_store =
+ cx.new_model(|cx| ToolchainStore::local(languages.clone(), worktree_store.clone(), cx));
let lsp_store = cx.new_model(|cx| {
let mut lsp_store = LspStore::new_local(
buffer_store.clone(),
worktree_store.clone(),
prettier_store.clone(),
+ toolchain_store.clone(),
environment,
languages.clone(),
http_client,
@@ -143,10 +146,11 @@ impl HeadlessProject {
session.subscribe_to_entity(SSH_PROJECT_ID, &cx.handle());
session.subscribe_to_entity(SSH_PROJECT_ID, &lsp_store);
session.subscribe_to_entity(SSH_PROJECT_ID, &task_store);
+ session.subscribe_to_entity(SSH_PROJECT_ID, &toolchain_store);
session.subscribe_to_entity(SSH_PROJECT_ID, &settings_observer);
client.add_request_handler(cx.weak_model(), Self::handle_list_remote_directory);
- client.add_request_handler(cx.weak_model(), Self::handle_check_file_exists);
+ client.add_request_handler(cx.weak_model(), Self::handle_get_path_metadata);
client.add_request_handler(cx.weak_model(), Self::handle_shutdown_remote_server);
client.add_request_handler(cx.weak_model(), Self::handle_ping);
@@ -166,6 +170,7 @@ impl HeadlessProject {
SettingsObserver::init(&client);
LspStore::init(&client);
TaskStore::init(Some(&client));
+ ToolchainStore::init(&client);
HeadlessProject {
session: client,
@@ -520,18 +525,20 @@ impl HeadlessProject {
Ok(proto::ListRemoteDirectoryResponse { entries })
}
- pub async fn handle_check_file_exists(
+ pub async fn handle_get_path_metadata(
this: Model<Self>,
- envelope: TypedEnvelope<proto::CheckFileExists>,
+ envelope: TypedEnvelope<proto::GetPathMetadata>,
cx: AsyncAppContext,
- ) -> Result<proto::CheckFileExistsResponse> {
+ ) -> Result<proto::GetPathMetadataResponse> {
let fs = cx.read_model(&this, |this, _| this.fs.clone())?;
let expanded = shellexpand::tilde(&envelope.payload.path).to_string();
- let exists = fs.is_file(&PathBuf::from(expanded.clone())).await;
+ let metadata = fs.metadata(&PathBuf::from(expanded.clone())).await?;
+ let is_dir = metadata.map(|metadata| metadata.is_dir).unwrap_or(false);
- Ok(proto::CheckFileExistsResponse {
- exists,
+ Ok(proto::GetPathMetadataResponse {
+ exists: metadata.is_some(),
+ is_dir,
path: expanded,
})
}
@@ -26,7 +26,29 @@ use std::{
#[gpui::test]
async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
- let (project, _headless, fs) = init_test(cx, server_cx).await;
+ let fs = FakeFs::new(server_cx.executor());
+ fs.insert_tree(
+ "/code",
+ json!({
+ "project1": {
+ ".git": {},
+ "README.md": "# project 1",
+ "src": {
+ "lib.rs": "fn one() -> usize { 1 }"
+ }
+ },
+ "project2": {
+ "README.md": "# project 2",
+ },
+ }),
+ )
+ .await;
+ fs.set_index_for_repo(
+ Path::new("/code/project1/.git"),
+ &[(Path::new("src/lib.rs"), "fn one() -> usize { 0 }".into())],
+ );
+
+ let (project, _headless) = init_test(&fs, cx, server_cx).await;
let (worktree, _) = project
.update(cx, |project, cx| {
project.find_or_create_worktree("/code/project1", true, cx)
@@ -128,7 +150,22 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test
#[gpui::test]
async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
- let (project, headless, _) = init_test(cx, server_cx).await;
+ let fs = FakeFs::new(server_cx.executor());
+ fs.insert_tree(
+ "/code",
+ json!({
+ "project1": {
+ ".git": {},
+ "README.md": "# project 1",
+ "src": {
+ "lib.rs": "fn one() -> usize { 1 }"
+ }
+ },
+ }),
+ )
+ .await;
+
+ let (project, headless) = init_test(&fs, cx, server_cx).await;
project
.update(cx, |project, cx| {
@@ -193,7 +230,22 @@ async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut Tes
#[gpui::test]
async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
- let (project, headless, fs) = init_test(cx, server_cx).await;
+ let fs = FakeFs::new(server_cx.executor());
+ fs.insert_tree(
+ "/code",
+ json!({
+ "project1": {
+ ".git": {},
+ "README.md": "# project 1",
+ "src": {
+ "lib.rs": "fn one() -> usize { 1 }"
+ }
+ },
+ }),
+ )
+ .await;
+
+ let (project, headless) = init_test(&fs, cx, server_cx).await;
cx.update_global(|settings_store: &mut SettingsStore, cx| {
settings_store.set_user_settings(
@@ -304,7 +356,22 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo
#[gpui::test]
async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
- let (project, headless, fs) = init_test(cx, server_cx).await;
+ let fs = FakeFs::new(server_cx.executor());
+ fs.insert_tree(
+ "/code",
+ json!({
+ "project1": {
+ ".git": {},
+ "README.md": "# project 1",
+ "src": {
+ "lib.rs": "fn one() -> usize { 1 }"
+ }
+ },
+ }),
+ )
+ .await;
+
+ let (project, headless) = init_test(&fs, cx, server_cx).await;
fs.insert_tree(
"/code/project1/.zed",
@@ -463,7 +530,22 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext
#[gpui::test]
async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
- let (project, _headless, fs) = init_test(cx, server_cx).await;
+ let fs = FakeFs::new(server_cx.executor());
+ fs.insert_tree(
+ "/code",
+ json!({
+ "project1": {
+ ".git": {},
+ "README.md": "# project 1",
+ "src": {
+ "lib.rs": "fn one() -> usize { 1 }"
+ }
+ },
+ }),
+ )
+ .await;
+
+ let (project, _headless) = init_test(&fs, cx, server_cx).await;
let (worktree, _) = project
.update(cx, |project, cx| {
project.find_or_create_worktree("/code/project1", true, cx)
@@ -522,8 +604,26 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont
}
#[gpui::test]
-async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
- let (project, _headless, _fs) = init_test(cx, server_cx).await;
+async fn test_remote_resolve_path_in_buffer(
+ cx: &mut TestAppContext,
+ server_cx: &mut TestAppContext,
+) {
+ let fs = FakeFs::new(server_cx.executor());
+ fs.insert_tree(
+ "/code",
+ json!({
+ "project1": {
+ ".git": {},
+ "README.md": "# project 1",
+ "src": {
+ "lib.rs": "fn one() -> usize { 1 }"
+ }
+ },
+ }),
+ )
+ .await;
+
+ let (project, _headless) = init_test(&fs, cx, server_cx).await;
let (worktree, _) = project
.update(cx, |project, cx| {
project.find_or_create_worktree("/code/project1", true, cx)
@@ -542,10 +642,11 @@ async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut
let path = project
.update(cx, |project, cx| {
- project.resolve_existing_file_path("/code/project1/README.md", &buffer, cx)
+ project.resolve_path_in_buffer("/code/project1/README.md", &buffer, cx)
})
.await
.unwrap();
+ assert!(path.is_file());
assert_eq!(
path.abs_path().unwrap().to_string_lossy(),
"/code/project1/README.md"
@@ -553,20 +654,100 @@ async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut
let path = project
.update(cx, |project, cx| {
- project.resolve_existing_file_path("../README.md", &buffer, cx)
+ project.resolve_path_in_buffer("../README.md", &buffer, cx)
})
.await
.unwrap();
-
+ assert!(path.is_file());
assert_eq!(
path.project_path().unwrap().clone(),
ProjectPath::from((worktree_id, "README.md"))
);
+
+ let path = project
+ .update(cx, |project, cx| {
+ project.resolve_path_in_buffer("../src", &buffer, cx)
+ })
+ .await
+ .unwrap();
+ assert_eq!(
+ path.project_path().unwrap().clone(),
+ ProjectPath::from((worktree_id, "src"))
+ );
+ assert!(path.is_dir());
+}
+
+#[gpui::test]
+async fn test_remote_resolve_abs_path(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
+ let fs = FakeFs::new(server_cx.executor());
+ fs.insert_tree(
+ "/code",
+ json!({
+ "project1": {
+ ".git": {},
+ "README.md": "# project 1",
+ "src": {
+ "lib.rs": "fn one() -> usize { 1 }"
+ }
+ },
+ }),
+ )
+ .await;
+
+ let (project, _headless) = init_test(&fs, cx, server_cx).await;
+
+ let path = project
+ .update(cx, |project, cx| {
+ project.resolve_abs_path("/code/project1/README.md", cx)
+ })
+ .await
+ .unwrap();
+
+ assert!(path.is_file());
+ assert_eq!(
+ path.abs_path().unwrap().to_string_lossy(),
+ "/code/project1/README.md"
+ );
+
+ let path = project
+ .update(cx, |project, cx| {
+ project.resolve_abs_path("/code/project1/src", cx)
+ })
+ .await
+ .unwrap();
+
+ assert!(path.is_dir());
+ assert_eq!(
+ path.abs_path().unwrap().to_string_lossy(),
+ "/code/project1/src"
+ );
+
+ let path = project
+ .update(cx, |project, cx| {
+ project.resolve_abs_path("/code/project1/DOESNOTEXIST", cx)
+ })
+ .await;
+ assert!(path.is_none());
}
#[gpui::test(iterations = 10)]
async fn test_canceling_buffer_opening(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
- let (project, _headless, _fs) = init_test(cx, server_cx).await;
+ let fs = FakeFs::new(server_cx.executor());
+ fs.insert_tree(
+ "/code",
+ json!({
+ "project1": {
+ ".git": {},
+ "README.md": "# project 1",
+ "src": {
+ "lib.rs": "fn one() -> usize { 1 }"
+ }
+ },
+ }),
+ )
+ .await;
+
+ let (project, _headless) = init_test(&fs, cx, server_cx).await;
let (worktree, _) = project
.update(cx, |project, cx| {
project.find_or_create_worktree("/code/project1", true, cx)
@@ -597,7 +778,25 @@ async fn test_adding_then_removing_then_adding_worktrees(
cx: &mut TestAppContext,
server_cx: &mut TestAppContext,
) {
- let (project, _headless, _fs) = init_test(cx, server_cx).await;
+ let fs = FakeFs::new(server_cx.executor());
+ fs.insert_tree(
+ "/code",
+ json!({
+ "project1": {
+ ".git": {},
+ "README.md": "# project 1",
+ "src": {
+ "lib.rs": "fn one() -> usize { 1 }"
+ }
+ },
+ "project2": {
+ "README.md": "# project 2",
+ },
+ }),
+ )
+ .await;
+
+ let (project, _headless) = init_test(&fs, cx, server_cx).await;
let (_worktree, _) = project
.update(cx, |project, cx| {
project.find_or_create_worktree("/code/project1", true, cx)
@@ -636,9 +835,25 @@ async fn test_adding_then_removing_then_adding_worktrees(
#[gpui::test]
async fn test_open_server_settings(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
- let (project, _headless, _fs) = init_test(cx, server_cx).await;
+ let fs = FakeFs::new(server_cx.executor());
+ fs.insert_tree(
+ "/code",
+ json!({
+ "project1": {
+ ".git": {},
+ "README.md": "# project 1",
+ "src": {
+ "lib.rs": "fn one() -> usize { 1 }"
+ }
+ },
+ }),
+ )
+ .await;
+
+ let (project, _headless) = init_test(&fs, cx, server_cx).await;
let buffer = project.update(cx, |project, cx| project.open_server_settings(cx));
cx.executor().run_until_parked();
+
let buffer = buffer.await.unwrap();
cx.update(|cx| {
@@ -651,7 +866,22 @@ async fn test_open_server_settings(cx: &mut TestAppContext, server_cx: &mut Test
#[gpui::test(iterations = 20)]
async fn test_reconnect(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
- let (project, _headless, fs) = init_test(cx, server_cx).await;
+ let fs = FakeFs::new(server_cx.executor());
+ fs.insert_tree(
+ "/code",
+ json!({
+ "project1": {
+ ".git": {},
+ "README.md": "# project 1",
+ "src": {
+ "lib.rs": "fn one() -> usize { 1 }"
+ }
+ },
+ }),
+ )
+ .await;
+
+ let (project, _headless) = init_test(&fs, cx, server_cx).await;
let (worktree, _) = project
.update(cx, |project, cx| {
@@ -690,19 +920,8 @@ async fn test_reconnect(cx: &mut TestAppContext, server_cx: &mut TestAppContext)
);
}
-fn init_logger() {
- if std::env::var("RUST_LOG").is_ok() {
- env_logger::try_init().ok();
- }
-}
-
-async fn init_test(
- cx: &mut TestAppContext,
- server_cx: &mut TestAppContext,
-) -> (Model<Project>, Model<HeadlessProject>, Arc<FakeFs>) {
- init_logger();
-
- let (forwarder, ssh_server_client) = SshRemoteClient::fake_server(cx, server_cx);
+#[gpui::test]
+async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
let fs = FakeFs::new(server_cx.executor());
fs.insert_tree(
"/code",
@@ -710,32 +929,109 @@ async fn init_test(
"project1": {
".git": {},
"README.md": "# project 1",
- "src": {
- "lib.rs": "fn one() -> usize { 1 }"
- }
- },
- "project2": {
- "README.md": "# project 2",
},
}),
)
.await;
- fs.set_index_for_repo(
- Path::new("/code/project1/.git"),
- &[(Path::new("src/lib.rs"), "fn one() -> usize { 0 }".into())],
- );
- server_cx.update(HeadlessProject::init);
+ let (project, headless_project) = init_test(&fs, cx, server_cx).await;
+ let branches = ["main", "dev", "feature-1"];
+ fs.insert_branches(Path::new("/code/project1/.git"), &branches);
+
+ let (worktree, _) = project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree("/code/project1", true, cx)
+ })
+ .await
+ .unwrap();
+
+ let worktree_id = cx.update(|cx| worktree.read(cx).id());
+ let root_path = ProjectPath::root_path(worktree_id);
+ // Give the worktree a bit of time to index the file system
+ cx.run_until_parked();
+
+ let remote_branches = project
+ .update(cx, |project, cx| project.branches(root_path.clone(), cx))
+ .await
+ .unwrap();
+
+ let new_branch = branches[2];
+
+ let remote_branches = remote_branches
+ .into_iter()
+ .map(|branch| branch.name)
+ .collect::<Vec<_>>();
+
+ assert_eq!(&remote_branches, &branches);
+
+ cx.update(|cx| {
+ project.update(cx, |project, cx| {
+ project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx)
+ })
+ })
+ .await
+ .unwrap();
+
+ cx.run_until_parked();
+
+ let server_branch = server_cx.update(|cx| {
+ headless_project.update(cx, |headless_project, cx| {
+ headless_project
+ .worktree_store
+ .update(cx, |worktree_store, cx| {
+ worktree_store
+ .current_branch(root_path.clone(), cx)
+ .unwrap()
+ })
+ })
+ });
+
+ assert_eq!(server_branch.as_ref(), branches[2]);
+
+ // Also try creating a new branch
+ cx.update(|cx| {
+ project.update(cx, |project, cx| {
+ project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx)
+ })
+ })
+ .await
+ .unwrap();
+
+ cx.run_until_parked();
+
+ let server_branch = server_cx.update(|cx| {
+ headless_project.update(cx, |headless_project, cx| {
+ headless_project
+ .worktree_store
+ .update(cx, |worktree_store, cx| {
+ worktree_store.current_branch(root_path, cx).unwrap()
+ })
+ })
+ });
+
+ assert_eq!(server_branch.as_ref(), "totally-new-branch");
+}
+
+pub async fn init_test(
+ server_fs: &Arc<FakeFs>,
+ cx: &mut TestAppContext,
+ server_cx: &mut TestAppContext,
+) -> (Model<Project>, Model<HeadlessProject>) {
+ let server_fs = server_fs.clone();
+ init_logger();
+
+ let (opts, ssh_server_client) = SshRemoteClient::fake_server(cx, server_cx);
let http_client = Arc::new(BlockedHttpClient);
let node_runtime = NodeRuntime::unavailable();
let languages = Arc::new(LanguageRegistry::new(cx.executor()));
+ server_cx.update(HeadlessProject::init);
let headless = server_cx.new_model(|cx| {
client::init_settings(cx);
HeadlessProject::new(
crate::HeadlessAppState {
session: ssh_server_client,
- fs: fs.clone(),
+ fs: server_fs.clone(),
http_client,
node_runtime,
languages,
@@ -744,7 +1040,7 @@ async fn init_test(
)
});
- let ssh = SshRemoteClient::fake_client(forwarder, cx).await;
+ let ssh = SshRemoteClient::fake_client(opts, cx).await;
let project = build_project(ssh, cx);
project
.update(cx, {
@@ -752,13 +1048,21 @@ async fn init_test(
|_, cx| cx.on_release(|_, _| drop(headless))
})
.detach();
- (project, headless, fs)
+ (project, headless)
+}
+
+fn init_logger() {
+ if std::env::var("RUST_LOG").is_ok() {
+ env_logger::try_init().ok();
+ }
}
fn build_project(ssh: Model<SshRemoteClient>, cx: &mut TestAppContext) -> Model<Project> {
cx.update(|cx| {
- let settings_store = SettingsStore::test(cx);
- cx.set_global(settings_store);
+ if !cx.has_global::<SettingsStore>() {
+ let settings_store = SettingsStore::test(cx);
+ cx.set_global(settings_store);
+ }
});
let client = cx.update(|cx| {
@@ -773,6 +1077,7 @@ fn build_project(ssh: Model<SshRemoteClient>, cx: &mut TestAppContext) -> Model<
let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
let languages = Arc::new(LanguageRegistry::test(cx.executor()));
let fs = FakeFs::new(cx.executor());
+
cx.update(|cx| {
Project::init(&client, cx);
language::init(cx);
@@ -8,7 +8,7 @@ use client::telemetry::Telemetry;
use collections::{HashMap, HashSet};
use editor::{
display_map::{
- BlockContext, BlockDisposition, BlockId, BlockProperties, BlockStyle, CustomBlockId,
+ BlockContext, BlockId, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId,
RenderBlock,
},
scroll::Autoscroll,
@@ -90,12 +90,11 @@ impl EditorBlock {
let invalidation_anchor = buffer.read(cx).read(cx).anchor_before(next_row_start);
let block = BlockProperties {
- position: code_range.end,
+ placement: BlockPlacement::Below(code_range.end),
// Take up at least one height for status, allow the editor to determine the real height based on the content from render
height: 1,
style: BlockStyle::Sticky,
render: Self::create_output_area_renderer(execution_view.clone(), on_close.clone()),
- disposition: BlockDisposition::Below,
priority: 0,
};
@@ -123,7 +123,6 @@ impl ProtoMessageHandlerSet {
let extract_entity_id = *this.entity_id_extractors.get(&payload_type_id)?;
let entity_type_id = *this.entity_types_by_message_type.get(&payload_type_id)?;
let entity_id = (extract_entity_id)(message.as_ref());
-
match this
.entities_by_type_and_remote_id
.get_mut(&(entity_type_id, entity_id))?
@@ -145,6 +144,26 @@ pub enum EntityMessageSubscriber {
Pending(Vec<Box<dyn AnyTypedEnvelope>>),
}
+impl std::fmt::Debug for EntityMessageSubscriber {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ EntityMessageSubscriber::Entity { handle } => f
+ .debug_struct("EntityMessageSubscriber::Entity")
+ .field("handle", handle)
+ .finish(),
+ EntityMessageSubscriber::Pending(vec) => f
+ .debug_struct("EntityMessageSubscriber::Pending")
+ .field(
+ "envelopes",
+ &vec.iter()
+ .map(|envelope| envelope.payload_type_name())
+ .collect::<Vec<_>>(),
+ )
+ .finish(),
+ }
+ }
+}
+
impl<T> From<Arc<T>> for AnyProtoClient
where
T: ProtoClient + 'static,
@@ -61,6 +61,7 @@ pub trait Settings: 'static + Send + Sync {
anyhow::anyhow!("missing default")
}
+ #[track_caller]
fn register(cx: &mut AppContext)
where
Self: Sized,
@@ -271,6 +272,7 @@ impl SettingsStore {
pub fn register_setting<T: Settings>(&mut self, cx: &mut AppContext) {
let setting_type_id = TypeId::of::<T>();
let entry = self.setting_values.entry(setting_type_id);
+
if matches!(entry, hash_map::Entry::Occupied(_)) {
return;
}
@@ -0,0 +1,83 @@
+use std::fmt::format;
+
+use gpui::{
+ colors, div, prelude::*, uniform_list, DefaultColor, DefaultThemeAppearance, Hsla, Render,
+ View, ViewContext, WindowContext,
+};
+use story::Story;
+use strum::IntoEnumIterator;
+use ui::{
+ h_flex, px, v_flex, AbsoluteLength, ActiveTheme, Color, DefiniteLength, Label, LabelCommon,
+};
+
+const LENGTH: usize = 100;
+
+pub struct IndentGuidesStory {
+ depths: Vec<usize>,
+}
+
+impl IndentGuidesStory {
+ pub fn view(cx: &mut WindowContext) -> View<Self> {
+ let mut depths = Vec::new();
+ depths.push(0);
+ depths.push(1);
+ depths.push(2);
+ for _ in 0..LENGTH - 6 {
+ depths.push(3);
+ }
+ depths.push(2);
+ depths.push(1);
+ depths.push(0);
+
+ cx.new_view(|_cx| Self { depths })
+ }
+}
+
+impl Render for IndentGuidesStory {
+ fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
+ Story::container()
+ .child(Story::title("Indent guides"))
+ .child(
+ v_flex().size_full().child(
+ uniform_list(
+ cx.view().clone(),
+ "some-list",
+ self.depths.len(),
+ |this, range, cx| {
+ this.depths
+ .iter()
+ .enumerate()
+ .skip(range.start)
+ .take(range.end - range.start)
+ .map(|(i, depth)| {
+ div()
+ .pl(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(
+ 16. * (*depth as f32),
+ ))))
+ .child(Label::new(format!("Item {}", i)).color(Color::Info))
+ })
+ .collect()
+ },
+ )
+ .with_sizing_behavior(gpui::ListSizingBehavior::Infer)
+ .with_decoration(ui::indent_guides(
+ cx.view().clone(),
+ px(16.),
+ ui::IndentGuideColors {
+ default: Color::Info.color(cx),
+ hovered: Color::Accent.color(cx),
+ active: Color::Accent.color(cx),
+ },
+ |this, range, cx| {
+ this.depths
+ .iter()
+ .skip(range.start)
+ .take(range.end - range.start)
+ .cloned()
+ .collect()
+ },
+ )),
+ ),
+ )
+ }
+}
@@ -59,6 +59,9 @@ impl ThemeColors {
search_match_background: neutral().light().step_5(),
panel_background: neutral().light().step_2(),
panel_focused_border: blue().light().step_5(),
+ panel_indent_guide: neutral().light_alpha().step_5(),
+ panel_indent_guide_hover: neutral().light_alpha().step_6(),
+ panel_indent_guide_active: neutral().light_alpha().step_6(),
pane_focused_border: blue().light().step_5(),
pane_group_border: neutral().light().step_6(),
scrollbar_thumb_background: neutral().light_alpha().step_3(),
@@ -162,6 +165,9 @@ impl ThemeColors {
search_match_background: neutral().dark().step_5(),
panel_background: neutral().dark().step_2(),
panel_focused_border: blue().dark().step_5(),
+ panel_indent_guide: neutral().dark_alpha().step_4(),
+ panel_indent_guide_hover: neutral().dark_alpha().step_6(),
+ panel_indent_guide_active: neutral().dark_alpha().step_6(),
pane_focused_border: blue().dark().step_5(),
pane_group_border: neutral().dark().step_6(),
scrollbar_thumb_background: neutral().dark_alpha().step_3(),
@@ -136,6 +136,9 @@ pub(crate) fn zed_default_dark() -> Theme {
terminal_ansi_dim_white: crate::neutral().dark().step_10(),
panel_background: bg,
panel_focused_border: blue,
+ panel_indent_guide: hsla(228. / 360., 8. / 100., 25. / 100., 1.),
+ panel_indent_guide_hover: hsla(225. / 360., 13. / 100., 12. / 100., 1.),
+ panel_indent_guide_active: hsla(225. / 360., 13. / 100., 12. / 100., 1.),
pane_focused_border: blue,
pane_group_border: hsla(225. / 360., 13. / 100., 12. / 100., 1.),
scrollbar_thumb_background: gpui::transparent_black(),
@@ -322,6 +322,15 @@ pub struct ThemeColorsContent {
#[serde(rename = "panel.focused_border")]
pub panel_focused_border: Option<String>,
+ #[serde(rename = "panel.indent_guide")]
+ pub panel_indent_guide: Option<String>,
+
+ #[serde(rename = "panel.indent_guide_hover")]
+ pub panel_indent_guide_hover: Option<String>,
+
+ #[serde(rename = "panel.indent_guide_active")]
+ pub panel_indent_guide_active: Option<String>,
+
#[serde(rename = "pane.focused_border")]
pub pane_focused_border: Option<String>,
@@ -710,6 +719,18 @@ impl ThemeColorsContent {
.panel_focused_border
.as_ref()
.and_then(|color| try_parse_color(color).ok()),
+ panel_indent_guide: self
+ .panel_indent_guide
+ .as_ref()
+ .and_then(|color| try_parse_color(color).ok()),
+ panel_indent_guide_hover: self
+ .panel_indent_guide_hover
+ .as_ref()
+ .and_then(|color| try_parse_color(color).ok()),
+ panel_indent_guide_active: self
+ .panel_indent_guide_active
+ .as_ref()
+ .and_then(|color| try_parse_color(color).ok()),
pane_focused_border: self
.pane_focused_border
.as_ref()
@@ -123,6 +123,9 @@ pub struct ThemeColors {
pub search_match_background: Hsla,
pub panel_background: Hsla,
pub panel_focused_border: Hsla,
+ pub panel_indent_guide: Hsla,
+ pub panel_indent_guide_hover: Hsla,
+ pub panel_indent_guide_active: Hsla,
pub pane_focused_border: Hsla,
pub pane_group_border: Hsla,
/// The color of the scrollbar thumb.
@@ -23,7 +23,6 @@ test-support = [
"gpui/test-support",
"http_client/test-support",
"project/test-support",
- "settings/test-support",
"util/test-support",
"workspace/test-support",
]
@@ -33,7 +32,6 @@ auto_update.workspace = true
call.workspace = true
client.workspace = true
command_palette.workspace = true
-dev_server_projects.workspace = true
extensions_ui.workspace = true
feedback.workspace = true
feature_flags.workspace = true
@@ -44,7 +42,6 @@ recent_projects.workspace = true
remote.workspace = true
rpc.workspace = true
serde.workspace = true
-settings.workspace = true
smallvec.workspace = true
story = { workspace = true, optional = true }
theme.workspace = true
@@ -285,8 +285,7 @@ impl TitleBar {
let room = room.read(cx);
let project = self.project.read(cx);
let is_local = project.is_local() || project.is_via_ssh();
- let is_dev_server_project = project.dev_server_project_id().is_some();
- let is_shared = (is_local || is_dev_server_project) && project.is_shared();
+ let is_shared = is_local && project.is_shared();
let is_muted = room.is_muted();
let is_deafened = room.is_deafened().unwrap_or(false);
let is_screen_sharing = room.is_screen_sharing();
@@ -299,7 +298,7 @@ impl TitleBar {
let mut children = Vec::new();
- if (is_local || is_dev_server_project) && can_share_projects {
+ if is_local && can_share_projects {
children.push(
Button::new(
"toggle_sharing",
@@ -18,10 +18,8 @@ use gpui::{
StatefulInteractiveElement, Styled, Subscription, View, ViewContext, VisualContext, WeakView,
};
use project::{Project, RepositoryEntry};
-use recent_projects::{OpenRemote, RecentProjects, SshSettings};
-use remote::SshConnectionOptions;
-use rpc::proto::{self, DevServerStatus};
-use settings::Settings;
+use recent_projects::{OpenRemote, RecentProjects};
+use rpc::proto;
use smallvec::SmallVec;
use std::sync::Arc;
use theme::ActiveTheme;
@@ -29,7 +27,7 @@ use ui::{
h_flex, prelude::*, Avatar, Button, ButtonLike, ButtonStyle, ContextMenu, Icon, IconName,
IconSize, IconWithIndicator, Indicator, PopoverMenu, Tooltip,
};
-use util::{maybe, ResultExt};
+use util::ResultExt;
use vcs_menu::{BranchList, OpenRecent as ToggleVcsMenu};
use workspace::{notifications::NotifyResultExt, Workspace};
@@ -268,15 +266,11 @@ impl TitleBar {
let options = self.project.read(cx).ssh_connection_options(cx)?;
let host: SharedString = options.connection_string().into();
- let nickname = maybe!({
- SshSettings::get_global(cx)
- .ssh_connections
- .as_ref()?
- .into_iter()
- .find(|connection| SshConnectionOptions::from((*connection).clone()) == options)
- .and_then(|connection| connection.nickname.clone())
- })
- .unwrap_or_else(|| host.clone());
+ let nickname = options
+ .nickname
+ .clone()
+ .map(|nick| nick.into())
+ .unwrap_or_else(|| host.clone());
let (indicator_color, meta) = match self.project.read(cx).ssh_connection_state(cx)? {
remote::ConnectionState::Connecting => (Color::Info, format!("Connecting to: {host}")),
@@ -334,39 +328,6 @@ impl TitleBar {
}
pub fn render_project_host(&self, cx: &mut ViewContext<Self>) -> Option<AnyElement> {
- if let Some(dev_server) =
- self.project
- .read(cx)
- .dev_server_project_id()
- .and_then(|dev_server_project_id| {
- dev_server_projects::Store::global(cx)
- .read(cx)
- .dev_server_for_project(dev_server_project_id)
- })
- {
- return Some(
- ButtonLike::new("dev_server_trigger")
- .child(Indicator::dot().color(
- if dev_server.status == DevServerStatus::Online {
- Color::Created
- } else {
- Color::Disabled
- },
- ))
- .child(
- Label::new(dev_server.name.clone())
- .size(LabelSize::Small)
- .line_height_style(LineHeightStyle::UiLabel),
- )
- .tooltip(move |cx| Tooltip::text("Project is hosted on a dev server", cx))
- .on_click(cx.listener(|this, _, cx| {
- if let Some(workspace) = this.workspace.upgrade() {
- recent_projects::RemoteServerProjects::open(workspace, cx)
- }
- }))
- .into_any_element(),
- );
- }
if self.project.read(cx).is_via_ssh() {
return self.render_ssh_project_host(cx);
}
@@ -486,7 +447,7 @@ impl TitleBar {
})
.on_click(move |_, cx| {
let _ = workspace.update(cx, |this, cx| {
- BranchList::open(this, &Default::default(), cx)
+ BranchList::open(this, &Default::default(), cx);
});
}),
)
@@ -0,0 +1,24 @@
+[package]
+name = "toolchain_selector"
+version = "0.1.0"
+edition = "2021"
+publish = false
+license = "GPL-3.0-or-later"
+
+[dependencies]
+editor.workspace = true
+fuzzy.workspace = true
+gpui.workspace = true
+language.workspace = true
+picker.workspace = true
+project.workspace = true
+ui.workspace = true
+util.workspace = true
+workspace.workspace = true
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/toolchain_selector.rs"
+doctest = false
@@ -0,0 +1,173 @@
+use editor::Editor;
+use gpui::{
+ div, AsyncWindowContext, EventEmitter, IntoElement, ParentElement, Render, Subscription, Task,
+ View, ViewContext, WeakModel, WeakView,
+};
+use language::{Buffer, BufferEvent, LanguageName, Toolchain};
+use project::WorktreeId;
+use ui::{Button, ButtonCommon, Clickable, FluentBuilder, LabelSize, Tooltip};
+use workspace::{item::ItemHandle, StatusItemView, Workspace};
+
+use crate::ToolchainSelector;
+
+pub struct ActiveToolchain {
+ active_toolchain: Option<Toolchain>,
+ workspace: WeakView<Workspace>,
+ active_buffer: Option<(WorktreeId, WeakModel<Buffer>, Subscription)>,
+ _observe_language_changes: Subscription,
+ _update_toolchain_task: Task<Option<()>>,
+}
+
+struct LanguageChanged;
+
+impl EventEmitter<LanguageChanged> for ActiveToolchain {}
+
+impl ActiveToolchain {
+ pub fn new(workspace: &Workspace, cx: &mut ViewContext<Self>) -> Self {
+ let view = cx.view().clone();
+ Self {
+ active_toolchain: None,
+ active_buffer: None,
+ workspace: workspace.weak_handle(),
+ _observe_language_changes: cx.subscribe(&view, |this, _, _: &LanguageChanged, cx| {
+ this._update_toolchain_task = Self::spawn_tracker_task(cx);
+ }),
+ _update_toolchain_task: Self::spawn_tracker_task(cx),
+ }
+ }
+ fn spawn_tracker_task(cx: &mut ViewContext<Self>) -> Task<Option<()>> {
+ cx.spawn(|this, mut cx| async move {
+ let active_file = this
+ .update(&mut cx, |this, _| {
+ this.active_buffer
+ .as_ref()
+ .map(|(_, buffer, _)| buffer.clone())
+ })
+ .ok()
+ .flatten()?;
+ let workspace = this
+ .update(&mut cx, |this, _| this.workspace.clone())
+ .ok()?;
+
+ let language_name = active_file
+ .update(&mut cx, |this, _| Some(this.language()?.name()))
+ .ok()
+ .flatten()?;
+
+ let worktree_id = active_file
+ .update(&mut cx, |this, cx| Some(this.file()?.worktree_id(cx)))
+ .ok()
+ .flatten()?;
+ let toolchain =
+ Self::active_toolchain(workspace, worktree_id, language_name, cx.clone()).await?;
+ let _ = this.update(&mut cx, |this, cx| {
+ this.active_toolchain = Some(toolchain);
+
+ cx.notify();
+ });
+ Some(())
+ })
+ }
+
+ fn update_lister(&mut self, editor: View<Editor>, cx: &mut ViewContext<Self>) {
+ let editor = editor.read(cx);
+ if let Some((_, buffer, _)) = editor.active_excerpt(cx) {
+ if let Some(worktree_id) = buffer.read(cx).file().map(|file| file.worktree_id(cx)) {
+ let subscription = cx.subscribe(&buffer, |_, _, event: &BufferEvent, cx| {
+ if let BufferEvent::LanguageChanged = event {
+ cx.emit(LanguageChanged)
+ }
+ });
+ self.active_buffer = Some((worktree_id, buffer.downgrade(), subscription));
+ cx.emit(LanguageChanged);
+ }
+ }
+
+ cx.notify();
+ }
+
+ fn active_toolchain(
+ workspace: WeakView<Workspace>,
+ worktree_id: WorktreeId,
+ language_name: LanguageName,
+ cx: AsyncWindowContext,
+ ) -> Task<Option<Toolchain>> {
+ cx.spawn(move |mut cx| async move {
+ let workspace_id = workspace
+ .update(&mut cx, |this, _| this.database_id())
+ .ok()
+ .flatten()?;
+ let selected_toolchain = workspace
+ .update(&mut cx, |this, cx| {
+ this.project()
+ .read(cx)
+ .active_toolchain(worktree_id, language_name.clone(), cx)
+ })
+ .ok()?
+ .await;
+ if let Some(toolchain) = selected_toolchain {
+ Some(toolchain)
+ } else {
+ let project = workspace
+ .update(&mut cx, |this, _| this.project().clone())
+ .ok()?;
+ let toolchains = cx
+ .update(|cx| {
+ project
+ .read(cx)
+ .available_toolchains(worktree_id, language_name, cx)
+ })
+ .ok()?
+ .await?;
+ if let Some(toolchain) = toolchains.toolchains.first() {
+ // Since we don't have a selected toolchain, pick one for user here.
+ workspace::WORKSPACE_DB
+ .set_toolchain(workspace_id, worktree_id, toolchain.clone())
+ .await
+ .ok()?;
+ project
+ .update(&mut cx, |this, cx| {
+ this.activate_toolchain(worktree_id, toolchain.clone(), cx)
+ })
+ .ok()?
+ .await;
+ }
+
+ toolchains.toolchains.first().cloned()
+ }
+ })
+ }
+}
+
+impl Render for ActiveToolchain {
+ fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
+ div().when_some(self.active_toolchain.as_ref(), |el, active_toolchain| {
+ el.child(
+ Button::new("change-toolchain", active_toolchain.name.clone())
+ .label_size(LabelSize::Small)
+ .on_click(cx.listener(|this, _, cx| {
+ if let Some(workspace) = this.workspace.upgrade() {
+ workspace.update(cx, |workspace, cx| {
+ ToolchainSelector::toggle(workspace, cx)
+ });
+ }
+ }))
+ .tooltip(|cx| Tooltip::text("Select Toolchain", cx)),
+ )
+ })
+ }
+}
+
+impl StatusItemView for ActiveToolchain {
+ fn set_active_pane_item(
+ &mut self,
+ active_pane_item: Option<&dyn ItemHandle>,
+ cx: &mut ViewContext<Self>,
+ ) {
+ if let Some(editor) = active_pane_item.and_then(|item| item.act_as::<Editor>(cx)) {
+ self.active_toolchain.take();
+ self.update_lister(editor, cx);
+ }
+ cx.notify();
+ }
+}
@@ -0,0 +1,343 @@
+mod active_toolchain;
+
+pub use active_toolchain::ActiveToolchain;
+use editor::Editor;
+use fuzzy::{match_strings, StringMatch, StringMatchCandidate};
+use gpui::{
+ actions, AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView, Model,
+ ParentElement, Render, Styled, Task, View, ViewContext, VisualContext, WeakView,
+};
+use language::{LanguageName, Toolchain, ToolchainList};
+use picker::{Picker, PickerDelegate};
+use project::{Project, WorktreeId};
+use std::{path::Path, sync::Arc};
+use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing};
+use util::ResultExt;
+use workspace::{ModalView, Workspace};
+
+actions!(toolchain, [Select]);
+
+pub fn init(cx: &mut AppContext) {
+ cx.observe_new_views(ToolchainSelector::register).detach();
+}
+
+pub struct ToolchainSelector {
+ picker: View<Picker<ToolchainSelectorDelegate>>,
+}
+
+impl ToolchainSelector {
+ fn register(workspace: &mut Workspace, _: &mut ViewContext<Workspace>) {
+ workspace.register_action(move |workspace, _: &Select, cx| {
+ Self::toggle(workspace, cx);
+ });
+ }
+
+ fn toggle(workspace: &mut Workspace, cx: &mut ViewContext<Workspace>) -> Option<()> {
+ let (_, buffer, _) = workspace
+ .active_item(cx)?
+ .act_as::<Editor>(cx)?
+ .read(cx)
+ .active_excerpt(cx)?;
+ let project = workspace.project().clone();
+
+ let language_name = buffer.read(cx).language()?.name();
+ let worktree_id = buffer.read(cx).file()?.worktree_id(cx);
+ let worktree_root_path = project
+ .read(cx)
+ .worktree_for_id(worktree_id, cx)?
+ .read(cx)
+ .abs_path();
+ let workspace_id = workspace.database_id()?;
+ let weak = workspace.weak_handle();
+ cx.spawn(move |workspace, mut cx| async move {
+ let active_toolchain = workspace::WORKSPACE_DB
+ .toolchain(workspace_id, worktree_id, language_name.clone())
+ .await
+ .ok()
+ .flatten();
+ workspace
+ .update(&mut cx, |this, cx| {
+ this.toggle_modal(cx, move |cx| {
+ ToolchainSelector::new(
+ weak,
+ project,
+ active_toolchain,
+ worktree_id,
+ worktree_root_path,
+ language_name,
+ cx,
+ )
+ });
+ })
+ .ok();
+ })
+ .detach();
+
+ Some(())
+ }
+
+ fn new(
+ workspace: WeakView<Workspace>,
+ project: Model<Project>,
+ active_toolchain: Option<Toolchain>,
+ worktree_id: WorktreeId,
+ worktree_root: Arc<Path>,
+ language_name: LanguageName,
+ cx: &mut ViewContext<Self>,
+ ) -> Self {
+ let view = cx.view().downgrade();
+ let picker = cx.new_view(|cx| {
+ let delegate = ToolchainSelectorDelegate::new(
+ active_toolchain,
+ view,
+ workspace,
+ worktree_id,
+ worktree_root,
+ project,
+ language_name,
+ cx,
+ );
+ Picker::uniform_list(delegate, cx)
+ });
+ Self { picker }
+ }
+}
+
+impl Render for ToolchainSelector {
+ fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
+ v_flex().w(rems(34.)).child(self.picker.clone())
+ }
+}
+
+impl FocusableView for ToolchainSelector {
+ fn focus_handle(&self, cx: &AppContext) -> FocusHandle {
+ self.picker.focus_handle(cx)
+ }
+}
+
+impl EventEmitter<DismissEvent> for ToolchainSelector {}
+impl ModalView for ToolchainSelector {}
+
+pub struct ToolchainSelectorDelegate {
+ toolchain_selector: WeakView<ToolchainSelector>,
+ candidates: ToolchainList,
+ matches: Vec<StringMatch>,
+ selected_index: usize,
+ workspace: WeakView<Workspace>,
+ worktree_id: WorktreeId,
+ worktree_abs_path_root: Arc<Path>,
+ _fetch_candidates_task: Task<Option<()>>,
+}
+
+impl ToolchainSelectorDelegate {
+ #[allow(clippy::too_many_arguments)]
+ fn new(
+ active_toolchain: Option<Toolchain>,
+ language_selector: WeakView<ToolchainSelector>,
+ workspace: WeakView<Workspace>,
+ worktree_id: WorktreeId,
+ worktree_abs_path_root: Arc<Path>,
+ project: Model<Project>,
+ language_name: LanguageName,
+ cx: &mut ViewContext<Picker<Self>>,
+ ) -> Self {
+ let _fetch_candidates_task = cx.spawn({
+ let project = project.clone();
+ move |this, mut cx| async move {
+ let available_toolchains = project
+ .update(&mut cx, |this, cx| {
+ this.available_toolchains(worktree_id, language_name, cx)
+ })
+ .ok()?
+ .await?;
+
+ let _ = this.update(&mut cx, move |this, cx| {
+ this.delegate.candidates = available_toolchains;
+ if let Some(active_toolchain) = active_toolchain {
+ if let Some(position) = this
+ .delegate
+ .candidates
+ .toolchains
+ .iter()
+ .position(|toolchain| *toolchain == active_toolchain)
+ {
+ this.delegate.set_selected_index(position, cx);
+ }
+ }
+ this.update_matches(this.query(cx), cx);
+ });
+
+ Some(())
+ }
+ });
+
+ Self {
+ toolchain_selector: language_selector,
+ candidates: Default::default(),
+ matches: vec![],
+ selected_index: 0,
+ workspace,
+ worktree_id,
+ worktree_abs_path_root,
+ _fetch_candidates_task,
+ }
+ }
+ fn relativize_path(path: SharedString, worktree_root: &Path) -> SharedString {
+ Path::new(&path.as_ref())
+ .strip_prefix(&worktree_root)
+ .ok()
+ .map(|suffix| Path::new(".").join(suffix))
+ .and_then(|path| path.to_str().map(String::from).map(SharedString::from))
+ .unwrap_or(path)
+ }
+}
+
+impl PickerDelegate for ToolchainSelectorDelegate {
+ type ListItem = ListItem;
+
+ fn placeholder_text(&self, _cx: &mut WindowContext) -> Arc<str> {
+ "Select a toolchain...".into()
+ }
+
+ fn match_count(&self) -> usize {
+ self.matches.len()
+ }
+
+ fn confirm(&mut self, _: bool, cx: &mut ViewContext<Picker<Self>>) {
+ if let Some(string_match) = self.matches.get(self.selected_index) {
+ let toolchain = self.candidates.toolchains[string_match.candidate_id].clone();
+ if let Some(workspace_id) = self
+ .workspace
+ .update(cx, |this, _| this.database_id())
+ .ok()
+ .flatten()
+ {
+ let workspace = self.workspace.clone();
+ let worktree_id = self.worktree_id;
+ cx.spawn(|_, mut cx| async move {
+ workspace::WORKSPACE_DB
+ .set_toolchain(workspace_id, worktree_id, toolchain.clone())
+ .await
+ .log_err();
+ workspace
+ .update(&mut cx, |this, cx| {
+ this.project().update(cx, |this, cx| {
+ this.activate_toolchain(worktree_id, toolchain, cx)
+ })
+ })
+ .ok()?
+ .await;
+ Some(())
+ })
+ .detach();
+ }
+ }
+ self.dismissed(cx);
+ }
+
+ fn dismissed(&mut self, cx: &mut ViewContext<Picker<Self>>) {
+ self.toolchain_selector
+ .update(cx, |_, cx| cx.emit(DismissEvent))
+ .log_err();
+ }
+
+ fn selected_index(&self) -> usize {
+ self.selected_index
+ }
+
+ fn set_selected_index(&mut self, ix: usize, _: &mut ViewContext<Picker<Self>>) {
+ self.selected_index = ix;
+ }
+
+ fn update_matches(
+ &mut self,
+ query: String,
+ cx: &mut ViewContext<Picker<Self>>,
+ ) -> gpui::Task<()> {
+ let background = cx.background_executor().clone();
+ let candidates = self.candidates.clone();
+ let worktree_root_path = self.worktree_abs_path_root.clone();
+ cx.spawn(|this, mut cx| async move {
+ let matches = if query.is_empty() {
+ candidates
+ .toolchains
+ .into_iter()
+ .enumerate()
+ .map(|(index, candidate)| {
+ let path = Self::relativize_path(candidate.path, &worktree_root_path);
+ let string = format!("{}{}", candidate.name, path);
+ StringMatch {
+ candidate_id: index,
+ string,
+ positions: Vec::new(),
+ score: 0.0,
+ }
+ })
+ .collect()
+ } else {
+ let candidates = candidates
+ .toolchains
+ .into_iter()
+ .enumerate()
+ .map(|(candidate_id, toolchain)| {
+ let path = Self::relativize_path(toolchain.path, &worktree_root_path);
+ let string = format!("{}{}", toolchain.name, path);
+ StringMatchCandidate::new(candidate_id, string)
+ })
+ .collect::<Vec<_>>();
+ match_strings(
+ &candidates,
+ &query,
+ false,
+ 100,
+ &Default::default(),
+ background,
+ )
+ .await
+ };
+
+ this.update(&mut cx, |this, cx| {
+ let delegate = &mut this.delegate;
+ delegate.matches = matches;
+ delegate.selected_index = delegate
+ .selected_index
+ .min(delegate.matches.len().saturating_sub(1));
+ cx.notify();
+ })
+ .log_err();
+ })
+ }
+
+ fn render_match(
+ &self,
+ ix: usize,
+ selected: bool,
+ _: &mut ViewContext<Picker<Self>>,
+ ) -> Option<Self::ListItem> {
+ let mat = &self.matches[ix];
+ let toolchain = &self.candidates.toolchains[mat.candidate_id];
+
+ let label = toolchain.name.clone();
+ let path = Self::relativize_path(toolchain.path.clone(), &self.worktree_abs_path_root);
+ let (name_highlights, mut path_highlights) = mat
+ .positions
+ .iter()
+ .cloned()
+ .partition::<Vec<_>, _>(|index| *index < label.len());
+ path_highlights.iter_mut().for_each(|index| {
+ *index -= label.len();
+ });
+ Some(
+ ListItem::new(ix)
+ .inset(true)
+ .spacing(ListItemSpacing::Sparse)
+ .selected(selected)
+ .child(HighlightedLabel::new(label, name_highlights))
+ .child(
+ HighlightedLabel::new(path, path_highlights)
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ ),
+ )
+ }
+}
@@ -8,6 +8,7 @@ mod dropdown_menu;
mod facepile;
mod icon;
mod image;
+mod indent_guides;
mod indicator;
mod keybinding;
mod label;
@@ -40,6 +41,7 @@ pub use dropdown_menu::*;
pub use facepile::*;
pub use icon::*;
pub use image::*;
+pub use indent_guides::*;
pub use indicator::*;
pub use keybinding::*;
pub use label::*;
@@ -149,8 +149,8 @@ pub(crate) struct ButtonLikeStyles {
fn element_bg_from_elevation(elevation: Option<ElevationIndex>, cx: &mut WindowContext) -> Hsla {
match elevation {
Some(ElevationIndex::Background) => cx.theme().colors().element_background,
- Some(ElevationIndex::ElevatedSurface) => cx.theme().colors().surface_background,
- Some(ElevationIndex::Surface) => cx.theme().colors().elevated_surface_background,
+ Some(ElevationIndex::ElevatedSurface) => cx.theme().colors().elevated_surface_background,
+ Some(ElevationIndex::Surface) => cx.theme().colors().surface_background,
Some(ElevationIndex::ModalSurface) => cx.theme().colors().background,
_ => cx.theme().colors().element_background,
}
@@ -162,11 +162,9 @@ impl ButtonStyle {
elevation: Option<ElevationIndex>,
cx: &mut WindowContext,
) -> ButtonLikeStyles {
- let filled_background = element_bg_from_elevation(elevation, cx);
-
match self {
ButtonStyle::Filled => ButtonLikeStyles {
- background: filled_background,
+ background: element_bg_from_elevation(elevation, cx),
border_color: transparent_black(),
label_color: Color::Default.color(cx),
icon_color: Color::Default.color(cx),
@@ -192,16 +190,18 @@ impl ButtonStyle {
elevation: Option<ElevationIndex>,
cx: &mut WindowContext,
) -> ButtonLikeStyles {
- let mut filled_background = element_bg_from_elevation(elevation, cx);
- filled_background.fade_out(0.92);
-
match self {
- ButtonStyle::Filled => ButtonLikeStyles {
- background: filled_background,
- border_color: transparent_black(),
- label_color: Color::Default.color(cx),
- icon_color: Color::Default.color(cx),
- },
+ ButtonStyle::Filled => {
+ let mut filled_background = element_bg_from_elevation(elevation, cx);
+ filled_background.fade_out(0.92);
+
+ ButtonLikeStyles {
+ background: filled_background,
+ border_color: transparent_black(),
+ label_color: Color::Default.color(cx),
+ icon_color: Color::Default.color(cx),
+ }
+ }
ButtonStyle::Tinted(tint) => tint.button_like_style(cx),
ButtonStyle::Subtle => ButtonLikeStyles {
background: cx.theme().colors().ghost_element_hover,
@@ -277,8 +277,6 @@ impl ButtonStyle {
elevation: Option<ElevationIndex>,
cx: &mut WindowContext,
) -> ButtonLikeStyles {
- element_bg_from_elevation(elevation, cx).fade_out(0.82);
-
match self {
ButtonStyle::Filled => ButtonLikeStyles {
background: cx.theme().colors().element_disabled,
@@ -0,0 +1,538 @@
+#![allow(missing_docs)]
+use std::{cmp::Ordering, ops::Range, rc::Rc};
+
+use gpui::{
+ fill, point, size, AnyElement, AppContext, Bounds, Hsla, Point, UniformListDecoration, View,
+};
+use smallvec::SmallVec;
+
+use crate::prelude::*;
+
+/// Represents the colors used for different states of indent guides.
+#[derive(Debug, Clone)]
+pub struct IndentGuideColors {
+ /// The color of the indent guide when it's neither active nor hovered.
+ pub default: Hsla,
+ /// The color of the indent guide when it's hovered.
+ pub hover: Hsla,
+ /// The color of the indent guide when it's active.
+ pub active: Hsla,
+}
+
+impl IndentGuideColors {
+ /// Returns the indent guide colors that should be used for panels.
+ pub fn panel(cx: &AppContext) -> Self {
+ Self {
+ default: cx.theme().colors().panel_indent_guide,
+ hover: cx.theme().colors().panel_indent_guide_hover,
+ active: cx.theme().colors().panel_indent_guide_active,
+ }
+ }
+}
+
+pub struct IndentGuides {
+ colors: IndentGuideColors,
+ indent_size: Pixels,
+ compute_indents_fn: Box<dyn Fn(Range<usize>, &mut WindowContext) -> SmallVec<[usize; 64]>>,
+ render_fn: Option<
+ Box<
+ dyn Fn(
+ RenderIndentGuideParams,
+ &mut WindowContext,
+ ) -> SmallVec<[RenderedIndentGuide; 12]>,
+ >,
+ >,
+ on_click: Option<Rc<dyn Fn(&IndentGuideLayout, &mut WindowContext)>>,
+}
+
+pub fn indent_guides<V: Render>(
+ view: View<V>,
+ indent_size: Pixels,
+ colors: IndentGuideColors,
+ compute_indents_fn: impl Fn(&mut V, Range<usize>, &mut ViewContext<V>) -> SmallVec<[usize; 64]>
+ + 'static,
+) -> IndentGuides {
+ let compute_indents_fn = Box::new(move |range, cx: &mut WindowContext| {
+ view.update(cx, |this, cx| compute_indents_fn(this, range, cx))
+ });
+ IndentGuides {
+ colors,
+ indent_size,
+ compute_indents_fn,
+ render_fn: None,
+ on_click: None,
+ }
+}
+
+impl IndentGuides {
+ /// Sets the callback that will be called when the user clicks on an indent guide.
+ pub fn on_click(
+ mut self,
+ on_click: impl Fn(&IndentGuideLayout, &mut WindowContext) + 'static,
+ ) -> Self {
+ self.on_click = Some(Rc::new(on_click));
+ self
+ }
+
+ /// Sets a custom callback that will be called when the indent guides need to be rendered.
+ pub fn with_render_fn<V: Render>(
+ mut self,
+ view: View<V>,
+ render_fn: impl Fn(
+ &mut V,
+ RenderIndentGuideParams,
+ &mut WindowContext,
+ ) -> SmallVec<[RenderedIndentGuide; 12]>
+ + 'static,
+ ) -> Self {
+ let render_fn = move |params, cx: &mut WindowContext| {
+ view.update(cx, |this, cx| render_fn(this, params, cx))
+ };
+ self.render_fn = Some(Box::new(render_fn));
+ self
+ }
+}
+
+/// Parameters for rendering indent guides.
+pub struct RenderIndentGuideParams {
+ /// The calculated layouts for the indent guides to be rendered.
+ pub indent_guides: SmallVec<[IndentGuideLayout; 12]>,
+ /// The size of each indentation level in pixels.
+ pub indent_size: Pixels,
+ /// The height of each item in pixels.
+ pub item_height: Pixels,
+}
+
+/// Represents a rendered indent guide with its visual properties and interaction areas.
+pub struct RenderedIndentGuide {
+ /// The bounds of the rendered indent guide in pixels.
+ pub bounds: Bounds<Pixels>,
+ /// The layout information for the indent guide.
+ pub layout: IndentGuideLayout,
+ /// Indicates whether the indent guide is currently active.
+ pub is_active: bool,
+ /// Can be used to customize the hitbox of the indent guide,
+ /// if this is set to `None`, the bounds of the indent guide will be used.
+ pub hitbox: Option<Bounds<Pixels>>,
+}
+
+/// Represents the layout information for an indent guide.
+#[derive(Debug, PartialEq, Eq, Hash)]
+pub struct IndentGuideLayout {
+ /// The starting position of the indent guide, where x is the indentation level
+ /// and y is the starting row.
+ pub offset: Point<usize>,
+ /// The length of the indent guide in rows.
+ pub length: usize,
+ /// Indicates whether the indent guide continues beyond the visible bounds.
+ pub continues_offscreen: bool,
+}
+
+/// Implements the necessary functionality for rendering indent guides inside a uniform list.
+mod uniform_list {
+ use gpui::{DispatchPhase, Hitbox, MouseButton, MouseDownEvent, MouseMoveEvent};
+
+ use super::*;
+
+ impl UniformListDecoration for IndentGuides {
+ fn compute(
+ &self,
+ visible_range: Range<usize>,
+ bounds: Bounds<Pixels>,
+ item_height: Pixels,
+ item_count: usize,
+ cx: &mut WindowContext,
+ ) -> AnyElement {
+ let mut visible_range = visible_range.clone();
+ let includes_trailing_indent = visible_range.end < item_count;
+ // Check if we have entries after the visible range,
+ // if so extend the visible range so we can fetch a trailing indent,
+ // which is needed to compute indent guides correctly.
+ if includes_trailing_indent {
+ visible_range.end += 1;
+ }
+ let visible_entries = &(self.compute_indents_fn)(visible_range.clone(), cx);
+ let indent_guides = compute_indent_guides(
+ &visible_entries,
+ visible_range.start,
+ includes_trailing_indent,
+ );
+ let mut indent_guides = if let Some(ref custom_render) = self.render_fn {
+ let params = RenderIndentGuideParams {
+ indent_guides,
+ indent_size: self.indent_size,
+ item_height,
+ };
+ custom_render(params, cx)
+ } else {
+ indent_guides
+ .into_iter()
+ .map(|layout| RenderedIndentGuide {
+ bounds: Bounds::new(
+ point(
+ px(layout.offset.x as f32) * self.indent_size,
+ px(layout.offset.y as f32) * item_height,
+ ),
+ size(px(1.), px(layout.length as f32) * item_height),
+ ),
+ layout,
+ is_active: false,
+ hitbox: None,
+ })
+ .collect()
+ };
+ for guide in &mut indent_guides {
+ guide.bounds.origin += bounds.origin;
+ if let Some(hitbox) = guide.hitbox.as_mut() {
+ hitbox.origin += bounds.origin;
+ }
+ }
+
+ let indent_guides = IndentGuidesElement {
+ indent_guides: Rc::new(indent_guides),
+ colors: self.colors.clone(),
+ on_hovered_indent_guide_click: self.on_click.clone(),
+ };
+ indent_guides.into_any_element()
+ }
+ }
+
+ struct IndentGuidesElement {
+ colors: IndentGuideColors,
+ indent_guides: Rc<SmallVec<[RenderedIndentGuide; 12]>>,
+ on_hovered_indent_guide_click: Option<Rc<dyn Fn(&IndentGuideLayout, &mut WindowContext)>>,
+ }
+
+ enum IndentGuidesElementPrepaintState {
+ Static,
+ Interactive {
+ hitboxes: Rc<SmallVec<[Hitbox; 12]>>,
+ on_hovered_indent_guide_click: Rc<dyn Fn(&IndentGuideLayout, &mut WindowContext)>,
+ },
+ }
+
+ impl Element for IndentGuidesElement {
+ type RequestLayoutState = ();
+ type PrepaintState = IndentGuidesElementPrepaintState;
+
+ fn id(&self) -> Option<ElementId> {
+ None
+ }
+
+ fn request_layout(
+ &mut self,
+ _id: Option<&gpui::GlobalElementId>,
+ cx: &mut WindowContext,
+ ) -> (gpui::LayoutId, Self::RequestLayoutState) {
+ (cx.request_layout(gpui::Style::default(), []), ())
+ }
+
+ fn prepaint(
+ &mut self,
+ _id: Option<&gpui::GlobalElementId>,
+ _bounds: Bounds<Pixels>,
+ _request_layout: &mut Self::RequestLayoutState,
+ cx: &mut WindowContext,
+ ) -> Self::PrepaintState {
+ if let Some(on_hovered_indent_guide_click) = self.on_hovered_indent_guide_click.clone()
+ {
+ let hitboxes = self
+ .indent_guides
+ .as_ref()
+ .iter()
+ .map(|guide| cx.insert_hitbox(guide.hitbox.unwrap_or(guide.bounds), false))
+ .collect();
+ Self::PrepaintState::Interactive {
+ hitboxes: Rc::new(hitboxes),
+ on_hovered_indent_guide_click,
+ }
+ } else {
+ Self::PrepaintState::Static
+ }
+ }
+
+ fn paint(
+ &mut self,
+ _id: Option<&gpui::GlobalElementId>,
+ _bounds: Bounds<Pixels>,
+ _request_layout: &mut Self::RequestLayoutState,
+ prepaint: &mut Self::PrepaintState,
+ cx: &mut WindowContext,
+ ) {
+ match prepaint {
+ IndentGuidesElementPrepaintState::Static => {
+ for indent_guide in self.indent_guides.as_ref() {
+ let fill_color = if indent_guide.is_active {
+ self.colors.active
+ } else {
+ self.colors.default
+ };
+
+ cx.paint_quad(fill(indent_guide.bounds, fill_color));
+ }
+ }
+ IndentGuidesElementPrepaintState::Interactive {
+ hitboxes,
+ on_hovered_indent_guide_click,
+ } => {
+ cx.on_mouse_event({
+ let hitboxes = hitboxes.clone();
+ let indent_guides = self.indent_guides.clone();
+ let on_hovered_indent_guide_click = on_hovered_indent_guide_click.clone();
+ move |event: &MouseDownEvent, phase, cx| {
+ if phase == DispatchPhase::Bubble && event.button == MouseButton::Left {
+ let mut active_hitbox_ix = None;
+ for (i, hitbox) in hitboxes.iter().enumerate() {
+ if hitbox.is_hovered(cx) {
+ active_hitbox_ix = Some(i);
+ break;
+ }
+ }
+
+ let Some(active_hitbox_ix) = active_hitbox_ix else {
+ return;
+ };
+
+ let active_indent_guide = &indent_guides[active_hitbox_ix].layout;
+ on_hovered_indent_guide_click(active_indent_guide, cx);
+
+ cx.stop_propagation();
+ cx.prevent_default();
+ }
+ }
+ });
+ let mut hovered_hitbox_id = None;
+ for (i, hitbox) in hitboxes.iter().enumerate() {
+ cx.set_cursor_style(gpui::CursorStyle::PointingHand, hitbox);
+ let indent_guide = &self.indent_guides[i];
+ let fill_color = if hitbox.is_hovered(cx) {
+ hovered_hitbox_id = Some(hitbox.id);
+ self.colors.hover
+ } else if indent_guide.is_active {
+ self.colors.active
+ } else {
+ self.colors.default
+ };
+
+ cx.paint_quad(fill(indent_guide.bounds, fill_color));
+ }
+
+ cx.on_mouse_event({
+ let prev_hovered_hitbox_id = hovered_hitbox_id;
+ let hitboxes = hitboxes.clone();
+ move |_: &MouseMoveEvent, phase, cx| {
+ let mut hovered_hitbox_id = None;
+ for hitbox in hitboxes.as_ref() {
+ if hitbox.is_hovered(cx) {
+ hovered_hitbox_id = Some(hitbox.id);
+ break;
+ }
+ }
+ if phase == DispatchPhase::Capture {
+ // If the hovered hitbox has changed, we need to re-paint the indent guides.
+ match (prev_hovered_hitbox_id, hovered_hitbox_id) {
+ (Some(prev_id), Some(id)) => {
+ if prev_id != id {
+ cx.refresh();
+ }
+ }
+ (None, Some(_)) => {
+ cx.refresh();
+ }
+ (Some(_), None) => {
+ cx.refresh();
+ }
+ (None, None) => {}
+ }
+ }
+ }
+ });
+ }
+ }
+ }
+ }
+
+ impl IntoElement for IndentGuidesElement {
+ type Element = Self;
+
+ fn into_element(self) -> Self::Element {
+ self
+ }
+ }
+}
+
+fn compute_indent_guides(
+ indents: &[usize],
+ offset: usize,
+ includes_trailing_indent: bool,
+) -> SmallVec<[IndentGuideLayout; 12]> {
+ let mut indent_guides = SmallVec::<[IndentGuideLayout; 12]>::new();
+ let mut indent_stack = SmallVec::<[IndentGuideLayout; 8]>::new();
+
+ let mut min_depth = usize::MAX;
+ for (row, &depth) in indents.iter().enumerate() {
+ if includes_trailing_indent && row == indents.len() - 1 {
+ continue;
+ }
+
+ let current_row = row + offset;
+ let current_depth = indent_stack.len();
+ if depth < min_depth {
+ min_depth = depth;
+ }
+
+ match depth.cmp(¤t_depth) {
+ Ordering::Less => {
+ for _ in 0..(current_depth - depth) {
+ if let Some(guide) = indent_stack.pop() {
+ indent_guides.push(guide);
+ }
+ }
+ }
+ Ordering::Greater => {
+ for new_depth in current_depth..depth {
+ indent_stack.push(IndentGuideLayout {
+ offset: Point::new(new_depth, current_row),
+ length: current_row,
+ continues_offscreen: false,
+ });
+ }
+ }
+ _ => {}
+ }
+
+ for indent in indent_stack.iter_mut() {
+ indent.length = current_row - indent.offset.y + 1;
+ }
+ }
+
+ indent_guides.extend(indent_stack);
+
+ for guide in indent_guides.iter_mut() {
+ if includes_trailing_indent
+ && guide.offset.y + guide.length == offset + indents.len().saturating_sub(1)
+ {
+ guide.continues_offscreen = indents
+ .last()
+ .map(|last_indent| guide.offset.x < *last_indent)
+ .unwrap_or(false);
+ }
+ }
+
+ indent_guides
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_compute_indent_guides() {
+ fn assert_compute_indent_guides(
+ input: &[usize],
+ offset: usize,
+ includes_trailing_indent: bool,
+ expected: Vec<IndentGuideLayout>,
+ ) {
+ use std::collections::HashSet;
+ assert_eq!(
+ compute_indent_guides(input, offset, includes_trailing_indent)
+ .into_vec()
+ .into_iter()
+ .collect::<HashSet<_>>(),
+ expected.into_iter().collect::<HashSet<_>>(),
+ );
+ }
+
+ assert_compute_indent_guides(
+ &[0, 1, 2, 2, 1, 0],
+ 0,
+ false,
+ vec![
+ IndentGuideLayout {
+ offset: Point::new(0, 1),
+ length: 4,
+ continues_offscreen: false,
+ },
+ IndentGuideLayout {
+ offset: Point::new(1, 2),
+ length: 2,
+ continues_offscreen: false,
+ },
+ ],
+ );
+
+ assert_compute_indent_guides(
+ &[2, 2, 2, 1, 1],
+ 0,
+ false,
+ vec![
+ IndentGuideLayout {
+ offset: Point::new(0, 0),
+ length: 5,
+ continues_offscreen: false,
+ },
+ IndentGuideLayout {
+ offset: Point::new(1, 0),
+ length: 3,
+ continues_offscreen: false,
+ },
+ ],
+ );
+
+ assert_compute_indent_guides(
+ &[1, 2, 3, 2, 1],
+ 0,
+ false,
+ vec![
+ IndentGuideLayout {
+ offset: Point::new(0, 0),
+ length: 5,
+ continues_offscreen: false,
+ },
+ IndentGuideLayout {
+ offset: Point::new(1, 1),
+ length: 3,
+ continues_offscreen: false,
+ },
+ IndentGuideLayout {
+ offset: Point::new(2, 2),
+ length: 1,
+ continues_offscreen: false,
+ },
+ ],
+ );
+
+ assert_compute_indent_guides(
+ &[0, 1, 0],
+ 0,
+ true,
+ vec![IndentGuideLayout {
+ offset: Point::new(0, 1),
+ length: 1,
+ continues_offscreen: false,
+ }],
+ );
+
+ assert_compute_indent_guides(
+ &[0, 1, 1],
+ 0,
+ true,
+ vec![IndentGuideLayout {
+ offset: Point::new(0, 1),
+ length: 1,
+ continues_offscreen: true,
+ }],
+ );
+ assert_compute_indent_guides(
+ &[0, 1, 2],
+ 0,
+ true,
+ vec![IndentGuideLayout {
+ offset: Point::new(0, 1),
+ length: 1,
+ continues_offscreen: true,
+ }],
+ );
+ }
+}
@@ -196,7 +196,7 @@ impl KeyIcon {
}
/// Returns a textual representation of the key binding for the given [`Action`].
-pub fn text_for_action(action: &dyn Action, cx: &mut WindowContext) -> Option<String> {
+pub fn text_for_action(action: &dyn Action, cx: &WindowContext) -> Option<String> {
let key_binding = cx.bindings_for_action(action).last().cloned()?;
Some(text_for_key_binding(key_binding, PlatformStyle::platform()))
}
@@ -75,6 +75,12 @@ impl From<String> for ArcCow<'_, str> {
}
}
+impl From<&String> for ArcCow<'_, str> {
+ fn from(value: &String) -> Self {
+ Self::Owned(value.clone().into())
+ }
+}
+
impl<'a> From<Cow<'a, str>> for ArcCow<'a, str> {
fn from(value: Cow<'a, str>) -> Self {
match value {
@@ -14,6 +14,7 @@ fuzzy.workspace = true
git.workspace = true
gpui.workspace = true
picker.workspace = true
+project.workspace = true
ui.workspace = true
util.workspace = true
workspace.workspace = true
@@ -2,24 +2,23 @@ use anyhow::{Context, Result};
use fuzzy::{StringMatch, StringMatchCandidate};
use git::repository::Branch;
use gpui::{
- actions, rems, AnyElement, AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView,
- InteractiveElement, IntoElement, ParentElement, Render, SharedString, Styled, Subscription,
- Task, View, ViewContext, VisualContext, WindowContext,
+ actions, rems, AnyElement, AppContext, AsyncAppContext, DismissEvent, EventEmitter,
+ FocusHandle, FocusableView, InteractiveElement, IntoElement, ParentElement, Render,
+ SharedString, Styled, Subscription, Task, View, ViewContext, VisualContext, WindowContext,
};
use picker::{Picker, PickerDelegate};
+use project::ProjectPath;
use std::{ops::Not, sync::Arc};
use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing};
use util::ResultExt;
-use workspace::notifications::NotificationId;
-use workspace::{ModalView, Toast, Workspace};
+use workspace::notifications::DetachAndPromptErr;
+use workspace::{ModalView, Workspace};
actions!(branches, [OpenRecent]);
pub fn init(cx: &mut AppContext) {
cx.observe_new_views(|workspace: &mut Workspace, _| {
- workspace.register_action(|workspace, action, cx| {
- BranchList::open(workspace, action, cx).log_err();
- });
+ workspace.register_action(BranchList::open);
})
.detach();
}
@@ -31,6 +30,21 @@ pub struct BranchList {
}
impl BranchList {
+ pub fn open(_: &mut Workspace, _: &OpenRecent, cx: &mut ViewContext<Workspace>) {
+ let this = cx.view().clone();
+ cx.spawn(|_, mut cx| async move {
+ // Modal branch picker has a longer trailoff than a popover one.
+ let delegate = BranchListDelegate::new(this.clone(), 70, &cx).await?;
+
+ this.update(&mut cx, |workspace, cx| {
+ workspace.toggle_modal(cx, |cx| BranchList::new(delegate, 34., cx))
+ })?;
+
+ Ok(())
+ })
+ .detach_and_prompt_err("Failed to read branches", cx, |_, _| None)
+ }
+
fn new(delegate: BranchListDelegate, rem_width: f32, cx: &mut ViewContext<Self>) -> Self {
let picker = cx.new_view(|cx| Picker::uniform_list(delegate, cx));
let _subscription = cx.subscribe(&picker, |_, _, _, cx| cx.emit(DismissEvent));
@@ -40,17 +54,6 @@ impl BranchList {
_subscription,
}
}
- pub fn open(
- workspace: &mut Workspace,
- _: &OpenRecent,
- cx: &mut ViewContext<Workspace>,
- ) -> Result<()> {
- // Modal branch picker has a longer trailoff than a popover one.
- let delegate = BranchListDelegate::new(workspace, cx.view().clone(), 70, cx)?;
- workspace.toggle_modal(cx, |cx| BranchList::new(delegate, 34., cx));
-
- Ok(())
- }
}
impl ModalView for BranchList {}
impl EventEmitter<DismissEvent> for BranchList {}
@@ -100,36 +103,32 @@ pub struct BranchListDelegate {
}
impl BranchListDelegate {
- fn new(
- workspace: &Workspace,
- handle: View<Workspace>,
+ async fn new(
+ workspace: View<Workspace>,
branch_name_trailoff_after: usize,
- cx: &AppContext,
+ cx: &AsyncAppContext,
) -> Result<Self> {
- let project = workspace.project().read(cx);
- let repo = project
- .get_first_worktree_root_repo(cx)
- .context("failed to get root repository for first worktree")?;
+ let all_branches_request = cx.update(|cx| {
+ let project = workspace.read(cx).project().read(cx);
+ let first_worktree = project
+ .visible_worktrees(cx)
+ .next()
+ .context("No worktrees found")?;
+ let project_path = ProjectPath::root_path(first_worktree.read(cx).id());
+ anyhow::Ok(project.branches(project_path, cx))
+ })??;
+
+ let all_branches = all_branches_request.await?;
- let all_branches = repo.branches()?;
Ok(Self {
matches: vec![],
- workspace: handle,
+ workspace,
all_branches,
selected_index: 0,
last_query: Default::default(),
branch_name_trailoff_after,
})
}
-
- fn display_error_toast(&self, message: String, cx: &mut WindowContext<'_>) {
- self.workspace.update(cx, |model, ctx| {
- struct GitCheckoutFailure;
- let id = NotificationId::unique::<GitCheckoutFailure>();
-
- model.show_toast(Toast::new(id, message), ctx)
- });
- }
}
impl PickerDelegate for BranchListDelegate {
@@ -235,40 +234,32 @@ impl PickerDelegate for BranchListDelegate {
cx.spawn({
let branch = branch.clone();
|picker, mut cx| async move {
- picker
- .update(&mut cx, |this, cx| {
- let project = this.delegate.workspace.read(cx).project().read(cx);
- let repo = project
- .get_first_worktree_root_repo(cx)
- .context("failed to get root repository for first worktree")?;
-
- let branch_to_checkout = match branch {
- BranchEntry::Branch(branch) => branch.string,
- BranchEntry::NewBranch { name: branch_name } => {
- let status = repo.create_branch(&branch_name);
- if status.is_err() {
- this.delegate.display_error_toast(format!("Failed to create branch '{branch_name}', check for conflicts or unstashed files"), cx);
- status?;
- }
-
- branch_name
- }
- };
-
- let status = repo.change_branch(&branch_to_checkout);
- if status.is_err() {
- this.delegate.display_error_toast(format!("Failed to checkout branch '{branch_to_checkout}', check for conflicts or unstashed files"), cx);
- status?;
- }
+ let branch_change_task = picker.update(&mut cx, |this, cx| {
+ let project = this.delegate.workspace.read(cx).project().read(cx);
- cx.emit(DismissEvent);
+ let branch_to_checkout = match branch {
+ BranchEntry::Branch(branch) => branch.string,
+ BranchEntry::NewBranch { name: branch_name } => branch_name,
+ };
+ let worktree = project
+ .worktrees(cx)
+ .next()
+ .context("worktree disappeared")?;
+ let repository = ProjectPath::root_path(worktree.read(cx).id());
- Ok::<(), anyhow::Error>(())
- })
- .log_err();
+ anyhow::Ok(project.update_or_create_branch(repository, branch_to_checkout, cx))
+ })??;
+
+ branch_change_task.await?;
+
+ picker.update(&mut cx, |_, cx| {
+ cx.emit(DismissEvent);
+
+ Ok::<(), anyhow::Error>(())
+ })
}
})
- .detach();
+ .detach_and_prompt_err("Failed to change branch", cx, |_, _| None);
}
fn dismissed(&mut self, cx: &mut ViewContext<Picker<Self>>) {
@@ -328,14 +328,18 @@ impl Vim {
.into_iter()
.map(|selection| selection.start.row)
.collect();
- let edits = selection_start_rows.into_iter().map(|row| {
- let indent = snapshot
- .indent_size_for_line(MultiBufferRow(row))
- .chars()
- .collect::<String>();
- let start_of_line = Point::new(row, 0);
- (start_of_line..start_of_line, indent + "\n")
- });
+ let edits = selection_start_rows
+ .into_iter()
+ .map(|row| {
+ let indent = snapshot
+ .indent_and_comment_for_line(MultiBufferRow(row), cx)
+ .chars()
+ .collect::<String>();
+
+ let start_of_line = Point::new(row, 0);
+ (start_of_line..start_of_line, indent + "\n")
+ })
+ .collect::<Vec<_>>();
editor.edit_with_autoindent(edits, cx);
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.move_cursors_with(|map, cursor, _| {
@@ -361,14 +365,18 @@ impl Vim {
.into_iter()
.map(|selection| selection.end.row)
.collect();
- let edits = selection_end_rows.into_iter().map(|row| {
- let indent = snapshot
- .indent_size_for_line(MultiBufferRow(row))
- .chars()
- .collect::<String>();
- let end_of_line = Point::new(row, snapshot.line_len(MultiBufferRow(row)));
- (end_of_line..end_of_line, "\n".to_string() + &indent)
- });
+ let edits = selection_end_rows
+ .into_iter()
+ .map(|row| {
+ let indent = snapshot
+ .indent_and_comment_for_line(MultiBufferRow(row), cx)
+ .chars()
+ .collect::<String>();
+
+ let end_of_line = Point::new(row, snapshot.line_len(MultiBufferRow(row)));
+ (end_of_line..end_of_line, "\n".to_string() + &indent)
+ })
+ .collect::<Vec<_>>();
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.maybe_move_cursors_with(|map, cursor, goal| {
Motion::CurrentLine.move_point(
@@ -1414,4 +1422,16 @@ mod test {
.await
.assert_eq("th th\nth th\nth th\nth th\nth th\nˇth th\n");
}
+
+ #[gpui::test]
+ async fn test_o_comment(cx: &mut gpui::TestAppContext) {
+ let mut cx = NeovimBackedTestContext::new(cx).await;
+ cx.set_neovim_option("filetype=rust").await;
+
+ cx.set_shared_state("// helloˇ\n").await;
+ cx.simulate_shared_keystrokes("o").await;
+ cx.shared_state().await.assert_eq("// hello\n// ˇ\n");
+ cx.simulate_shared_keystrokes("x escape shift-o").await;
+ cx.shared_state().await.assert_eq("// hello\n// ˇ\n// x\n");
+ }
}
@@ -0,0 +1,8 @@
+{"SetOption":{"value":"filetype=rust"}}
+{"Put":{"state":"// helloˇ\n"}}
+{"Key":"o"}
+{"Get":{"state":"// hello\n// ˇ\n","mode":"Insert"}}
+{"Key":"x"}
+{"Key":"escape"}
+{"Key":"shift-o"}
+{"Get":{"state":"// hello\n// ˇ\n// x\n","mode":"Insert"}}
@@ -49,7 +49,6 @@ node_runtime.workspace = true
parking_lot.workspace = true
postage.workspace = true
project.workspace = true
-dev_server_projects.workspace = true
task.workspace = true
release_channel.workspace = true
remote.workspace = true
@@ -7,6 +7,8 @@ use client::DevServerProjectId;
use db::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql};
use gpui::{point, size, Axis, Bounds, WindowBounds, WindowId};
+use language::{LanguageName, Toolchain};
+use project::WorktreeId;
use remote::ssh_session::SshProjectId;
use sqlez::{
bindable::{Bind, Column, StaticColumnCount},
@@ -24,9 +26,7 @@ use model::{
SerializedSshProject, SerializedWorkspace,
};
-use self::model::{
- DockStructure, LocalPathsOrder, SerializedDevServerProject, SerializedWorkspaceLocation,
-};
+use self::model::{DockStructure, LocalPathsOrder, SerializedWorkspaceLocation};
#[derive(Copy, Clone, Debug, PartialEq)]
pub(crate) struct SerializedAxis(pub(crate) gpui::Axis);
@@ -206,7 +206,8 @@ define_connection! {
// preview: bool // Indicates if this item is a preview item
// )
pub static ref DB: WorkspaceDb<()> =
- &[sql!(
+ &[
+ sql!(
CREATE TABLE workspaces(
workspace_id INTEGER PRIMARY KEY,
workspace_location BLOB UNIQUE,
@@ -369,6 +370,16 @@ define_connection! {
sql!(
ALTER TABLE ssh_projects RENAME COLUMN path TO paths;
),
+ sql!(
+ CREATE TABLE toolchains (
+ workspace_id INTEGER,
+ worktree_id INTEGER,
+ language_name TEXT NOT NULL,
+ name TEXT NOT NULL,
+ path TEXT NOT NULL,
+ PRIMARY KEY (workspace_id, worktree_id, language_name)
+ );
+ ),
];
}
@@ -460,89 +471,6 @@ impl WorkspaceDb {
})
}
- pub(crate) fn workspace_for_dev_server_project(
- &self,
- dev_server_project_id: DevServerProjectId,
- ) -> Option<SerializedWorkspace> {
- // Note that we re-assign the workspace_id here in case it's empty
- // and we've grabbed the most recent workspace
- let (
- workspace_id,
- dev_server_project_id,
- window_bounds,
- display,
- centered_layout,
- docks,
- window_id,
- ): (
- WorkspaceId,
- Option<u64>,
- Option<SerializedWindowBounds>,
- Option<Uuid>,
- Option<bool>,
- DockStructure,
- Option<u64>,
- ) = self
- .select_row_bound(sql! {
- SELECT
- workspace_id,
- dev_server_project_id,
- window_state,
- window_x,
- window_y,
- window_width,
- window_height,
- display,
- centered_layout,
- left_dock_visible,
- left_dock_active_panel,
- left_dock_zoom,
- right_dock_visible,
- right_dock_active_panel,
- right_dock_zoom,
- bottom_dock_visible,
- bottom_dock_active_panel,
- bottom_dock_zoom,
- window_id
- FROM workspaces
- WHERE dev_server_project_id = ?
- })
- .and_then(|mut prepared_statement| (prepared_statement)(dev_server_project_id.0))
- .context("No workspaces found")
- .warn_on_err()
- .flatten()?;
-
- let dev_server_project_id = dev_server_project_id?;
-
- let dev_server_project: SerializedDevServerProject = self
- .select_row_bound(sql! {
- SELECT id, path, dev_server_name
- FROM dev_server_projects
- WHERE id = ?
- })
- .and_then(|mut prepared_statement| (prepared_statement)(dev_server_project_id))
- .context("No remote project found")
- .warn_on_err()
- .flatten()?;
-
- let location = SerializedWorkspaceLocation::DevServer(dev_server_project);
-
- Some(SerializedWorkspace {
- id: workspace_id,
- location,
- center_group: self
- .get_center_pane_group(workspace_id)
- .context("Getting center group")
- .log_err()?,
- window_bounds,
- centered_layout: centered_layout.unwrap_or(false),
- display,
- docks,
- session_id: None,
- window_id,
- })
- }
-
pub(crate) fn workspace_for_ssh_project(
&self,
ssh_project: &SerializedSshProject,
@@ -613,6 +541,7 @@ impl WorkspaceDb {
match workspace.location {
SerializedWorkspaceLocation::Local(local_paths, local_paths_order) => {
conn.exec_bound(sql!(
+ DELETE FROM toolchains WHERE workspace_id = ?1;
DELETE FROM workspaces WHERE local_paths = ? AND workspace_id != ?
))?((&local_paths, workspace.id))
.context("clearing out old locations")?;
@@ -659,63 +588,9 @@ impl WorkspaceDb {
prepared_query(args).context("Updating workspace")?;
}
- SerializedWorkspaceLocation::DevServer(dev_server_project) => {
- conn.exec_bound(sql!(
- DELETE FROM workspaces WHERE dev_server_project_id = ? AND workspace_id != ?
- ))?((dev_server_project.id.0, workspace.id))
- .context("clearing out old locations")?;
-
- conn.exec_bound(sql!(
- INSERT INTO dev_server_projects(
- id,
- path,
- dev_server_name
- ) VALUES (?1, ?2, ?3)
- ON CONFLICT DO
- UPDATE SET
- path = ?2,
- dev_server_name = ?3
- ))?(&dev_server_project)?;
-
- // Upsert
- conn.exec_bound(sql!(
- INSERT INTO workspaces(
- workspace_id,
- dev_server_project_id,
- left_dock_visible,
- left_dock_active_panel,
- left_dock_zoom,
- right_dock_visible,
- right_dock_active_panel,
- right_dock_zoom,
- bottom_dock_visible,
- bottom_dock_active_panel,
- bottom_dock_zoom,
- timestamp
- )
- VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, CURRENT_TIMESTAMP)
- ON CONFLICT DO
- UPDATE SET
- dev_server_project_id = ?2,
- left_dock_visible = ?3,
- left_dock_active_panel = ?4,
- left_dock_zoom = ?5,
- right_dock_visible = ?6,
- right_dock_active_panel = ?7,
- right_dock_zoom = ?8,
- bottom_dock_visible = ?9,
- bottom_dock_active_panel = ?10,
- bottom_dock_zoom = ?11,
- timestamp = CURRENT_TIMESTAMP
- ))?((
- workspace.id,
- dev_server_project.id.0,
- workspace.docks,
- ))
- .context("Updating workspace")?;
- },
SerializedWorkspaceLocation::Ssh(ssh_project) => {
conn.exec_bound(sql!(
+ DELETE FROM toolchains WHERE workspace_id = ?1;
DELETE FROM workspaces WHERE ssh_project_id = ? AND workspace_id != ?
))?((ssh_project.id.0, workspace.id))
.context("clearing out old locations")?;
@@ -824,11 +699,10 @@ impl WorkspaceDb {
}
query! {
- fn recent_workspaces() -> Result<Vec<(WorkspaceId, LocalPaths, LocalPathsOrder, Option<u64>, Option<u64>)>> {
- SELECT workspace_id, local_paths, local_paths_order, dev_server_project_id, ssh_project_id
+ fn recent_workspaces() -> Result<Vec<(WorkspaceId, LocalPaths, LocalPathsOrder, Option<u64>)>> {
+ SELECT workspace_id, local_paths, local_paths_order, ssh_project_id
FROM workspaces
WHERE local_paths IS NOT NULL
- OR dev_server_project_id IS NOT NULL
OR ssh_project_id IS NOT NULL
ORDER BY timestamp DESC
}
@@ -843,13 +717,6 @@ impl WorkspaceDb {
}
}
- query! {
- fn dev_server_projects() -> Result<Vec<SerializedDevServerProject>> {
- SELECT id, path, dev_server_name
- FROM dev_server_projects
- }
- }
-
query! {
fn ssh_projects() -> Result<Vec<SerializedSshProject>> {
SELECT id, host, port, paths, user
@@ -885,6 +752,7 @@ impl WorkspaceDb {
query! {
pub async fn delete_workspace_by_id(id: WorkspaceId) -> Result<()> {
+ DELETE FROM toolchains WHERE workspace_id = ?1;
DELETE FROM workspaces
WHERE workspace_id IS ?
}
@@ -899,6 +767,7 @@ impl WorkspaceDb {
DELETE FROM dev_server_projects WHERE id = ?
))?(id.0)?;
conn.exec_bound(sql!(
+ DELETE FROM toolchains WHERE workspace_id = ?1;
DELETE FROM workspaces
WHERE dev_server_project_id IS ?
))?(id.0)
@@ -913,24 +782,9 @@ impl WorkspaceDb {
) -> Result<Vec<(WorkspaceId, SerializedWorkspaceLocation)>> {
let mut result = Vec::new();
let mut delete_tasks = Vec::new();
- let dev_server_projects = self.dev_server_projects()?;
let ssh_projects = self.ssh_projects()?;
- for (id, location, order, dev_server_project_id, ssh_project_id) in
- self.recent_workspaces()?
- {
- if let Some(dev_server_project_id) = dev_server_project_id.map(DevServerProjectId) {
- if let Some(dev_server_project) = dev_server_projects
- .iter()
- .find(|rp| rp.id == dev_server_project_id)
- {
- result.push((id, dev_server_project.clone().into()));
- } else {
- delete_tasks.push(self.delete_workspace_by_id(id));
- }
- continue;
- }
-
+ for (id, location, order, ssh_project_id) in self.recent_workspaces()? {
if let Some(ssh_project_id) = ssh_project_id.map(SshProjectId) {
if let Some(ssh_project) = ssh_projects.iter().find(|rp| rp.id == ssh_project_id) {
result.push((id, SerializedWorkspaceLocation::Ssh(ssh_project.clone())));
@@ -1216,6 +1070,83 @@ impl WorkspaceDb {
WHERE workspace_id = ?1
}
}
+
+ pub async fn toolchain(
+ &self,
+ workspace_id: WorkspaceId,
+ worktree_id: WorktreeId,
+ language_name: LanguageName,
+ ) -> Result<Option<Toolchain>> {
+ self.write(move |this| {
+ let mut select = this
+ .select_bound(sql!(
+ SELECT name, path FROM toolchains WHERE workspace_id = ? AND language_name = ? AND worktree_id = ?
+ ))
+ .context("Preparing insertion")?;
+
+ let toolchain: Vec<(String, String)> =
+ select((workspace_id, language_name.0.to_owned(), worktree_id.to_usize()))?;
+
+ Ok(toolchain.into_iter().next().map(|(name, path)| Toolchain {
+ name: name.into(),
+ path: path.into(),
+ language_name,
+ }))
+ })
+ .await
+ }
+
+ pub(crate) async fn toolchains(
+ &self,
+ workspace_id: WorkspaceId,
+ ) -> Result<Vec<(Toolchain, WorktreeId)>> {
+ self.write(move |this| {
+ let mut select = this
+ .select_bound(sql!(
+ SELECT name, path, worktree_id, language_name FROM toolchains WHERE workspace_id = ?
+ ))
+ .context("Preparing insertion")?;
+
+ let toolchain: Vec<(String, String, u64, String)> =
+ select(workspace_id)?;
+
+ Ok(toolchain.into_iter().map(|(name, path, worktree_id, language_name)| (Toolchain {
+ name: name.into(),
+ path: path.into(),
+ language_name: LanguageName::new(&language_name),
+ }, WorktreeId::from_proto(worktree_id))).collect())
+ })
+ .await
+ }
+ pub async fn set_toolchain(
+ &self,
+ workspace_id: WorkspaceId,
+ worktree_id: WorktreeId,
+ toolchain: Toolchain,
+ ) -> Result<()> {
+ self.write(move |conn| {
+ let mut insert = conn
+ .exec_bound(sql!(
+ INSERT INTO toolchains(workspace_id, worktree_id, language_name, name, path) VALUES (?, ?, ?, ?, ?)
+ ON CONFLICT DO
+ UPDATE SET
+ name = ?4,
+ path = ?5
+
+ ))
+ .context("Preparing insertion")?;
+
+ insert((
+ workspace_id,
+ worktree_id.to_usize(),
+ toolchain.language_name.0.as_ref(),
+ toolchain.name.as_ref(),
+ toolchain.path.as_ref(),
+ ))?;
+
+ Ok(())
+ }).await
+ }
}
#[cfg(test)]
@@ -4,7 +4,6 @@ use crate::{
};
use anyhow::{Context, Result};
use async_recursion::async_recursion;
-use client::DevServerProjectId;
use db::sqlez::{
bindable::{Bind, Column, StaticColumnCount},
statement::Statement,
@@ -17,7 +16,6 @@ use std::{
path::{Path, PathBuf},
sync::Arc,
};
-use ui::SharedString;
use util::ResultExt;
use uuid::Uuid;
@@ -92,13 +90,6 @@ impl Column for SerializedSshProject {
}
}
-#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
-pub struct SerializedDevServerProject {
- pub id: DevServerProjectId,
- pub dev_server_name: String,
- pub paths: Vec<SharedString>,
-}
-
#[derive(Debug, PartialEq, Clone)]
pub struct LocalPaths(Arc<Vec<PathBuf>>);
@@ -176,49 +167,10 @@ impl Column for LocalPathsOrder {
}
}
-impl From<SerializedDevServerProject> for SerializedWorkspaceLocation {
- fn from(dev_server_project: SerializedDevServerProject) -> Self {
- Self::DevServer(dev_server_project)
- }
-}
-
-impl StaticColumnCount for SerializedDevServerProject {}
-impl Bind for &SerializedDevServerProject {
- fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
- let next_index = statement.bind(&self.id.0, start_index)?;
- let next_index = statement.bind(&self.dev_server_name, next_index)?;
- let paths = serde_json::to_string(&self.paths)?;
- statement.bind(&paths, next_index)
- }
-}
-
-impl Column for SerializedDevServerProject {
- fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
- let id = statement.column_int64(start_index)?;
- let dev_server_name = statement.column_text(start_index + 1)?.to_string();
- let paths = statement.column_text(start_index + 2)?.to_string();
- let paths: Vec<SharedString> = if paths.starts_with('[') {
- serde_json::from_str(&paths).context("JSON deserialization of paths failed")?
- } else {
- vec![paths.into()]
- };
-
- Ok((
- Self {
- id: DevServerProjectId(id as u64),
- dev_server_name,
- paths,
- },
- start_index + 3,
- ))
- }
-}
-
#[derive(Debug, PartialEq, Clone)]
pub enum SerializedWorkspaceLocation {
Local(LocalPaths, LocalPathsOrder),
Ssh(SerializedSshProject),
- DevServer(SerializedDevServerProject),
}
impl SerializedWorkspaceLocation {
@@ -16,7 +16,7 @@ use anyhow::{anyhow, Context as _, Result};
use call::{call_settings::CallSettings, ActiveCall};
use client::{
proto::{self, ErrorCode, PanelId, PeerId},
- ChannelId, Client, DevServerProjectId, ErrorExt, ProjectId, Status, TypedEnvelope, UserStore,
+ ChannelId, Client, ErrorExt, Status, TypedEnvelope, UserStore,
};
use collections::{hash_map, HashMap, HashSet};
use derive_more::{Deref, DerefMut};
@@ -52,7 +52,7 @@ use notifications::{
pub use pane::*;
pub use pane_group::*;
pub use persistence::{
- model::{ItemId, LocalPaths, SerializedDevServerProject, SerializedWorkspaceLocation},
+ model::{ItemId, LocalPaths, SerializedWorkspaceLocation},
WorkspaceDb, DB as WORKSPACE_DB,
};
use persistence::{
@@ -97,7 +97,7 @@ use ui::{
IntoElement, ParentElement as _, Pixels, SharedString, Styled as _, ViewContext,
VisualContext as _, WindowContext,
};
-use util::{maybe, ResultExt, TryFutureExt};
+use util::{ResultExt, TryFutureExt};
use uuid::Uuid;
pub use workspace_settings::{
AutosaveSetting, RestoreOnStartupBehavior, TabBarSettings, WorkspaceSettings,
@@ -1153,6 +1153,14 @@ impl Workspace {
DB.next_id().await.unwrap_or_else(|_| Default::default())
};
+ let toolchains = DB.toolchains(workspace_id).await?;
+ for (toolchain, worktree_id) in toolchains {
+ project_handle
+ .update(&mut cx, |this, cx| {
+ this.activate_toolchain(worktree_id, toolchain, cx)
+ })?
+ .await;
+ }
let window = if let Some(window) = requesting_window {
cx.update_window(window.into(), |_, cx| {
cx.replace_root_view(|cx| {
@@ -1210,7 +1218,7 @@ impl Workspace {
notify_if_database_failed(window, &mut cx);
let opened_items = window
.update(&mut cx, |_workspace, cx| {
- open_items(serialized_workspace, project_paths, app_state, cx)
+ open_items(serialized_workspace, project_paths, cx)
})?
.await
.unwrap_or_default();
@@ -2050,14 +2058,16 @@ impl Workspace {
cx: &mut ViewContext<Self>,
) -> Task<anyhow::Result<Box<dyn ItemHandle>>> {
match path {
- ResolvedPath::ProjectPath(project_path) => self.open_path(project_path, None, true, cx),
- ResolvedPath::AbsPath(path) => self.open_abs_path(path, false, cx),
+ ResolvedPath::ProjectPath { project_path, .. } => {
+ self.open_path(project_path, None, true, cx)
+ }
+ ResolvedPath::AbsPath { path, .. } => self.open_abs_path(path, false, cx),
}
}
fn add_folder_to_project(&mut self, _: &AddFolderToProject, cx: &mut ViewContext<Self>) {
let project = self.project.read(cx);
- if project.is_via_collab() && project.dev_server_project_id().is_none() {
+ if project.is_via_collab() {
self.show_error(
&anyhow!("You cannot add folders to someone else's project"),
cx,
@@ -4137,20 +4147,6 @@ impl Workspace {
} else {
None
}
- } else if let Some(dev_server_project_id) = self.project().read(cx).dev_server_project_id()
- {
- let store = dev_server_projects::Store::global(cx).read(cx);
- maybe!({
- let project = store.dev_server_project(dev_server_project_id)?;
- let dev_server = store.dev_server(project.dev_server_id)?;
-
- let dev_server_project = SerializedDevServerProject {
- id: dev_server_project_id,
- dev_server_name: dev_server.name.to_string(),
- paths: project.paths.to_vec(),
- };
- Some(SerializedWorkspaceLocation::DevServer(dev_server_project))
- })
} else {
None
};
@@ -4569,7 +4565,6 @@ fn window_bounds_env_override() -> Option<Bounds<Pixels>> {
fn open_items(
serialized_workspace: Option<SerializedWorkspace>,
mut project_paths_to_open: Vec<(PathBuf, Option<ProjectPath>)>,
- app_state: Arc<AppState>,
cx: &mut ViewContext<Workspace>,
) -> impl 'static + Future<Output = Result<Vec<Option<Result<Box<dyn ItemHandle>>>>>> {
let restored_items = serialized_workspace.map(|serialized_workspace| {
@@ -4625,14 +4620,20 @@ fn open_items(
.enumerate()
.map(|(ix, (abs_path, project_path))| {
let workspace = workspace.clone();
- cx.spawn(|mut cx| {
- let fs = app_state.fs.clone();
- async move {
- let file_project_path = project_path?;
- if fs.is_dir(&abs_path).await {
- None
- } else {
- Some((
+ cx.spawn(|mut cx| async move {
+ let file_project_path = project_path?;
+ let abs_path_task = workspace.update(&mut cx, |workspace, cx| {
+ workspace.project().update(cx, |project, cx| {
+ project.resolve_abs_path(abs_path.to_string_lossy().as_ref(), cx)
+ })
+ });
+
+ // We only want to open file paths here. If one of the items
+ // here is a directory, it was already opened further above
+ // with a `find_or_create_worktree`.
+ if let Ok(task) = abs_path_task {
+ if task.await.map_or(true, |p| p.is_file()) {
+ return Some((
ix,
workspace
.update(&mut cx, |workspace, cx| {
@@ -4640,9 +4641,10 @@ fn open_items(
})
.log_err()?
.await,
- ))
+ ));
}
}
+ None
})
});
@@ -5183,13 +5185,12 @@ async fn join_channel_internal(
if let Some(workspace) = requesting_window {
let project = workspace.update(cx, |workspace, cx| {
let project = workspace.project.read(cx);
- let is_dev_server = project.dev_server_project_id().is_some();
- if !is_dev_server && !CallSettings::get_global(cx).share_on_join {
+ if !CallSettings::get_global(cx).share_on_join {
return None;
}
- if (project.is_local() || project.is_via_ssh() || is_dev_server)
+ if (project.is_local() || project.is_via_ssh())
&& project.visible_worktrees(cx).any(|tree| {
tree.read(cx)
.root_entry()
@@ -5484,58 +5485,6 @@ pub fn create_and_open_local_file(
})
}
-pub fn join_hosted_project(
- hosted_project_id: ProjectId,
- app_state: Arc<AppState>,
- cx: &mut AppContext,
-) -> Task<Result<()>> {
- cx.spawn(|mut cx| async move {
- let existing_window = cx.update(|cx| {
- cx.windows().into_iter().find_map(|window| {
- let workspace = window.downcast::<Workspace>()?;
- workspace
- .read(cx)
- .is_ok_and(|workspace| {
- workspace.project().read(cx).hosted_project_id() == Some(hosted_project_id)
- })
- .then_some(workspace)
- })
- })?;
-
- let workspace = if let Some(existing_window) = existing_window {
- existing_window
- } else {
- let project = Project::hosted(
- hosted_project_id,
- app_state.user_store.clone(),
- app_state.client.clone(),
- app_state.languages.clone(),
- app_state.fs.clone(),
- cx.clone(),
- )
- .await?;
-
- let window_bounds_override = window_bounds_env_override();
- cx.update(|cx| {
- let mut options = (app_state.build_window_options)(None, cx);
- options.window_bounds = window_bounds_override.map(WindowBounds::Windowed);
- cx.open_window(options, |cx| {
- cx.new_view(|cx| {
- Workspace::new(Default::default(), project, app_state.clone(), cx)
- })
- })
- })??
- };
-
- workspace.update(&mut cx, |_, cx| {
- cx.activate(true);
- cx.activate_window();
- })?;
-
- Ok(())
- })
-}
-
pub fn open_ssh_project(
window: WindowHandle<Workspace>,
connection_options: SshConnectionOptions,
@@ -5589,6 +5538,14 @@ pub fn open_ssh_project(
)
})?;
+ let toolchains = DB.toolchains(workspace_id).await?;
+ for (toolchain, worktree_id) in toolchains {
+ project
+ .update(&mut cx, |this, cx| {
+ this.activate_toolchain(worktree_id, toolchain, cx)
+ })?
+ .await;
+ }
let mut project_paths_to_open = vec![];
let mut project_path_errors = vec![];
@@ -5631,7 +5588,7 @@ pub fn open_ssh_project(
.update(&mut cx, |_, cx| {
cx.activate_window();
- open_items(serialized_workspace, project_paths_to_open, app_state, cx)
+ open_items(serialized_workspace, project_paths_to_open, cx)
})?
.await?;
@@ -5688,84 +5645,6 @@ fn serialize_ssh_project(
})
}
-pub fn join_dev_server_project(
- dev_server_project_id: DevServerProjectId,
- project_id: ProjectId,
- app_state: Arc<AppState>,
- window_to_replace: Option<WindowHandle<Workspace>>,
- cx: &mut AppContext,
-) -> Task<Result<WindowHandle<Workspace>>> {
- let windows = cx.windows();
- cx.spawn(|mut cx| async move {
- let existing_workspace = windows.into_iter().find_map(|window| {
- window.downcast::<Workspace>().and_then(|window| {
- window
- .update(&mut cx, |workspace, cx| {
- if workspace.project().read(cx).remote_id() == Some(project_id.0) {
- Some(window)
- } else {
- None
- }
- })
- .unwrap_or(None)
- })
- });
-
- let serialized_workspace: Option<SerializedWorkspace> =
- persistence::DB.workspace_for_dev_server_project(dev_server_project_id);
-
- let workspace = if let Some(existing_workspace) = existing_workspace {
- existing_workspace
- } else {
- let project = Project::remote(
- project_id.0,
- app_state.client.clone(),
- app_state.user_store.clone(),
- app_state.languages.clone(),
- app_state.fs.clone(),
- cx.clone(),
- )
- .await?;
-
- let workspace_id = if let Some(ref serialized_workspace) = serialized_workspace {
- serialized_workspace.id
- } else {
- persistence::DB.next_id().await?
- };
-
- if let Some(window_to_replace) = window_to_replace {
- cx.update_window(window_to_replace.into(), |_, cx| {
- cx.replace_root_view(|cx| {
- Workspace::new(Some(workspace_id), project, app_state.clone(), cx)
- });
- })?;
- window_to_replace
- } else {
- let window_bounds_override = window_bounds_env_override();
- cx.update(|cx| {
- let mut options = (app_state.build_window_options)(None, cx);
- options.window_bounds = window_bounds_override.map(WindowBounds::Windowed);
- cx.open_window(options, |cx| {
- cx.new_view(|cx| {
- Workspace::new(Some(workspace_id), project, app_state.clone(), cx)
- })
- })
- })??
- }
- };
-
- workspace
- .update(&mut cx, |_, cx| {
- cx.activate(true);
- cx.activate_window();
- open_items(serialized_workspace, vec![], app_state, cx)
- })?
- .await?;
-
- anyhow::Ok(workspace)
- })
-}
-
pub fn join_in_room_project(
project_id: u64,
follow_user_id: u64,
@@ -2385,6 +2385,12 @@ impl Snapshot {
.map(|entry| entry.to_owned())
}
+ pub fn git_entry(&self, work_directory_path: Arc<Path>) -> Option<RepositoryEntry> {
+ self.repository_entries
+ .get(&RepositoryWorkDirectory(work_directory_path))
+ .map(|entry| entry.to_owned())
+ }
+
pub fn git_entries(&self) -> impl Iterator<Item = &RepositoryEntry> {
self.repository_entries.values()
}
@@ -36,7 +36,6 @@ command_palette.workspace = true
command_palette_hooks.workspace = true
copilot.workspace = true
db.workspace = true
-dev_server_projects.workspace = true
diagnostics.workspace = true
editor.workspace = true
env_logger.workspace = true
@@ -52,7 +51,6 @@ git.workspace = true
git_hosting_providers.workspace = true
go_to_line.workspace = true
gpui = { workspace = true, features = ["wayland", "x11", "font-kit"] }
-headless.workspace = true
http_client.workspace = true
image_viewer.workspace = true
inline_completion_button.workspace = true
@@ -106,6 +104,7 @@ terminal_view.workspace = true
theme.workspace = true
theme_selector.workspace = true
time.workspace = true
+toolchain_selector.workspace = true
ui.workspace = true
reqwest_client.workspace = true
url.workspace = true
@@ -7,11 +7,10 @@ mod reliability;
mod zed;
use anyhow::{anyhow, Context as _, Result};
-use assistant::PromptBuilder;
use chrono::Offset;
use clap::{command, Parser};
use cli::FORCE_CLI_MODE_ENV_VAR_NAME;
-use client::{parse_zed_link, Client, DevServerToken, ProxySettings, UserStore};
+use client::{parse_zed_link, Client, ProxySettings, UserStore};
use collab_ui::channel_view::ChannelView;
use db::kvp::{GLOBAL_KEY_VALUE_STORE, KEY_VALUE_STORE};
use editor::Editor;
@@ -20,13 +19,12 @@ use fs::{Fs, RealFs};
use futures::{future, StreamExt};
use git::GitHostingProviderRegistry;
use gpui::{
- Action, App, AppContext, AsyncAppContext, Context, DismissEvent, Global, Task,
- UpdateGlobal as _, VisualContext,
+ Action, App, AppContext, AsyncAppContext, Context, DismissEvent, UpdateGlobal as _,
+ VisualContext,
};
use http_client::{read_proxy_from_env, Uri};
use language::LanguageRegistry;
use log::LevelFilter;
-use remote::SshConnectionOptions;
use reqwest_client::ReqwestClient;
use assets::Assets;
@@ -136,193 +134,6 @@ fn fail_to_open_window(e: anyhow::Error, _cx: &mut AppContext) {
}
}
-enum AppMode {
- Headless(DevServerToken),
- Ui,
-}
-impl Global for AppMode {}
-
-fn init_headless(
- dev_server_token: DevServerToken,
- app_state: Arc<AppState>,
- cx: &mut AppContext,
-) -> Task<Result<()>> {
- match cx.try_global::<AppMode>() {
- Some(AppMode::Headless(token)) if token == &dev_server_token => return Task::ready(Ok(())),
- Some(_) => {
- return Task::ready(Err(anyhow!(
- "zed is already running. Use `kill {}` to stop it",
- process::id()
- )))
- }
- None => {
- cx.set_global(AppMode::Headless(dev_server_token.clone()));
- }
- };
- let client = app_state.client.clone();
- client.set_dev_server_token(dev_server_token);
- headless::init(
- client.clone(),
- headless::AppState {
- languages: app_state.languages.clone(),
- user_store: app_state.user_store.clone(),
- fs: app_state.fs.clone(),
- node_runtime: app_state.node_runtime.clone(),
- },
- cx,
- )
-}
-
-// init_common is called for both headless and normal mode.
-fn init_common(app_state: Arc<AppState>, cx: &mut AppContext) -> Arc<PromptBuilder> {
- SystemAppearance::init(cx);
- theme::init(theme::LoadThemes::All(Box::new(Assets)), cx);
- command_palette::init(cx);
- let copilot_language_server_id = app_state.languages.next_language_server_id();
- copilot::init(
- copilot_language_server_id,
- app_state.fs.clone(),
- app_state.client.http_client(),
- app_state.node_runtime.clone(),
- cx,
- );
- supermaven::init(app_state.client.clone(), cx);
- language_model::init(
- app_state.user_store.clone(),
- app_state.client.clone(),
- app_state.fs.clone(),
- cx,
- );
- snippet_provider::init(cx);
- inline_completion_registry::init(app_state.client.telemetry().clone(), cx);
- let prompt_builder = assistant::init(
- app_state.fs.clone(),
- app_state.client.clone(),
- stdout_is_a_pty(),
- cx,
- );
- repl::init(
- app_state.fs.clone(),
- app_state.client.telemetry().clone(),
- cx,
- );
- extension::init(
- app_state.fs.clone(),
- app_state.client.clone(),
- app_state.node_runtime.clone(),
- app_state.languages.clone(),
- ThemeRegistry::global(cx),
- cx,
- );
- recent_projects::init(cx);
- prompt_builder
-}
-
-fn init_ui(
- app_state: Arc<AppState>,
- prompt_builder: Arc<PromptBuilder>,
- cx: &mut AppContext,
-) -> Result<()> {
- match cx.try_global::<AppMode>() {
- Some(AppMode::Headless(_)) => {
- return Err(anyhow!(
- "zed is already running in headless mode. Use `kill {}` to stop it",
- process::id()
- ))
- }
- Some(AppMode::Ui) => return Ok(()),
- None => {
- cx.set_global(AppMode::Ui);
- }
- };
-
- load_embedded_fonts(cx);
-
- #[cfg(target_os = "linux")]
- crate::zed::linux_prompts::init(cx);
-
- app_state.languages.set_theme(cx.theme().clone());
- editor::init(cx);
- image_viewer::init(cx);
- diagnostics::init(cx);
-
- audio::init(Assets, cx);
- workspace::init(app_state.clone(), cx);
-
- go_to_line::init(cx);
- file_finder::init(cx);
- tab_switcher::init(cx);
- dev_server_projects::init(app_state.client.clone(), cx);
- outline::init(cx);
- project_symbols::init(cx);
- project_panel::init(Assets, cx);
- outline_panel::init(Assets, cx);
- tasks_ui::init(cx);
- snippets_ui::init(cx);
- channel::init(&app_state.client.clone(), app_state.user_store.clone(), cx);
- search::init(cx);
- vim::init(cx);
- terminal_view::init(cx);
- journal::init(app_state.clone(), cx);
- language_selector::init(cx);
- theme_selector::init(cx);
- language_tools::init(cx);
- call::init(app_state.client.clone(), app_state.user_store.clone(), cx);
- notifications::init(app_state.client.clone(), app_state.user_store.clone(), cx);
- collab_ui::init(&app_state, cx);
- feedback::init(cx);
- markdown_preview::init(cx);
- welcome::init(cx);
- settings_ui::init(cx);
- extensions_ui::init(cx);
-
- cx.observe_global::<SettingsStore>({
- let languages = app_state.languages.clone();
- let http = app_state.client.http_client();
- let client = app_state.client.clone();
-
- move |cx| {
- for &mut window in cx.windows().iter_mut() {
- let background_appearance = cx.theme().window_background_appearance();
- window
- .update(cx, |_, cx| {
- cx.set_background_appearance(background_appearance)
- })
- .ok();
- }
- languages.set_theme(cx.theme().clone());
- let new_host = &client::ClientSettings::get_global(cx).server_url;
- if &http.base_url() != new_host {
- http.set_base_url(new_host);
- if client.status().borrow().is_connected() {
- client.reconnect(&cx.to_async());
- }
- }
- }
- })
- .detach();
- let telemetry = app_state.client.telemetry();
- telemetry.report_setting_event("theme", cx.theme().name.to_string());
- telemetry.report_setting_event("keymap", BaseKeymap::get_global(cx).to_string());
- telemetry.flush_events();
-
- let fs = app_state.fs.clone();
- load_user_themes_in_background(fs.clone(), cx);
- watch_themes(fs.clone(), cx);
- watch_languages(fs.clone(), app_state.languages.clone(), cx);
- watch_file_types(fs.clone(), cx);
-
- cx.set_menus(app_menus());
- initialize_workspace(app_state.clone(), prompt_builder, cx);
-
- cx.activate(true);
-
- cx.spawn(|cx| async move { authenticate(app_state.client.clone(), &cx).await })
- .detach_and_log_err(cx);
-
- Ok(())
-}
-
fn main() {
menu::init();
zed_actions::init();
@@ -426,22 +237,15 @@ fn main() {
app.on_reopen(move |cx| {
if let Some(app_state) = AppState::try_global(cx).and_then(|app_state| app_state.upgrade())
{
- let ui_has_launched = cx
- .try_global::<AppMode>()
- .map(|mode| matches!(mode, AppMode::Ui))
- .unwrap_or(false);
-
- if ui_has_launched {
- cx.spawn({
- let app_state = app_state.clone();
- |mut cx| async move {
- if let Err(e) = restore_or_create_workspace(app_state, &mut cx).await {
- fail_to_open_window_async(e, &mut cx)
- }
+ cx.spawn({
+ let app_state = app_state.clone();
+ |mut cx| async move {
+ if let Err(e) = restore_or_create_workspace(app_state, &mut cx).await {
+ fail_to_open_window_async(e, &mut cx)
}
- })
- .detach();
- }
+ }
+ })
+ .detach();
}
});
@@ -567,7 +371,134 @@ fn main() {
installation_id.clone().map(|id| id.to_string()),
cx,
);
- let prompt_builder = init_common(app_state.clone(), cx);
+
+ SystemAppearance::init(cx);
+ theme::init(theme::LoadThemes::All(Box::new(Assets)), cx);
+ command_palette::init(cx);
+ let copilot_language_server_id = app_state.languages.next_language_server_id();
+ copilot::init(
+ copilot_language_server_id,
+ app_state.fs.clone(),
+ app_state.client.http_client(),
+ app_state.node_runtime.clone(),
+ cx,
+ );
+ supermaven::init(app_state.client.clone(), cx);
+ language_model::init(
+ app_state.user_store.clone(),
+ app_state.client.clone(),
+ app_state.fs.clone(),
+ cx,
+ );
+ snippet_provider::init(cx);
+ inline_completion_registry::init(app_state.client.telemetry().clone(), cx);
+ let prompt_builder = assistant::init(
+ app_state.fs.clone(),
+ app_state.client.clone(),
+ stdout_is_a_pty(),
+ cx,
+ );
+ repl::init(
+ app_state.fs.clone(),
+ app_state.client.telemetry().clone(),
+ cx,
+ );
+ extension::init(
+ app_state.fs.clone(),
+ app_state.client.clone(),
+ app_state.node_runtime.clone(),
+ app_state.languages.clone(),
+ ThemeRegistry::global(cx),
+ cx,
+ );
+ recent_projects::init(cx);
+
+ load_embedded_fonts(cx);
+
+ #[cfg(target_os = "linux")]
+ crate::zed::linux_prompts::init(cx);
+
+ app_state.languages.set_theme(cx.theme().clone());
+ editor::init(cx);
+ image_viewer::init(cx);
+ diagnostics::init(cx);
+
+ audio::init(Assets, cx);
+ workspace::init(app_state.clone(), cx);
+
+ go_to_line::init(cx);
+ file_finder::init(cx);
+ tab_switcher::init(cx);
+ outline::init(cx);
+ project_symbols::init(cx);
+ project_panel::init(Assets, cx);
+ outline_panel::init(Assets, cx);
+ tasks_ui::init(cx);
+ snippets_ui::init(cx);
+ channel::init(&app_state.client.clone(), app_state.user_store.clone(), cx);
+ search::init(cx);
+ vim::init(cx);
+ terminal_view::init(cx);
+ journal::init(app_state.clone(), cx);
+ language_selector::init(cx);
+ toolchain_selector::init(cx);
+ theme_selector::init(cx);
+ language_tools::init(cx);
+ call::init(app_state.client.clone(), app_state.user_store.clone(), cx);
+ notifications::init(app_state.client.clone(), app_state.user_store.clone(), cx);
+ collab_ui::init(&app_state, cx);
+ feedback::init(cx);
+ markdown_preview::init(cx);
+ welcome::init(cx);
+ settings_ui::init(cx);
+ extensions_ui::init(cx);
+
+ cx.observe_global::<SettingsStore>({
+ let languages = app_state.languages.clone();
+ let http = app_state.client.http_client();
+ let client = app_state.client.clone();
+
+ move |cx| {
+ for &mut window in cx.windows().iter_mut() {
+ let background_appearance = cx.theme().window_background_appearance();
+ window
+ .update(cx, |_, cx| {
+ cx.set_background_appearance(background_appearance)
+ })
+ .ok();
+ }
+ languages.set_theme(cx.theme().clone());
+ let new_host = &client::ClientSettings::get_global(cx).server_url;
+ if &http.base_url() != new_host {
+ http.set_base_url(new_host);
+ if client.status().borrow().is_connected() {
+ client.reconnect(&cx.to_async());
+ }
+ }
+ }
+ })
+ .detach();
+ let telemetry = app_state.client.telemetry();
+ telemetry.report_setting_event("theme", cx.theme().name.to_string());
+ telemetry.report_setting_event("keymap", BaseKeymap::get_global(cx).to_string());
+ telemetry.flush_events();
+
+ let fs = app_state.fs.clone();
+ load_user_themes_in_background(fs.clone(), cx);
+ watch_themes(fs.clone(), cx);
+ watch_languages(fs.clone(), app_state.languages.clone(), cx);
+ watch_file_types(fs.clone(), cx);
+
+ cx.set_menus(app_menus());
+ initialize_workspace(app_state.clone(), prompt_builder, cx);
+
+ cx.activate(true);
+
+ cx.spawn({
+ let client = app_state.client.clone();
+ |cx| async move { authenticate(client, &cx).await }
+ })
+ .detach_and_log_err(cx);
let args = Args::parse();
let urls: Vec<_> = args
@@ -587,43 +518,27 @@ fn main() {
.and_then(|urls| OpenRequest::parse(urls, cx).log_err())
{
Some(request) => {
- handle_open_request(request, app_state.clone(), prompt_builder.clone(), cx);
+ handle_open_request(request, app_state.clone(), cx);
}
None => {
- if let Some(dev_server_token) = args.dev_server_token {
- let task =
- init_headless(DevServerToken(dev_server_token), app_state.clone(), cx);
- cx.spawn(|cx| async move {
- if let Err(e) = task.await {
- log::error!("{}", e);
- cx.update(|cx| cx.quit()).log_err();
- } else {
- log::info!("connected!");
- }
- })
- .detach();
- } else {
- init_ui(app_state.clone(), prompt_builder.clone(), cx).unwrap();
- cx.spawn({
- let app_state = app_state.clone();
- |mut cx| async move {
- if let Err(e) = restore_or_create_workspace(app_state, &mut cx).await {
- fail_to_open_window_async(e, &mut cx)
- }
+ cx.spawn({
+ let app_state = app_state.clone();
+ |mut cx| async move {
+ if let Err(e) = restore_or_create_workspace(app_state, &mut cx).await {
+ fail_to_open_window_async(e, &mut cx)
}
- })
- .detach();
- }
+ }
+ })
+ .detach();
}
}
let app_state = app_state.clone();
- let prompt_builder = prompt_builder.clone();
cx.spawn(move |cx| async move {
while let Some(urls) = open_rx.next().await {
cx.update(|cx| {
if let Some(request) = OpenRequest::parse(urls, cx).log_err() {
- handle_open_request(request, app_state.clone(), prompt_builder.clone(), cx);
+ handle_open_request(request, app_state.clone(), cx);
}
})
.ok();
@@ -693,44 +608,23 @@ fn handle_settings_changed(error: Option<anyhow::Error>, cx: &mut AppContext) {
}
}
-fn handle_open_request(
- request: OpenRequest,
- app_state: Arc<AppState>,
- prompt_builder: Arc<PromptBuilder>,
- cx: &mut AppContext,
-) {
+fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut AppContext) {
if let Some(connection) = request.cli_connection {
let app_state = app_state.clone();
- cx.spawn(move |cx| handle_cli_connection(connection, app_state, prompt_builder, cx))
+ cx.spawn(move |cx| handle_cli_connection(connection, app_state, cx))
.detach();
return;
}
- if let Err(e) = init_ui(app_state.clone(), prompt_builder, cx) {
- fail_to_open_window(e, cx);
- return;
- };
-
- if let Some(connection_info) = request.ssh_connection {
+ if let Some(connection_options) = request.ssh_connection {
cx.spawn(|mut cx| async move {
- let nickname = cx
- .update(|cx| {
- SshSettings::get_global(cx).nickname_for(
- &connection_info.host,
- connection_info.port,
- &connection_info.username,
- )
- })
- .ok()
- .flatten();
let paths_with_position =
derive_paths_with_position(app_state.fs.as_ref(), request.open_paths).await;
open_ssh_project(
- connection_info,
+ connection_options,
paths_with_position.into_iter().map(|p| p.path).collect(),
app_state,
workspace::OpenOptions::default(),
- nickname,
&mut cx,
)
.await
@@ -893,25 +787,10 @@ async fn restore_or_create_workspace(
task.await?;
}
SerializedWorkspaceLocation::Ssh(ssh) => {
- let args = cx
- .update(|cx| {
- SshSettings::get_global(cx).args_for(&ssh.host, ssh.port, &ssh.user)
- })
- .ok()
- .flatten();
- let nickname = cx
- .update(|cx| {
- SshSettings::get_global(cx).nickname_for(&ssh.host, ssh.port, &ssh.user)
- })
- .ok()
- .flatten();
- let connection_options = SshConnectionOptions {
- args,
- host: ssh.host.clone(),
- username: ssh.user.clone(),
- port: ssh.port,
- password: None,
- };
+ let connection_options = cx.update(|cx| {
+ SshSettings::get_global(cx)
+ .connection_options_for(ssh.host, ssh.port, ssh.user)
+ })?;
let app_state = app_state.clone();
cx.spawn(move |mut cx| async move {
recent_projects::open_ssh_project(
@@ -919,7 +798,6 @@ async fn restore_or_create_workspace(
ssh.paths.into_iter().map(PathBuf::from).collect(),
app_state,
workspace::OpenOptions::default(),
- nickname,
&mut cx,
)
.await
@@ -927,7 +805,6 @@ async fn restore_or_create_workspace(
})
.detach();
}
- SerializedWorkspaceLocation::DevServer(_) => {}
}
}
} else if matches!(KEY_VALUE_STORE.read_kvp(FIRST_OPEN), Ok(None)) {
@@ -208,6 +208,8 @@ pub fn initialize_workspace(
activity_indicator::ActivityIndicator::new(workspace, app_state.languages.clone(), cx);
let active_buffer_language =
cx.new_view(|_| language_selector::ActiveBufferLanguage::new(workspace));
+ let active_toolchain_language =
+ cx.new_view(|cx| toolchain_selector::ActiveToolchain::new(workspace, cx));
let vim_mode_indicator = cx.new_view(vim::ModeIndicator::new);
let cursor_position =
cx.new_view(|_| go_to_line::cursor_position::CursorPosition::new(workspace));
@@ -216,6 +218,7 @@ pub fn initialize_workspace(
status_bar.add_left_item(activity_indicator, cx);
status_bar.add_right_item(inline_completion_button, cx);
status_bar.add_right_item(active_buffer_language, cx);
+ status_bar.add_right_item(active_toolchain_language, cx);
status_bar.add_right_item(vim_mode_indicator, cx);
status_bar.add_right_item(cursor_position, cx);
});
@@ -881,12 +884,6 @@ pub fn open_new_ssh_project_from_project(
return Task::ready(Err(anyhow::anyhow!("Not an ssh project")));
};
let connection_options = ssh_client.read(cx).connection_options();
- let nickname = recent_projects::SshSettings::get_global(cx).nickname_for(
- &connection_options.host,
- connection_options.port,
- &connection_options.username,
- );
-
cx.spawn(|_, mut cx| async move {
open_ssh_project(
connection_options,
@@ -897,7 +894,6 @@ pub fn open_new_ssh_project_from_project(
replace_window: None,
env: None,
},
- nickname,
&mut cx,
)
.await
@@ -1,7 +1,6 @@
+use crate::handle_open_request;
use crate::restorable_workspace_locations;
-use crate::{handle_open_request, init_headless, init_ui};
use anyhow::{anyhow, Context, Result};
-use assistant::PromptBuilder;
use cli::{ipc, IpcHandshake};
use cli::{ipc::IpcSender, CliRequest, CliResponse};
use client::parse_zed_link;
@@ -21,8 +20,8 @@ use remote::SshConnectionOptions;
use settings::Settings;
use std::path::{Path, PathBuf};
use std::sync::Arc;
+use std::thread;
use std::time::Duration;
-use std::{process, thread};
use util::paths::PathWithPosition;
use util::ResultExt;
use welcome::{show_welcome_view, FIRST_OPEN};
@@ -73,25 +72,24 @@ impl OpenRequest {
.ok_or_else(|| anyhow!("missing host in ssh url: {}", file))?
.to_string();
let username = Some(url.username().to_string()).filter(|s| !s.is_empty());
- let password = url.password().map(|s| s.to_string());
let port = url.port();
if !self.open_paths.is_empty() {
return Err(anyhow!("cannot open both local and ssh paths"));
}
- let args = SshSettings::get_global(cx).args_for(&host, port, &username);
- let connection = SshConnectionOptions {
- username,
- password,
- host,
+ let mut connection_options = SshSettings::get_global(cx).connection_options_for(
+ host.clone(),
port,
- args,
- };
+ username.clone(),
+ );
+ if let Some(password) = url.password() {
+ connection_options.password = Some(password.to_string());
+ }
if let Some(ssh_connection) = &self.ssh_connection {
- if *ssh_connection != connection {
+ if *ssh_connection != connection_options {
return Err(anyhow!("cannot open multiple ssh connections"));
}
}
- self.ssh_connection = Some(connection);
+ self.ssh_connection = Some(connection_options);
self.parse_file_path(url.path());
Ok(())
}
@@ -252,7 +250,6 @@ pub async fn open_paths_with_positions(
pub async fn handle_cli_connection(
(mut requests, responses): (mpsc::Receiver<CliRequest>, IpcSender<CliResponse>),
app_state: Arc<AppState>,
- prompt_builder: Arc<PromptBuilder>,
mut cx: AsyncAppContext,
) {
if let Some(request) = requests.next().await {
@@ -262,48 +259,13 @@ pub async fn handle_cli_connection(
paths,
wait,
open_new_workspace,
- dev_server_token,
env,
} => {
- if let Some(dev_server_token) = dev_server_token {
- match cx
- .update(|cx| {
- init_headless(client::DevServerToken(dev_server_token), app_state, cx)
- })
- .unwrap()
- .await
- {
- Ok(_) => {
- responses
- .send(CliResponse::Stdout {
- message: format!("zed (pid {}) connected!", process::id()),
- })
- .log_err();
- responses.send(CliResponse::Exit { status: 0 }).log_err();
- }
- Err(error) => {
- responses
- .send(CliResponse::Stderr {
- message: format!("{error}"),
- })
- .log_err();
- responses.send(CliResponse::Exit { status: 1 }).log_err();
- cx.update(|cx| cx.quit()).log_err();
- }
- }
- return;
- }
-
if !urls.is_empty() {
cx.update(|cx| {
match OpenRequest::parse(urls, cx) {
Ok(open_request) => {
- handle_open_request(
- open_request,
- app_state.clone(),
- prompt_builder.clone(),
- cx,
- );
+ handle_open_request(open_request, app_state.clone(), cx);
responses.send(CliResponse::Exit { status: 0 }).log_err();
}
Err(e) => {
@@ -320,19 +282,6 @@ pub async fn handle_cli_connection(
return;
}
- if let Err(e) = cx
- .update(|cx| init_ui(app_state.clone(), prompt_builder.clone(), cx))
- .and_then(|r| r)
- {
- responses
- .send(CliResponse::Stderr {
- message: format!("{e}"),
- })
- .log_err();
- responses.send(CliResponse::Exit { status: 1 }).log_err();
- return;
- }
-
let open_workspace_result = open_workspaces(
paths,
open_new_workspace,
@@ -424,42 +373,29 @@ async fn open_workspaces(
}
SerializedWorkspaceLocation::Ssh(ssh) => {
let app_state = app_state.clone();
- let args = cx
- .update(|cx| {
- SshSettings::get_global(cx).args_for(&ssh.host, ssh.port, &ssh.user)
- })
- .ok()
- .flatten();
- let connection_options = SshConnectionOptions {
- args,
- host: ssh.host.clone(),
- username: ssh.user.clone(),
- port: ssh.port,
- password: None,
- };
- let nickname = cx
- .update(|cx| {
- SshSettings::get_global(cx).nickname_for(&ssh.host, ssh.port, &ssh.user)
+ let connection_options = cx.update(|cx| {
+ SshSettings::get_global(cx)
+ .connection_options_for(ssh.host, ssh.port, ssh.user)
+ });
+ if let Ok(connection_options) = connection_options {
+ cx.spawn(|mut cx| async move {
+ open_ssh_project(
+ connection_options,
+ ssh.paths.into_iter().map(PathBuf::from).collect(),
+ app_state,
+ OpenOptions::default(),
+ &mut cx,
+ )
+ .await
+ .log_err();
})
- .ok()
- .flatten();
- cx.spawn(|mut cx| async move {
- open_ssh_project(
- connection_options,
- ssh.paths.into_iter().map(PathBuf::from).collect(),
- app_state,
- OpenOptions::default(),
- nickname,
- &mut cx,
- )
- .await
- .log_err();
- })
- .detach();
- // We don't set `errored` here, because for ssh projects, the
- // error is displayed in the window.
+ .detach();
+ // We don't set `errored` here if `open_ssh_project` fails, because for ssh projects, the
+ // error is displayed in the window.
+ } else {
+ errored = false;
+ }
}
- SerializedWorkspaceLocation::DevServer(_) => {}
}
}
@@ -2042,6 +2042,7 @@ Run the `theme selector: toggle` action in the command palette to see a current
"folder_icons": true,
"git_status": true,
"indent_size": 20,
+ "indent_guides": true,
"auto_reveal_entries": true,
"auto_fold_dirs": true,
"scrollbar": {
@@ -2163,6 +2164,12 @@ Run the `theme selector: toggle` action in the command palette to see a current
- Setting: `indent_size`
- Default: `20`
+### Indent Guides
+
+- Description: Whether to show indent guides in the project panel.
+- Setting: `indent_guides`
+- Default: `true`
+
### Scrollbar
- Description: Scrollbar related settings. Possible values: null, "auto", "system", "always", "never". Inherits editor settings when absent, see its description for more details.
@@ -2230,6 +2237,7 @@ Run the `theme selector: toggle` action in the command palette to see a current
"folder_icons": true,
"git_status": true,
"indent_size": 20,
+ "indent_guides": true,
"auto_reveal_entries": true,
"auto_fold_dirs": true,
}
@@ -21,13 +21,26 @@ def fib(n):
## Configuration
-If you wish change the default language settings for Markdown files, perhaps to disable auto format on save or if your markdown relies upon trailing whitespace ` ` being converted to `<br />` you can add change these values in your `settings.json`:
+### Format
+
+Zed supports using Prettier to automatically re-format Markdown documents. You can trigger this manually via the {#action editor::Format} action or via the {#kb editor::Format} keyboard shortcut. Alternatively, you can automatically format by enabling [`format_on_save`](./configuring-zed.md#format-on-save) in your settings.json:
```json
"languages": {
"Markdown": {
- "remove_trailing_whitespace_on_save": true,
"format_on_save": "on"
}
},
```
+
+### Trailing Whitespace
+
+By default Zed will remove trailing whitespace on save. If you rely on invisible trailing whitespace being converted to `<br />` in Markdown files you can disable this behavior with:
+
+```json
+ "languages": {
+ "Markdown": {
+ "remove_trailing_whitespace_on_save": false
+ }
+ },
+```
@@ -2,109 +2,152 @@
Remote Development allows you to code at the speed of thought, even when your codebase is not on your local machine. You use Zed locally so the UI is immediately responsive, but offload heavy computation to the development server so that you can work effectively.
-> **Note:** Remoting is still "alpha". We have several changes we would like to make before it is fully released.
+> **Note:** Remoting is still "beta". We are still refining the reliability and performance.
## Overview
-Remote development requires running two instances of Zed. A headless instance on the remote machine, and the editor interface on your local computer. All configuration is done on your local computer.
+Remote development requires two computers: your local machine, which runs the Zed UI, and the remote server, which runs a Zed headless server. The two communicate over SSH, so you will need to be able to SSH from your local machine into the remote server to use this feature.
-Currently the two instances connect via Zed's servers, but we intend to build peer to peer communication before the feature is fully released.
+
+
+On your local machine, Zed runs its UI, talks to language models, uses Tree-sitter to parse and syntax-highlight code, and stores unsaved changes and recent projects. The source code, language servers, tasks, and the terminal all run on the remote server.
+
+> **Note:** The original version of remote development sent traffic via Zed's servers. As of Zed v0.157 you can no longer use that mode.
## Setup
-1. Download and install the latest [Zed Preview](https://zed.dev/releases/preview).
-1. Open the remote projects dialogue with `cmd-shift-p remote`.
-1. Click "New Server".
-1. Choose whether to setup via SSH, or to follow the manual setup.
- > **Note:** With both options your laptop and the remote machine will communicate
- > via https://collab.zed.dev/, so you will need outbound internet access on the remote machine.
-1. On your laptop you can now open folders on the remote machine.
+1. Download and install the latest [Zed Preview](https://zed.dev/releases/preview). You need at least Zed v0.159.
+1. Open the remote projects dialogue with <kbd>cmd-shift-p remote</kbd> or <kbd>cmd-control-o</kbd>.
+1. Click "Connect New Server" and enter the command you use to SSH into the server. See [Supported SSH options](#supported-ssh-options) for options you can pass.
+1. Your local machine will attempt to connect to the remote server using the `ssh` binary on your path. Assuming the connection is successful, Zed will download the server on the remote host and start it.
+1. Once the Zed server is running, you will be prompted to choose a path to open on the remote server.
> **Note:** Zed does not currently handle opening very large directories (for example, `/` or `~` that may have >100,000 files) very well. We are working on improving this, but suggest in the meantime opening only specific projects, or subfolders of very large mono-repos.
-## Troubleshooting
+For simple cases where you don't need any SSH arguments, you can run `zed ssh://[<user>@]<host>[:<port>]/<path>` to open a remote folder/file directly.
-### UI is not showing up
+## Supported platforms
-You need to be on a relatively recent Zed (v0.145.0 or later).
+The remote machine must be able to run Zed's server. The following platforms should work, though note that we have not exhaustively tested every Linux distribution:
-### SSH connections
+- macOS Catalina or later (Intel or Apple Silicon)
+- Linux (x86_64 or arm64, we do not yet support 32-bit platforms)
+- Windows is not yet supported.
-If you chose to connect via SSH, the command you specify will be run in a Zed terminal given you an opportunity to type any passwords/keyphrases etc. that you need.
-Once a connection is established, Zed will be downloaded and installed to `~/.local/bin/zed` on the remote machine, and run.
+## Configuration
-If you don't see any output from the Zed command, it is likely that Zed is crashing
-on startup. You can troubleshoot this by switching to manual mode and passing the `--foreground` flag. Please [file a bug](https://github.com/zed-industries/zed) so we can debug it together.
+The list of remote servers is stored in your settings file {#kb zed::OpenSettings}. You can edit this list using the Remote Projects dialogue {#kb projects::OpenRemote}, which provides some robustness - for example it checks that the connection can be established before writing it to the settings file.
-If you are trying to connect to a platform like GitHub Codespaces or Google Cloud, you may want to first make sure that your SSH configuration is set up correctly. Once you can `ssh X` to connect to the machine, then Zed will be able to connect.
+```json
+{
+ "ssh_connections": [
+ {
+ "host": "192.168.1.10",
+ "projects": ["~/code/zed/zed"]
+ }
+ ]
+}
+```
-> **Note:** In an earlier version of remoting, we supported typing in `gh cs ssh` or `gcloud compute ssh` directly. This is no longer supported. Instead you should make sure your SSH configuration is up to date with `gcloud compute ssh --config` or `gh cs ssh --config`, or use Manual setup mode if you cannot ssh directly to the machine.
+Zed shells out to the `ssh` binary on your path, and so it will inherit any configuration you have in `~/.ssh/config` for the given host. That said, if you need to override anything you can configure the following additional options on each connection:
-### zed --dev-server-token isn't connecting
+```json
+{
+ "ssh_connections": [
+ {
+ "host": "192.168.1.10",
+ "projects": ["~/code/zed/zed"],
+ // any argument to pass to the ssh master process
+ "args": ["-i", "~/.ssh/work_id_file"],
+ "port": 22, // defaults to 22
+ // defaults to your username on your local machine
+ "username": "me"
+ }
+ ]
+}
+```
-There are a few likely causes of failure:
+There are two additional Zed-specific options per connection, `upload_binary_over_ssh` and `nickname`:
-- `zed --dev-server-token` runs but outputs nothing. This is probably because the Zed background process is crashing on startup. Try running `zed --dev-server-token XX --foreground` to see any output, and [file a bug](https://github.com/zed-industries/zed) so we can debug it together.
-- `zed --dev-server-token` outputs something like "Connection refused" or "Unauthorized" and immediately exits. This is likely due to issues making outbound HTTP requests to https://collab.zed.dev from your host. You can try to debug this with `curl https://collab.zed.dev`, but we have seen cases where curl is whitelisted, but other binaries are not allowed network access.
-- `zed --dev-server-token` outputs "Zed is already running". If you are editing an existing server, it is possible that clicking "Connect" a second time will work, but if not you will have to manually log into the server and kill the Zed process.
+```json
+{
+ "ssh_connections": [
+ {
+ "host": "192.168.1.10",
+ "projects": ["~/code/zed/zed"],
+ // by default Zed will download the server binary from the internet on the remote.
+ // When this is true, it'll be downloaded to your laptop and uploaded over SSH.
+ // This is useful when your remote server has restricted internet access.
+ "upload_binary_over_ssh": true,
+ // Shown in the Zed UI to help distinguish multiple hosts.
+ "nickname": "lil-linux"
+ }
+ ]
+}
+```
-## Supported platforms
+If you use the command line to open a connection to a host by doing `zed ssh://192.168.1.10/~/.vimrc`, then extra options are read from your settings file by finding the first connection that matches the host/username/port of the URL on the command line.
-The remote machine must be able to run Zed. The following platforms should work, though note that we have not exhaustively tested every Linux distribution:
+Additionally it's worth noting that while you can pass a password on the command line `zed ssh://user:password@host/~`, we do not support writing a password to your settings file. If you're connecting repeatedly to the same host, you should configure key-based authentication.
-- macOS Catalina or later (Intel or Apple Silicon)
-- Linux (x86_64 or arm64, we do not yet support 32-bit platforms). You must have `glibc` installed at version 2.29 (released in 2019) or greater and available globally.
-- Windows is not yet supported.
+## Zed settings
-## Settings and extensions
+When opening a remote project there are three relevant settings locations:
-> **Note:** This may change as the alpha program continues.
+- The local Zed settings (in `~/.zed/settings.json` on macOS or `~/.config/zed/settings.json` on Linux) on your local machine.
+- The server Zed settings (in the same place) on the remote server.
+- The project settings (in `.zed/settings.json` or `.editorconfig` of your project)
-<!--
-TBD: Remote user settings need a name. Perhaps `zed: remote user settings`?
--->
+Both the local Zed and the server Zed read the project settings, but they are not aware of the other's main `settings.json`.
-You can edit the settings file on the remote instance. To do so, add a new project to your server in the directory `~/.config/zed`. You can create a file called `settings.json` if it does not yet exist.
+Depending on the kind of setting you want to make, which settings file you should use:
-Note that this is most useful for configuring language servers, as any UI related settings do not apply.
+- Project settings should be used for things that affect the project: indentation settings, which formatter / language server to use, etc.
+- Server settings should be used for things that affect the server: paths to language servers, etc.
+- Local settings should be used for things that affect the UI: font size, etc.
-If you'd like to install language-server extensions, you can add them to the list of `auto_installed_extensions`. Again you don't need to do this to get syntax highlighting (which is handled by the local zed).
+## Initializing the remote server
-```json
-{
- "auto_install_extensions": {
- "java": true
- }
-}
-```
+Once you provide the SSH options, Zed shells out to `ssh` on your local machine to create a ControlMaster connection with the options you provide.
-## Known Limitations
+Any prompts that SSH needs will be shown in the UI, so you can verify host keys, type key passwords, etc.
-- You can't use the Terminal or Tasks if you choose "Manual Connection"
-- You can't run `zed` in headless mode and in GUI mode at the same time on the same machine.
-- You can't open files from the remote Terminal by typing the `zed` command.
+Once the master connection is established, Zed will check to see if the remote server binary is present in `~/.zed_server` on the remote, and that its version matches the current version of Zed that you're using.
-## Feedback
+If it is not there or the version mismatches, Zed will try to download the latest version. By default, it will download from `https://zed.dev` directly, but if you set: `{"remote_server": {"download":false}}` in your local settings, it will download the binary to your local machine and then upload it to the remote server.
-Please join the #remoting-feedback channel in the [Zed Discord](https://discord.gg/zed-community).
+## Maintaining the SSH connection
-# Direct SSH Connections
+Once the server is initialized, Zed will create new SSH connections (reusing the existing ControlMaster) to run the remote development server.
-The current alpha release of Zed always connects via our servers. This was to get experience building the feature on top of our existing collaboration support. We plan to move to direct SSH connections for any machine that can be SSH'd into.
+Each connection tries to run the development server in proxy mode. This mode will start the daemon if it is not running, and reconnect to it if it is. This way when your connection drops and is restarted, you can continue to work without interruption.
-We are working on a direct SSH connection feature, which you can try out if you'd like.
+In the case that reconnecting fails, the daemon will not be re-used. That said, unsaved changes are by default persisted locally, so that you do not lose work. You can always reconnect to the project at a later date and Zed will restore unsaved changes.
-> **Note:** Direct SSH support does not support most features yet! You cannot use project search, language servers, or basically do anything except edit files...
+If you are struggling with connection issues, you should be able to see more information in the Zed log `cmd-shift-p Open Log`. If you are seeing things that are unexpected, please file a [GitHub issue](https://github.com/zed-industries/zed/issues/new) or reach out in the #remoting-feedback channel in the [Zed Discord](https://discord.gg/zed-community).
-To try this out you can either from the command line run:
+## Supported SSH Options
-```sh
-zed ssh://user@host:port/path/to/project
-```
+Under the hood, Zed shells out to the `ssh` binary to connect to the remote server. We create one SSH control master per project, and then use that to multiplex SSH connections for the Zed protocol itself, any terminals you open and tasks you run. We read settings from your SSH config file, but if you want to specify additional options to the SSH control master you can configure Zed to set them.
-Or you can (in your settings file) add:
+When typing in the "Connect New Server" dialogue, you can use bash-style quoting to pass options containing a space. Once you have created a server it will be added to the `"ssh_connections": []` array in your settings file. You can edit the settings file directly to make changes to SSH connections.
-```json
-"ssh_connections": []
-```
+Supported options:
+
+- `-p` / `-l` - these are equivalent to passing the port and the username in the host string.
+- `-L` / `-R` for port forwarding
+- `-i` - to use a specific key file
+- `-o` - to set custom options
+- `-J` / `-w` - to proxy the SSH connection
+- And also... `-4`, `-6`, `-A`, `-a`, `-C`, `-K`, `-k`, `-X`, `-x`, `-Y`, `-y`, `-B`, `-b`, `-c`, `-D`, `-I`, `-i`, `-J`, `-l`, `-m`, `-o`, `-P`, `-p`, `-w`
+
+Note that we deliberately disallow some options (for example `-t` or `-T`) that Zed will set for you.
+
+## Known Limitations
-And then from the command palette choose `projects: Open Remote` and configure an SSH connection from there.
+- Zed extensions are not yet supported on remotes, so languages that need them for support do not work.
+- You can't open files from the remote Terminal by typing the `zed` command.
+- Zed does not yet support automatic port-forwarding. You can use `-R` and `-L` in your SSH arguments for now.
+
+## Feedback
+
+Please join the #remoting-feedback channel in the [Zed Discord](https://discord.gg/zed-community).
@@ -34,7 +34,7 @@ By default, Zed maintains two themes: one for light mode and one for dark mode.
To override specific attributes of a theme, use the `experimental.theme_overrides` setting.
-For example, to override the background color of the editor and the font style of comments, add the following to your `settings.json` file:
+For example, add the following to your `settings.json` if you wish to override the background color of the editor and display comments and doc comments as italics:
```json
{
@@ -43,13 +43,18 @@ For example, to override the background color of the editor and the font style o
"syntax": {
"comment": {
"font_style": "italic"
+ },
+ "comment.doc": {
+ "font_style": "italic"
}
}
}
}
```
-See which attributes are available to override by looking at the JSON format of your theme. For example, [here is the JSON format for the `One` themes](https://github.com/zed-industries/zed/blob/main/assets/themes/one/one.json).
+To see a comprehensive list of captures (like `comment` and `comment.doc`) see: [Language Extensions: Syntax highlighting](./extensions/languages.md#syntax-highlighting).
+
+To see a list of available theme attributes look at the JSON file for your theme. For example, [assets/themes/one/one.json](https://github.com/zed-industries/zed/blob/main/assets/themes/one/one.json) for the default One Dark and One Light themes.
## Local Themes
@@ -1,6 +1,6 @@
[package]
name = "zed_dart"
-version = "0.1.1"
+version = "0.1.2"
edition = "2021"
publish = false
license = "Apache-2.0"
@@ -1,7 +1,7 @@
id = "dart"
name = "Dart"
description = "Dart support."
-version = "0.1.1"
+version = "0.1.2"
schema_version = 1
authors = ["Abdullah Alsigar <abdullah.alsigar@gmail.com>", "Flo <flo80@users.noreply.github.com>", "ybbond <hi@ybbond.id>"]
repository = "https://github.com/zed-industries/zed"
@@ -1,7 +1,7 @@
name = "Dart"
grammar = "dart"
path_suffixes = ["dart"]
-line_comments = ["// "]
+line_comments = ["// ", "/// "]
autoclose_before = ";:.,=}])>"
brackets = [
{ start = "{", end = "}", close = true, newline = true },
@@ -19,6 +19,9 @@
(
TestDecl (
"test" @context
- (STRINGLITERALSINGLE)? @name
+ [
+ (STRINGLITERALSINGLE)
+ (IDENTIFIER)
+ ]? @name
)
) @item
@@ -97,7 +97,7 @@ popd
pushd crates/zed
cp Cargo.toml Cargo.toml.backup
sed \
- -i .backup \
+ -i.backup \
"s/package.metadata.bundle-${channel}/package.metadata.bundle/" \
Cargo.toml
@@ -8,7 +8,7 @@ prHygiene({
},
});
-const RELEASE_NOTES_PATTERN = new RegExp("Release Notes:\\r?\\n\\s+-", "gm");
+const RELEASE_NOTES_PATTERN = /Release Notes:\r?\n\s+-/gm;
const body = danger.github.pr.body;
const hasReleaseNotes = RELEASE_NOTES_PATTERN.test(body);
@@ -36,28 +36,22 @@ if (!hasReleaseNotes) {
);
}
-const ISSUE_LINK_PATTERN = new RegExp(
- "(?<!(?:Close[sd]?|Fixe[sd]|Resolve[sd]|Implement[sed])\\s+)https://github\\.com/[\\w-]+/[\\w-]+/issues/\\d+",
- "gi"
-);
+const ISSUE_LINK_PATTERN =
+ /(?<!(?:Close[sd]?|Fixe[sd]|Resolve[sd]|Implement[sed]|Follow-up of|Part of)\s+)https:\/\/github\.com\/[\w-]+\/[\w-]+\/issues\/\d+/gi;
-
-const includesIssueUrl = ISSUE_LINK_PATTERN.test(body);
+const bodyWithoutReleaseNotes = hasReleaseNotes ? body.split(/Release Notes:/)[0] : body;
+const includesIssueUrl = ISSUE_LINK_PATTERN.test(bodyWithoutReleaseNotes);
if (includesIssueUrl) {
- const matches = body.match(ISSUE_LINK_PATTERN) ?? [];
+ const matches = bodyWithoutReleaseNotes.match(ISSUE_LINK_PATTERN) ?? [];
const issues = matches
- .map((match) =>
- match
- .replace(/^#/, "")
- .replace(/https:\/\/github\.com\/zed-industries\/zed\/issues\//, ""),
- )
+ .map((match) => match.replace(/^#/, "").replace(/https:\/\/github\.com\/zed-industries\/zed\/issues\//, ""))
.filter((issue, index, self) => self.indexOf(issue) === index);
+ const issuesToReport = issues.map((issue) => `#${issue}`).join(", ");
message(
[
- "This PR includes links to the following GitHub Issues: " +
- issues.map((issue) => `#${issue}`).join(", "),
+ `This PR includes links to the following GitHub Issues: ${issuesToReport}`,
"If this PR aims to close an issue, please include a `Closes #ISSUE` line at the top of the PR body.",
].join("\n"),
);
@@ -36,3 +36,141 @@ license = "BSD-3-Clause"
[[fuchsia-cprng.clarify.files]]
path = 'LICENSE'
checksum = '03b114f53e6587a398931762ee11e2395bfdba252a329940e2c8c9e81813845b'
+
+[pet.clarify]
+license = "MIT"
+[[pet.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-conda.clarify]
+license = "MIT"
+[[pet-conda.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-core.clarify]
+license = "MIT"
+[[pet-core.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-env-var-path.clarify]
+license = "MIT"
+[[pet-env-var-path.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-fs.clarify]
+license = "MIT"
+[[pet-fs.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-global-virtualenvs.clarify]
+license = "MIT"
+[[pet-global-virtualenvs.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-homebrew.clarify]
+license = "MIT"
+[[pet-homebrew.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-jsonrpc.clarify]
+license = "MIT"
+[[pet-jsonrpc.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-linux-global-python.clarify]
+license = "MIT"
+[[pet-linux-global-python.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-mac-commandlinetools.clarify]
+license = "MIT"
+[[pet-mac-commandlinetools.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-mac-python-org.clarify]
+license = "MIT"
+[[pet-mac-python-org.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-mac-xcode.clarify]
+license = "MIT"
+[[pet-mac-xcode.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-pipenv.clarify]
+license = "MIT"
+[[pet-pipenv.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-poetry.clarify]
+license = "MIT"
+[[pet-poetry.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-pyenv.clarify]
+license = "MIT"
+[[pet-pyenv.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-python-utils.clarify]
+license = "MIT"
+[[pet-python-utils.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-reporter.clarify]
+license = "MIT"
+[[pet-reporter.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-telemetry.clarify]
+license = "MIT"
+[[pet-telemetry.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-venv.clarify]
+license = "MIT"
+[[pet-venv.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-virtualenv.clarify]
+license = "MIT"
+[[pet-virtualenv.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-virtualenvwrapper.clarify]
+license = "MIT"
+[[pet-virtualenvwrapper.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-windows-registry.clarify]
+license = "MIT"
+[[pet-windows-registry.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'
+
+[pet-windows-store.clarify]
+license = "MIT"
+[[pet-windows-store.clarify.git]]
+path = 'LICENSE'
+checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383'