Max Brunsfeld created
.github/pull_request_template.md | 5
.github/workflows/build_dmg.yml | 54
.github/workflows/ci.yml | 6
.github/workflows/release_actions.yml | 2
Cargo.lock | 18
assets/keymaps/default.json | 6
assets/keymaps/jetbrains.json | 3
crates/auto_update/src/auto_update.rs | 2
crates/cli/Cargo.toml | 1
crates/cli/src/cli.rs | 12
crates/cli/src/main.rs | 233
crates/client/src/telemetry.rs | 5
crates/collab/Cargo.toml | 2
crates/collab/migrations.sqlite/20221109000000_test_schema.sql | 19
crates/collab/migrations/20230511004019_add_repository_statuses.sql | 15
crates/collab/src/db.rs | 149
crates/collab/src/db/worktree_repository_statuses.rs | 23
crates/collab/src/rpc.rs | 2
crates/collab/src/tests/integration_tests.rs | 150
crates/collab/src/tests/randomized_integration_tests.rs | 259
crates/copilot/src/copilot.rs | 2
crates/diagnostics/src/diagnostics.rs | 1
crates/editor/src/editor.rs | 145
crates/editor/src/editor_tests.rs | 3
crates/editor/src/hover_popover.rs | 3
crates/editor/src/items.rs | 18
crates/file_finder/Cargo.toml | 1
crates/file_finder/src/file_finder.rs | 324
crates/fs/Cargo.toml | 1
crates/fs/src/fs.rs | 34
crates/fs/src/repository.rs | 129
crates/go_to_line/Cargo.toml | 1
crates/go_to_line/src/go_to_line.rs | 43
crates/gpui/Cargo.toml | 2
crates/gpui/src/color.rs | 2
crates/gpui/src/elements.rs | 9
crates/gpui/src/keymap_matcher/binding.rs | 13
crates/gpui/src/platform/mac/window.rs | 2
crates/project/Cargo.toml | 1
crates/project/src/project.rs | 175
crates/project/src/worktree.rs | 670
crates/project_panel/src/project_panel.rs | 36
crates/rpc/proto/zed.proto | 14
crates/rpc/src/proto.rs | 40
crates/rpc/src/rpc.rs | 2
crates/search/Cargo.toml | 1
crates/search/src/project_search.rs | 1
crates/sum_tree/src/sum_tree.rs | 2
crates/sum_tree/src/tree_map.rs | 176
crates/theme/Cargo.toml | 1
crates/theme/src/ui.rs | 57
crates/util/Cargo.toml | 1
crates/util/src/paths.rs | 207
crates/util/src/util.rs | 2
crates/workspace/src/dock.rs | 7
crates/workspace/src/persistence/model.rs | 28
crates/workspace/src/workspace.rs | 354
crates/zed/Cargo.toml | 2
crates/zed/src/main.rs | 183
script/clear-target-dir-if-larger-than | 20
60 files changed, 2,978 insertions(+), 701 deletions(-)
.github/pull_request_template.md 🔗
@@ -0,0 +1,5 @@
+[[PR Description]]
+
+Release Notes:
+
+* [[Added foo / Fixed bar / No notes]]
@@ -0,0 +1,54 @@
+name: Build Zed.dmg
+
+on:
+ push:
+ branches:
+ - main
+ - "v[0-9]+.[0-9]+.x"
+ pull_request:
+
+defaults:
+ run:
+ shell: bash -euxo pipefail {0}
+
+concurrency:
+  # Allow only one concurrent workflow run per non-`main` branch (on `main`, the commit SHA makes each push its own group).
+ group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
+ cancel-in-progress: true
+
+env:
+ RUST_BACKTRACE: 1
+ COPT: '-Werror'
+
+jobs:
+ build-dmg:
+ if: github.ref_name == 'main' || contains(github.event.pull_request.labels.*.name, 'run-build-dmg')
+ runs-on:
+ - self-hosted
+ - test
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+ clean: false
+ submodules: 'recursive'
+
+ - name: Install Rust
+ run: |
+ rustup set profile minimal
+ rustup update stable
+
+ - name: Install node
+ uses: actions/setup-node@v3
+ with:
+ node-version: 18
+
+ - name: Build dmg bundle
+ run: ./script/bundle
+
+ - name: Upload the build artifact
+ uses: actions/upload-artifact@v3
+ with:
+ name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.dmg
+ path: ./target/release/Zed.dmg
@@ -62,6 +62,9 @@ jobs:
clean: false
submodules: 'recursive'
+ - name: Limit target directory size
+ run: script/clear-target-dir-if-larger-than 70
+
- name: Run check
run: cargo check --workspace
@@ -110,6 +113,9 @@ jobs:
clean: false
submodules: 'recursive'
+ - name: Limit target directory size
+ run: script/clear-target-dir-if-larger-than 70
+
- name: Determine version and release channel
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
run: |
@@ -14,7 +14,7 @@ jobs:
content: |
📣 Zed ${{ github.event.release.tag_name }} was just released!
- Restart your Zed or head to https://zed.dev/releases/latest to grab it.
+ Restart your Zed or head to https://zed.dev/releases/stable/latest to grab it.
```md
# Changelog
@@ -1135,6 +1135,7 @@ dependencies = [
"plist",
"serde",
"serde_derive",
+ "util",
]
[[package]]
@@ -1228,7 +1229,7 @@ dependencies = [
[[package]]
name = "collab"
-version = "0.12.0"
+version = "0.12.1"
dependencies = [
"anyhow",
"async-tungstenite",
@@ -2253,6 +2254,7 @@ dependencies = [
"project",
"serde_json",
"settings",
+ "text",
"theme",
"util",
"workspace",
@@ -2418,6 +2420,7 @@ dependencies = [
"serde_derive",
"serde_json",
"smol",
+ "sum_tree",
"tempfile",
"util",
]
@@ -2747,6 +2750,7 @@ dependencies = [
"postage",
"settings",
"text",
+ "util",
"workspace",
]
@@ -4838,6 +4842,7 @@ dependencies = [
"futures 0.3.28",
"fuzzy",
"git",
+ "git2",
"glob",
"gpui",
"ignore",
@@ -5930,6 +5935,7 @@ name = "search"
version = "0.1.0"
dependencies = [
"anyhow",
+ "client",
"collections",
"editor",
"futures 0.3.28",
@@ -6702,6 +6708,12 @@ dependencies = [
"winx",
]
+[[package]]
+name = "take-until"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8bdb6fa0dfa67b38c1e66b7041ba9dcf23b99d8121907cd31c807a332f7a0bbb"
+
[[package]]
name = "target-lexicon"
version = "0.12.7"
@@ -6847,6 +6859,7 @@ name = "theme"
version = "0.1.0"
dependencies = [
"anyhow",
+ "fs",
"gpui",
"indexmap",
"parking_lot 0.11.2",
@@ -7756,6 +7769,7 @@ dependencies = [
"serde",
"serde_json",
"smol",
+ "take-until",
"tempdir",
"url",
]
@@ -8736,7 +8750,7 @@ checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
[[package]]
name = "zed"
-version = "0.86.0"
+version = "0.87.0"
dependencies = [
"activity_indicator",
"anyhow",
@@ -191,7 +191,7 @@
}
},
{
- "context": "BufferSearchBar > Editor",
+ "context": "BufferSearchBar",
"bindings": {
"escape": "buffer_search::Dismiss",
"tab": "buffer_search::FocusEditor",
@@ -200,13 +200,13 @@
}
},
{
- "context": "ProjectSearchBar > Editor",
+ "context": "ProjectSearchBar",
"bindings": {
"escape": "project_search::ToggleFocus"
}
},
{
- "context": "ProjectSearchView > Editor",
+ "context": "ProjectSearchView",
"bindings": {
"escape": "project_search::ToggleFocus"
}
@@ -11,6 +11,7 @@
"ctrl->": "zed::IncreaseBufferFontSize",
"ctrl-<": "zed::DecreaseBufferFontSize",
"cmd-d": "editor::DuplicateLine",
+ "cmd-backspace": "editor::DeleteLine",
"cmd-pagedown": "editor::MovePageDown",
"cmd-pageup": "editor::MovePageUp",
"ctrl-alt-shift-b": "editor::SelectToPreviousWordStart",
@@ -33,6 +34,7 @@
],
"shift-alt-up": "editor::MoveLineUp",
"shift-alt-down": "editor::MoveLineDown",
+ "cmd-alt-l": "editor::Format",
"cmd-[": "pane::GoBack",
"cmd-]": "pane::GoForward",
"alt-f7": "editor::FindAllReferences",
@@ -63,6 +65,7 @@
{
"context": "Workspace",
"bindings": {
+ "cmd-shift-o": "file_finder::Toggle",
"cmd-shift-a": "command_palette::Toggle",
"cmd-alt-o": "project_symbols::Toggle",
"cmd-1": "workspace::ToggleLeftSidebar",
@@ -121,7 +121,7 @@ fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) {
{
format!("{server_url}/releases/preview/latest")
} else {
- format!("{server_url}/releases/latest")
+ format!("{server_url}/releases/stable/latest")
};
cx.platform().open_url(&latest_release_url);
}
@@ -19,6 +19,7 @@ dirs = "3.0"
ipc-channel = "0.16"
serde.workspace = true
serde_derive.workspace = true
+util = { path = "../util" }
[target.'cfg(target_os = "macos")'.dependencies]
core-foundation = "0.9"
@@ -1,6 +1,5 @@
pub use ipc_channel::ipc;
use serde::{Deserialize, Serialize};
-use std::path::PathBuf;
#[derive(Serialize, Deserialize)]
pub struct IpcHandshake {
@@ -10,7 +9,12 @@ pub struct IpcHandshake {
#[derive(Debug, Serialize, Deserialize)]
pub enum CliRequest {
- Open { paths: Vec<PathBuf>, wait: bool },
+    // The field is named `paths` for compatibility, but the CLI can now request
+    // opening a path at a certain row and/or column: `some/path:123` and `some/path:123:456`.
+    //
+    // Since the Zed CLI is installed separately, an older CLI may be talking to a newer
+    // Zed editor, so support both formats by using `String` here and parsing it on the Zed side.
+ Open { paths: Vec<String>, wait: bool },
}
#[derive(Debug, Serialize, Deserialize)]
@@ -20,3 +24,7 @@ pub enum CliResponse {
Stderr { message: String },
Exit { status: i32 },
}
+
+/// When Zed is started not as an *.app bundle but as a plain binary (e.g. during
+/// local development), this environment variable can be set to make it behave "regularly".
+pub const FORCE_CLI_MODE_ENV_VAR_NAME: &str = "ZED_FORCE_CLI_MODE";
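For context, here is a minimal sketch of how these `path:row:column` strings round-trip. The `PathLikeWithPosition` implementation lives in `crates/util/src/paths.rs` (not shown in this diff), so the surface below is inferred from this PR's call sites; on the command line, this is what an invocation like `zed src/main.rs:123:456` feeds into `CliRequest::Open`.

```rust
use std::{convert::Infallible, path::PathBuf};
use util::paths::PathLikeWithPosition;

fn main() {
    // Parse "src/main.rs:123:456" into a path plus optional 1-based row/column.
    let parsed: PathLikeWithPosition<PathBuf> =
        PathLikeWithPosition::parse_str("src/main.rs:123:456", |path_str| {
            Ok::<_, Infallible>(PathBuf::from(path_str))
        })
        .expect("infallible");
    assert_eq!(parsed.path_like, PathBuf::from("src/main.rs"));
    assert_eq!(parsed.row, Some(123));
    assert_eq!(parsed.column, Some(456));

    // Serialize back into the plain `String` carried by `CliRequest::Open`,
    // which is also what keeps bare paths from older CLIs working.
    let wire = parsed.to_string(|path| path.display().to_string());
    assert_eq!(wire, "src/main.rs:123:456");
}
```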
@@ -1,6 +1,6 @@
-use anyhow::{anyhow, Result};
+use anyhow::{anyhow, Context, Result};
use clap::Parser;
-use cli::{CliRequest, CliResponse, IpcHandshake};
+use cli::{CliRequest, CliResponse, IpcHandshake, FORCE_CLI_MODE_ENV_VAR_NAME};
use core_foundation::{
array::{CFArray, CFIndex},
string::kCFStringEncodingUTF8,
@@ -16,16 +16,20 @@ use std::{
path::{Path, PathBuf},
ptr,
};
+use util::paths::PathLikeWithPosition;
#[derive(Parser)]
#[clap(name = "zed", global_setting(clap::AppSettings::NoAutoVersion))]
struct Args {
- /// Wait for all of the given paths to be closed before exiting.
+ /// Wait for all of the given paths to be opened/closed before exiting.
#[clap(short, long)]
wait: bool,
/// A sequence of space-separated paths that you want to open.
- #[clap()]
- paths: Vec<PathBuf>,
+ ///
+    /// Use `path:row:column` syntax to open a file at a specific location.
+    /// The `:row:column` suffix is ignored for directories and non-existent paths.
+ #[clap(value_parser = parse_path_with_position)]
+ paths_with_position: Vec<PathLikeWithPosition<PathBuf>>,
/// Print Zed's version and the app path.
#[clap(short, long)]
version: bool,
@@ -34,6 +38,14 @@ struct Args {
bundle_path: Option<PathBuf>,
}
+fn parse_path_with_position(
+ argument_str: &str,
+) -> Result<PathLikeWithPosition<PathBuf>, std::convert::Infallible> {
+ PathLikeWithPosition::parse_str(argument_str, |path_str| {
+ Ok(Path::new(path_str).to_path_buf())
+ })
+}
+
#[derive(Debug, Deserialize)]
struct InfoPlist {
#[serde(rename = "CFBundleShortVersionString")]
@@ -43,37 +55,37 @@ struct InfoPlist {
fn main() -> Result<()> {
let args = Args::parse();
- let bundle_path = if let Some(bundle_path) = args.bundle_path {
- bundle_path.canonicalize()?
- } else {
- locate_bundle()?
- };
+ let bundle = Bundle::detect(args.bundle_path.as_deref()).context("Bundle detection")?;
if args.version {
- let plist_path = bundle_path.join("Contents/Info.plist");
- let plist = plist::from_file::<_, InfoPlist>(plist_path)?;
- println!(
- "Zed {} – {}",
- plist.bundle_short_version_string,
- bundle_path.to_string_lossy()
- );
+ println!("{}", bundle.zed_version_string());
return Ok(());
}
- for path in args.paths.iter() {
+ for path in args
+ .paths_with_position
+ .iter()
+ .map(|path_with_position| &path_with_position.path_like)
+ {
if !path.exists() {
touch(path.as_path())?;
}
}
- let (tx, rx) = launch_app(bundle_path)?;
+ let (tx, rx) = bundle.launch()?;
tx.send(CliRequest::Open {
paths: args
- .paths
+ .paths_with_position
.into_iter()
- .map(|path| fs::canonicalize(path).map_err(|error| anyhow!(error)))
- .collect::<Result<Vec<PathBuf>>>()?,
+ .map(|path_with_position| {
+ let path_with_position = path_with_position.map_path_like(|path| {
+ fs::canonicalize(&path)
+ .with_context(|| format!("path {path:?} canonicalization"))
+ })?;
+ Ok(path_with_position.to_string(|path| path.display().to_string()))
+ })
+ .collect::<Result<_>>()?,
wait: args.wait,
})?;
@@ -89,6 +101,148 @@ fn main() -> Result<()> {
Ok(())
}
+enum Bundle {
+ App {
+ app_bundle: PathBuf,
+ plist: InfoPlist,
+ },
+ LocalPath {
+ executable: PathBuf,
+ plist: InfoPlist,
+ },
+}
+
+impl Bundle {
+ fn detect(args_bundle_path: Option<&Path>) -> anyhow::Result<Self> {
+ let bundle_path = if let Some(bundle_path) = args_bundle_path {
+ bundle_path
+ .canonicalize()
+ .with_context(|| format!("Args bundle path {bundle_path:?} canonicalization"))?
+ } else {
+ locate_bundle().context("bundle autodiscovery")?
+ };
+
+ match bundle_path.extension().and_then(|ext| ext.to_str()) {
+ Some("app") => {
+ let plist_path = bundle_path.join("Contents/Info.plist");
+ let plist = plist::from_file::<_, InfoPlist>(&plist_path).with_context(|| {
+ format!("Reading *.app bundle plist file at {plist_path:?}")
+ })?;
+ Ok(Self::App {
+ app_bundle: bundle_path,
+ plist,
+ })
+ }
+ _ => {
+ println!("Bundle path {bundle_path:?} has no *.app extension, attempting to locate a dev build");
+ let plist_path = bundle_path
+ .parent()
+ .with_context(|| format!("Bundle path {bundle_path:?} has no parent"))?
+ .join("WebRTC.framework/Resources/Info.plist");
+ let plist = plist::from_file::<_, InfoPlist>(&plist_path)
+ .with_context(|| format!("Reading dev bundle plist file at {plist_path:?}"))?;
+ Ok(Self::LocalPath {
+ executable: bundle_path,
+ plist,
+ })
+ }
+ }
+ }
+
+ fn plist(&self) -> &InfoPlist {
+ match self {
+ Self::App { plist, .. } => plist,
+ Self::LocalPath { plist, .. } => plist,
+ }
+ }
+
+ fn path(&self) -> &Path {
+ match self {
+ Self::App { app_bundle, .. } => app_bundle,
+            Self::LocalPath { executable, .. } => executable,
+ }
+ }
+
+ fn launch(&self) -> anyhow::Result<(IpcSender<CliRequest>, IpcReceiver<CliResponse>)> {
+ let (server, server_name) =
+ IpcOneShotServer::<IpcHandshake>::new().context("Handshake before Zed spawn")?;
+ let url = format!("zed-cli://{server_name}");
+
+ match self {
+ Self::App { app_bundle, .. } => {
+ let app_path = app_bundle;
+
+ let status = unsafe {
+ let app_url = CFURL::from_path(app_path, true)
+ .with_context(|| format!("invalid app path {app_path:?}"))?;
+ let url_to_open = CFURL::wrap_under_create_rule(CFURLCreateWithBytes(
+ ptr::null(),
+ url.as_ptr(),
+ url.len() as CFIndex,
+ kCFStringEncodingUTF8,
+ ptr::null(),
+ ));
+ let urls_to_open = CFArray::from_copyable(&[url_to_open.as_concrete_TypeRef()]);
+ LSOpenFromURLSpec(
+ &LSLaunchURLSpec {
+ appURL: app_url.as_concrete_TypeRef(),
+ itemURLs: urls_to_open.as_concrete_TypeRef(),
+ passThruParams: ptr::null(),
+ launchFlags: kLSLaunchDefaults,
+ asyncRefCon: ptr::null_mut(),
+ },
+ ptr::null_mut(),
+ )
+ };
+
+ anyhow::ensure!(
+ status == 0,
+ "cannot start app bundle {}",
+ self.zed_version_string()
+ );
+ }
+ Self::LocalPath { executable, .. } => {
+ let executable_parent = executable
+ .parent()
+ .with_context(|| format!("Executable {executable:?} path has no parent"))?;
+ let subprocess_stdout_file =
+ fs::File::create(executable_parent.join("zed_dev.log"))
+ .with_context(|| format!("Log file creation in {executable_parent:?}"))?;
+ let subprocess_stdin_file =
+ subprocess_stdout_file.try_clone().with_context(|| {
+ format!("Cloning descriptor for file {subprocess_stdout_file:?}")
+ })?;
+ let mut command = std::process::Command::new(executable);
+ let command = command
+ .env(FORCE_CLI_MODE_ENV_VAR_NAME, "")
+ .stderr(subprocess_stdout_file)
+ .stdout(subprocess_stdin_file)
+ .arg(url);
+
+ command
+ .spawn()
+ .with_context(|| format!("Spawning {command:?}"))?;
+ }
+ }
+
+ let (_, handshake) = server.accept().context("Handshake after Zed spawn")?;
+ Ok((handshake.requests, handshake.responses))
+ }
+
+ fn zed_version_string(&self) -> String {
+ let is_dev = matches!(self, Self::LocalPath { .. });
+ format!(
+ "Zed {}{} – {}",
+ self.plist().bundle_short_version_string,
+ if is_dev { " (dev)" } else { "" },
+ self.path().display(),
+ )
+ }
+}
+
fn touch(path: &Path) -> io::Result<()> {
match OpenOptions::new().create(true).write(true).open(path) {
Ok(_) => Ok(()),
@@ -106,38 +260,3 @@ fn locate_bundle() -> Result<PathBuf> {
}
Ok(app_path)
}
-
-fn launch_app(app_path: PathBuf) -> Result<(IpcSender<CliRequest>, IpcReceiver<CliResponse>)> {
- let (server, server_name) = IpcOneShotServer::<IpcHandshake>::new()?;
- let url = format!("zed-cli://{server_name}");
-
- let status = unsafe {
- let app_url =
- CFURL::from_path(&app_path, true).ok_or_else(|| anyhow!("invalid app path"))?;
- let url_to_open = CFURL::wrap_under_create_rule(CFURLCreateWithBytes(
- ptr::null(),
- url.as_ptr(),
- url.len() as CFIndex,
- kCFStringEncodingUTF8,
- ptr::null(),
- ));
- let urls_to_open = CFArray::from_copyable(&[url_to_open.as_concrete_TypeRef()]);
- LSOpenFromURLSpec(
- &LSLaunchURLSpec {
- appURL: app_url.as_concrete_TypeRef(),
- itemURLs: urls_to_open.as_concrete_TypeRef(),
- passThruParams: ptr::null(),
- launchFlags: kLSLaunchDefaults,
- asyncRefCon: ptr::null_mut(),
- },
- ptr::null_mut(),
- )
- };
-
- if status == 0 {
- let (_, handshake) = server.accept()?;
- Ok((handshake.requests, handshake.responses))
- } else {
- Err(anyhow!("cannot start {:?}", app_path))
- }
-}
@@ -85,6 +85,11 @@ pub enum ClickhouseEvent {
copilot_enabled: bool,
copilot_enabled_for_language: bool,
},
+ Copilot {
+ suggestion_id: Option<String>,
+ suggestion_accepted: bool,
+ file_extension: Option<String>,
+ },
}
#[derive(Serialize, Debug)]
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
default-run = "collab"
edition = "2021"
name = "collab"
-version = "0.12.0"
+version = "0.12.1"
publish = false
[[bin]]
@@ -86,8 +86,8 @@ CREATE TABLE "worktree_repositories" (
"project_id" INTEGER NOT NULL,
"worktree_id" INTEGER NOT NULL,
"work_directory_id" INTEGER NOT NULL,
- "scan_id" INTEGER NOT NULL,
"branch" VARCHAR,
+ "scan_id" INTEGER NOT NULL,
"is_deleted" BOOL NOT NULL,
PRIMARY KEY(project_id, worktree_id, work_directory_id),
FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
@@ -96,6 +96,23 @@ CREATE TABLE "worktree_repositories" (
CREATE INDEX "index_worktree_repositories_on_project_id" ON "worktree_repositories" ("project_id");
CREATE INDEX "index_worktree_repositories_on_project_id_and_worktree_id" ON "worktree_repositories" ("project_id", "worktree_id");
+CREATE TABLE "worktree_repository_statuses" (
+ "project_id" INTEGER NOT NULL,
+ "worktree_id" INTEGER NOT NULL,
+ "work_directory_id" INTEGER NOT NULL,
+ "repo_path" VARCHAR NOT NULL,
+ "status" INTEGER NOT NULL,
+ "scan_id" INTEGER NOT NULL,
+ "is_deleted" BOOL NOT NULL,
+ PRIMARY KEY(project_id, worktree_id, work_directory_id, repo_path),
+ FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
+ FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
+);
+CREATE INDEX "index_worktree_repository_statuses_on_project_id" ON "worktree_repository_statuses" ("project_id");
+CREATE INDEX "index_worktree_repository_statuses_on_project_id_and_worktree_id" ON "worktree_repository_statuses" ("project_id", "worktree_id");
+CREATE INDEX "index_worktree_repository_statuses_on_project_id_and_worktree_id_and_work_directory_id" ON "worktree_repository_statuses" ("project_id", "worktree_id", "work_directory_id");
+
+
CREATE TABLE "worktree_diagnostic_summaries" (
"project_id" INTEGER NOT NULL,
"worktree_id" INTEGER NOT NULL,
@@ -0,0 +1,15 @@
+CREATE TABLE "worktree_repository_statuses" (
+ "project_id" INTEGER NOT NULL,
+ "worktree_id" INT8 NOT NULL,
+ "work_directory_id" INT8 NOT NULL,
+ "repo_path" VARCHAR NOT NULL,
+ "status" INT8 NOT NULL,
+ "scan_id" INT8 NOT NULL,
+ "is_deleted" BOOL NOT NULL,
+ PRIMARY KEY(project_id, worktree_id, work_directory_id, repo_path),
+ FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
+ FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
+);
+CREATE INDEX "index_wt_repos_statuses_on_project_id" ON "worktree_repository_statuses" ("project_id");
+CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id" ON "worktree_repository_statuses" ("project_id", "worktree_id");
+CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id_and_wd_id" ON "worktree_repository_statuses" ("project_id", "worktree_id", "work_directory_id");
@@ -15,6 +15,7 @@ mod worktree;
mod worktree_diagnostic_summary;
mod worktree_entry;
mod worktree_repository;
+mod worktree_repository_statuses;
use crate::executor::Executor;
use crate::{Error, Result};
@@ -1568,6 +1569,50 @@ impl Database {
worktree.updated_repositories.push(proto::RepositoryEntry {
work_directory_id: db_repository.work_directory_id as u64,
branch: db_repository.branch,
+ removed_repo_paths: Default::default(),
+ updated_statuses: Default::default(),
+ });
+ }
+ }
+ }
+
+ // Repository Status Entries
+ for repository in worktree.updated_repositories.iter_mut() {
+ let repository_status_entry_filter =
+ if let Some(rejoined_worktree) = rejoined_worktree {
+ worktree_repository_statuses::Column::ScanId
+ .gt(rejoined_worktree.scan_id)
+ } else {
+ worktree_repository_statuses::Column::IsDeleted.eq(false)
+ };
+
+ let mut db_repository_statuses =
+ worktree_repository_statuses::Entity::find()
+ .filter(
+ Condition::all()
+ .add(
+ worktree_repository_statuses::Column::WorktreeId
+ .eq(worktree.id),
+ )
+ .add(
+ worktree_repository_statuses::Column::WorkDirectoryId
+ .eq(repository.work_directory_id),
+ )
+ .add(repository_status_entry_filter),
+ )
+ .stream(&*tx)
+ .await?;
+
+ while let Some(db_status_entry) = db_repository_statuses.next().await {
+ let db_status_entry = db_status_entry?;
+ if db_status_entry.is_deleted {
+ repository
+ .removed_repo_paths
+ .push(db_status_entry.repo_path);
+ } else {
+ repository.updated_statuses.push(proto::StatusEntry {
+ repo_path: db_status_entry.repo_path,
+ status: db_status_entry.status as i32,
});
}
}
@@ -2395,6 +2440,68 @@ impl Database {
)
.exec(&*tx)
.await?;
+
+ for repository in update.updated_repositories.iter() {
+ if !repository.updated_statuses.is_empty() {
+ worktree_repository_statuses::Entity::insert_many(
+ repository.updated_statuses.iter().map(|status_entry| {
+ worktree_repository_statuses::ActiveModel {
+ project_id: ActiveValue::set(project_id),
+ worktree_id: ActiveValue::set(worktree_id),
+ work_directory_id: ActiveValue::set(
+ repository.work_directory_id as i64,
+ ),
+ repo_path: ActiveValue::set(status_entry.repo_path.clone()),
+ status: ActiveValue::set(status_entry.status as i64),
+ scan_id: ActiveValue::set(update.scan_id as i64),
+ is_deleted: ActiveValue::set(false),
+ }
+ }),
+ )
+ .on_conflict(
+ OnConflict::columns([
+ worktree_repository_statuses::Column::ProjectId,
+ worktree_repository_statuses::Column::WorktreeId,
+ worktree_repository_statuses::Column::WorkDirectoryId,
+ worktree_repository_statuses::Column::RepoPath,
+ ])
+ .update_columns([
+ worktree_repository_statuses::Column::ScanId,
+ worktree_repository_statuses::Column::Status,
+ worktree_repository_statuses::Column::IsDeleted,
+ ])
+ .to_owned(),
+ )
+ .exec(&*tx)
+ .await?;
+ }
+
+ if !repository.removed_repo_paths.is_empty() {
+ worktree_repository_statuses::Entity::update_many()
+ .filter(
+ worktree_repository_statuses::Column::ProjectId
+ .eq(project_id)
+ .and(
+ worktree_repository_statuses::Column::WorktreeId
+ .eq(worktree_id),
+ )
+ .and(
+ worktree_repository_statuses::Column::WorkDirectoryId
+ .eq(repository.work_directory_id as i64),
+ )
+ .and(worktree_repository_statuses::Column::RepoPath.is_in(
+ repository.removed_repo_paths.iter().map(String::as_str),
+ )),
+ )
+ .set(worktree_repository_statuses::ActiveModel {
+ is_deleted: ActiveValue::Set(true),
+ scan_id: ActiveValue::Set(update.scan_id as i64),
+ ..Default::default()
+ })
+ .exec(&*tx)
+ .await?;
+ }
+ }
}
if !update.removed_repositories.is_empty() {
@@ -2645,10 +2752,42 @@ impl Database {
if let Some(worktree) =
worktrees.get_mut(&(db_repository_entry.worktree_id as u64))
{
- worktree.repository_entries.push(proto::RepositoryEntry {
- work_directory_id: db_repository_entry.work_directory_id as u64,
- branch: db_repository_entry.branch,
- });
+ worktree.repository_entries.insert(
+ db_repository_entry.work_directory_id as u64,
+ proto::RepositoryEntry {
+ work_directory_id: db_repository_entry.work_directory_id as u64,
+ branch: db_repository_entry.branch,
+ removed_repo_paths: Default::default(),
+ updated_statuses: Default::default(),
+ },
+ );
+ }
+ }
+ }
+
+ {
+ let mut db_status_entries = worktree_repository_statuses::Entity::find()
+ .filter(
+ Condition::all()
+ .add(worktree_repository_statuses::Column::ProjectId.eq(project_id))
+ .add(worktree_repository_statuses::Column::IsDeleted.eq(false)),
+ )
+ .stream(&*tx)
+ .await?;
+
+ while let Some(db_status_entry) = db_status_entries.next().await {
+ let db_status_entry = db_status_entry?;
+ if let Some(worktree) = worktrees.get_mut(&(db_status_entry.worktree_id as u64))
+ {
+ if let Some(repository_entry) = worktree
+ .repository_entries
+ .get_mut(&(db_status_entry.work_directory_id as u64))
+ {
+ repository_entry.updated_statuses.push(proto::StatusEntry {
+ repo_path: db_status_entry.repo_path,
+ status: db_status_entry.status as i32,
+ });
+ }
}
}
}
@@ -3390,7 +3529,7 @@ pub struct Worktree {
pub root_name: String,
pub visible: bool,
pub entries: Vec<proto::Entry>,
- pub repository_entries: Vec<proto::RepositoryEntry>,
+ pub repository_entries: BTreeMap<u64, proto::RepositoryEntry>,
pub diagnostic_summaries: Vec<proto::DiagnosticSummary>,
pub scan_id: u64,
pub completed_scan_id: u64,
@@ -0,0 +1,23 @@
+use super::ProjectId;
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "worktree_repository_statuses")]
+pub struct Model {
+ #[sea_orm(primary_key)]
+ pub project_id: ProjectId,
+ #[sea_orm(primary_key)]
+ pub worktree_id: i64,
+ #[sea_orm(primary_key)]
+ pub work_directory_id: i64,
+ #[sea_orm(primary_key)]
+ pub repo_path: String,
+ pub status: i64,
+ pub scan_id: i64,
+ pub is_deleted: bool,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {}
+
+impl ActiveModelBehavior for ActiveModel {}
@@ -1385,7 +1385,7 @@ async fn join_project(
removed_entries: Default::default(),
scan_id: worktree.scan_id,
is_last_update: worktree.scan_id == worktree.completed_scan_id,
- updated_repositories: worktree.repository_entries,
+ updated_repositories: worktree.repository_entries.into_values().collect(),
removed_repositories: Default::default(),
};
for update in proto::split_worktree_update(message, MAX_CHUNK_SIZE) {
@@ -10,7 +10,7 @@ use editor::{
ConfirmRename, Editor, ExcerptRange, MultiBuffer, Redo, Rename, ToOffset, ToggleCodeActions,
Undo,
};
-use fs::{FakeFs, Fs as _, LineEnding, RemoveOptions};
+use fs::{repository::GitFileStatus, FakeFs, Fs as _, LineEnding, RemoveOptions};
use futures::StreamExt as _;
use gpui::{
executor::Deterministic, geometry::vector::vec2f, test::EmptyView, AppContext, ModelHandle,
@@ -2693,6 +2693,154 @@ async fn test_git_branch_name(
});
}
+#[gpui::test]
+async fn test_git_status_sync(
+ deterministic: Arc<Deterministic>,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
+ cx_c: &mut TestAppContext,
+) {
+ deterministic.forbid_parking();
+ let mut server = TestServer::start(&deterministic).await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
+ let client_c = server.create_client(cx_c, "user_c").await;
+ server
+ .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
+ .await;
+ let active_call_a = cx_a.read(ActiveCall::global);
+
+ client_a
+ .fs
+ .insert_tree(
+ "/dir",
+ json!({
+ ".git": {},
+ "a.txt": "a",
+ "b.txt": "b",
+ }),
+ )
+ .await;
+
+ const A_TXT: &'static str = "a.txt";
+ const B_TXT: &'static str = "b.txt";
+
+ client_a
+ .fs
+ .as_fake()
+ .set_status_for_repo(
+ Path::new("/dir/.git"),
+ &[
+ (&Path::new(A_TXT), GitFileStatus::Added),
+ (&Path::new(B_TXT), GitFileStatus::Added),
+ ],
+ )
+ .await;
+
+ let (project_local, _worktree_id) = client_a.build_local_project("/dir", cx_a).await;
+ let project_id = active_call_a
+ .update(cx_a, |call, cx| {
+ call.share_project(project_local.clone(), cx)
+ })
+ .await
+ .unwrap();
+
+ let project_remote = client_b.build_remote_project(project_id, cx_b).await;
+
+ // Wait for it to catch up to the new status
+ deterministic.run_until_parked();
+
+ #[track_caller]
+ fn assert_status(
+ file: &impl AsRef<Path>,
+ status: Option<GitFileStatus>,
+ project: &Project,
+ cx: &AppContext,
+ ) {
+ let file = file.as_ref();
+ let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
+ assert_eq!(worktrees.len(), 1);
+ let worktree = worktrees[0].clone();
+ let snapshot = worktree.read(cx).snapshot();
+ let root_entry = snapshot.root_git_entry().unwrap();
+ assert_eq!(root_entry.status_for_file(&snapshot, file), status);
+ }
+
+ // Smoke test status reading
+ project_local.read_with(cx_a, |project, cx| {
+ assert_status(&Path::new(A_TXT), Some(GitFileStatus::Added), project, cx);
+ assert_status(&Path::new(B_TXT), Some(GitFileStatus::Added), project, cx);
+ });
+ project_remote.read_with(cx_b, |project, cx| {
+ assert_status(&Path::new(A_TXT), Some(GitFileStatus::Added), project, cx);
+ assert_status(&Path::new(B_TXT), Some(GitFileStatus::Added), project, cx);
+ });
+
+ client_a
+ .fs
+ .as_fake()
+ .set_status_for_repo(
+ Path::new("/dir/.git"),
+ &[
+ (&Path::new(A_TXT), GitFileStatus::Modified),
+ (&Path::new(B_TXT), GitFileStatus::Modified),
+ ],
+ )
+ .await;
+
+ // Wait for buffer_local_a to receive it
+ deterministic.run_until_parked();
+
+ // Smoke test status reading
+ project_local.read_with(cx_a, |project, cx| {
+ assert_status(
+ &Path::new(A_TXT),
+ Some(GitFileStatus::Modified),
+ project,
+ cx,
+ );
+ assert_status(
+ &Path::new(B_TXT),
+ Some(GitFileStatus::Modified),
+ project,
+ cx,
+ );
+ });
+ project_remote.read_with(cx_b, |project, cx| {
+ assert_status(
+ &Path::new(A_TXT),
+ Some(GitFileStatus::Modified),
+ project,
+ cx,
+ );
+ assert_status(
+ &Path::new(B_TXT),
+ Some(GitFileStatus::Modified),
+ project,
+ cx,
+ );
+ });
+
+ // And synchronization while joining
+ let project_remote_c = client_c.build_remote_project(project_id, cx_c).await;
+ deterministic.run_until_parked();
+
+ project_remote_c.read_with(cx_c, |project, cx| {
+ assert_status(
+ &Path::new(A_TXT),
+ Some(GitFileStatus::Modified),
+ project,
+ cx,
+ );
+ assert_status(
+ &Path::new(B_TXT),
+ Some(GitFileStatus::Modified),
+ project,
+ cx,
+ );
+ });
+}
+
#[gpui::test(iterations = 10)]
async fn test_fs_operations(
deterministic: Arc<Deterministic>,
@@ -8,12 +8,13 @@ use call::ActiveCall;
use client::RECEIVE_TIMEOUT;
use collections::BTreeMap;
use editor::Bias;
-use fs::{FakeFs, Fs as _};
+use fs::{repository::GitFileStatus, FakeFs, Fs as _};
use futures::StreamExt as _;
use gpui::{executor::Deterministic, ModelHandle, Task, TestAppContext};
use language::{range_to_lsp, FakeLspAdapter, Language, LanguageConfig, PointUtf16};
use lsp::FakeLanguageServer;
use parking_lot::Mutex;
+use pretty_assertions::assert_eq;
use project::{search::SearchQuery, Project, ProjectPath};
use rand::{
distributions::{Alphanumeric, DistString},
@@ -766,53 +767,85 @@ async fn apply_client_operation(
}
}
- ClientOperation::WriteGitIndex {
- repo_path,
- contents,
- } => {
- if !client.fs.directories().contains(&repo_path) {
- return Err(TestError::Inapplicable);
- }
-
- log::info!(
- "{}: writing git index for repo {:?}: {:?}",
- client.username,
+ ClientOperation::GitOperation { operation } => match operation {
+ GitOperation::WriteGitIndex {
repo_path,
- contents
- );
+ contents,
+ } => {
+ if !client.fs.directories().contains(&repo_path) {
+ return Err(TestError::Inapplicable);
+ }
- let dot_git_dir = repo_path.join(".git");
- let contents = contents
- .iter()
- .map(|(path, contents)| (path.as_path(), contents.clone()))
- .collect::<Vec<_>>();
- if client.fs.metadata(&dot_git_dir).await?.is_none() {
- client.fs.create_dir(&dot_git_dir).await?;
- }
- client.fs.set_index_for_repo(&dot_git_dir, &contents).await;
- }
+ log::info!(
+ "{}: writing git index for repo {:?}: {:?}",
+ client.username,
+ repo_path,
+ contents
+ );
- ClientOperation::WriteGitBranch {
- repo_path,
- new_branch,
- } => {
- if !client.fs.directories().contains(&repo_path) {
- return Err(TestError::Inapplicable);
+ let dot_git_dir = repo_path.join(".git");
+ let contents = contents
+ .iter()
+ .map(|(path, contents)| (path.as_path(), contents.clone()))
+ .collect::<Vec<_>>();
+ if client.fs.metadata(&dot_git_dir).await?.is_none() {
+ client.fs.create_dir(&dot_git_dir).await?;
+ }
+ client.fs.set_index_for_repo(&dot_git_dir, &contents).await;
}
+ GitOperation::WriteGitBranch {
+ repo_path,
+ new_branch,
+ } => {
+ if !client.fs.directories().contains(&repo_path) {
+ return Err(TestError::Inapplicable);
+ }
- log::info!(
- "{}: writing git branch for repo {:?}: {:?}",
- client.username,
+ log::info!(
+ "{}: writing git branch for repo {:?}: {:?}",
+ client.username,
+ repo_path,
+ new_branch
+ );
+
+ let dot_git_dir = repo_path.join(".git");
+ if client.fs.metadata(&dot_git_dir).await?.is_none() {
+ client.fs.create_dir(&dot_git_dir).await?;
+ }
+ client.fs.set_branch_name(&dot_git_dir, new_branch).await;
+ }
+ GitOperation::WriteGitStatuses {
repo_path,
- new_branch
- );
+ statuses,
+ } => {
+ if !client.fs.directories().contains(&repo_path) {
+ return Err(TestError::Inapplicable);
+ }
+
+ log::info!(
+ "{}: writing git statuses for repo {:?}: {:?}",
+ client.username,
+ repo_path,
+ statuses
+ );
+
+ let dot_git_dir = repo_path.join(".git");
- let dot_git_dir = repo_path.join(".git");
- if client.fs.metadata(&dot_git_dir).await?.is_none() {
- client.fs.create_dir(&dot_git_dir).await?;
+ let statuses = statuses
+ .iter()
+ .map(|(path, val)| (path.as_path(), val.clone()))
+ .collect::<Vec<_>>();
+
+ if client.fs.metadata(&dot_git_dir).await?.is_none() {
+ client.fs.create_dir(&dot_git_dir).await?;
+ }
+
+ client
+ .fs
+ .set_status_for_repo(&dot_git_dir, statuses.as_slice())
+ .await;
}
- client.fs.set_branch_name(&dot_git_dir, new_branch).await;
- }
+ },
}
Ok(())
}
@@ -1181,6 +1214,13 @@ enum ClientOperation {
is_dir: bool,
content: String,
},
+ GitOperation {
+ operation: GitOperation,
+ },
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+enum GitOperation {
WriteGitIndex {
repo_path: PathBuf,
contents: Vec<(PathBuf, String)>,
@@ -1189,6 +1229,10 @@ enum ClientOperation {
repo_path: PathBuf,
new_branch: Option<String>,
},
+ WriteGitStatuses {
+ repo_path: PathBuf,
+ statuses: Vec<(PathBuf, GitFileStatus)>,
+ },
}
#[derive(Clone, Debug, Serialize, Deserialize)]
@@ -1701,57 +1745,10 @@ impl TestPlan {
}
}
- // Update a git index
- 91..=93 => {
- let repo_path = client
- .fs
- .directories()
- .into_iter()
- .choose(&mut self.rng)
- .unwrap()
- .clone();
-
- let mut file_paths = client
- .fs
- .files()
- .into_iter()
- .filter(|path| path.starts_with(&repo_path))
- .collect::<Vec<_>>();
- let count = self.rng.gen_range(0..=file_paths.len());
- file_paths.shuffle(&mut self.rng);
- file_paths.truncate(count);
-
- let mut contents = Vec::new();
- for abs_child_file_path in &file_paths {
- let child_file_path = abs_child_file_path
- .strip_prefix(&repo_path)
- .unwrap()
- .to_path_buf();
- let new_base = Alphanumeric.sample_string(&mut self.rng, 16);
- contents.push((child_file_path, new_base));
- }
-
- break ClientOperation::WriteGitIndex {
- repo_path,
- contents,
- };
- }
-
- // Update a git branch
- 94..=95 => {
- let repo_path = client
- .fs
- .directories()
- .choose(&mut self.rng)
- .unwrap()
- .clone();
-
- let new_branch = (self.rng.gen_range(0..10) > 3)
- .then(|| Alphanumeric.sample_string(&mut self.rng, 8));
-
- break ClientOperation::WriteGitBranch {
- repo_path,
- new_branch,
+ // Update a git related action
+ 91..=95 => {
+ break ClientOperation::GitOperation {
+ operation: self.generate_git_operation(client),
};
}
@@ -1789,6 +1786,86 @@ impl TestPlan {
})
}
+ fn generate_git_operation(&mut self, client: &TestClient) -> GitOperation {
+ fn generate_file_paths(
+ repo_path: &Path,
+ rng: &mut StdRng,
+ client: &TestClient,
+ ) -> Vec<PathBuf> {
+ let mut paths = client
+ .fs
+ .files()
+ .into_iter()
+ .filter(|path| path.starts_with(repo_path))
+ .collect::<Vec<_>>();
+
+ let count = rng.gen_range(0..=paths.len());
+ paths.shuffle(rng);
+ paths.truncate(count);
+
+ paths
+ .iter()
+ .map(|path| path.strip_prefix(repo_path).unwrap().to_path_buf())
+ .collect::<Vec<_>>()
+ }
+
+ let repo_path = client
+ .fs
+ .directories()
+ .choose(&mut self.rng)
+ .unwrap()
+ .clone();
+
+ match self.rng.gen_range(0..100_u32) {
+ 0..=25 => {
+ let file_paths = generate_file_paths(&repo_path, &mut self.rng, client);
+
+ let contents = file_paths
+ .into_iter()
+ .map(|path| (path, Alphanumeric.sample_string(&mut self.rng, 16)))
+ .collect();
+
+ GitOperation::WriteGitIndex {
+ repo_path,
+ contents,
+ }
+ }
+ 26..=63 => {
+ let new_branch = (self.rng.gen_range(0..10) > 3)
+ .then(|| Alphanumeric.sample_string(&mut self.rng, 8));
+
+ GitOperation::WriteGitBranch {
+ repo_path,
+ new_branch,
+ }
+ }
+ 64..=100 => {
+ let file_paths = generate_file_paths(&repo_path, &mut self.rng, client);
+
+ let statuses = file_paths
+ .into_iter()
+ .map(|paths| {
+ (
+ paths,
+ match self.rng.gen_range(0..3_u32) {
+ 0 => GitFileStatus::Added,
+ 1 => GitFileStatus::Modified,
+ 2 => GitFileStatus::Conflict,
+ _ => unreachable!(),
+ },
+ )
+ })
+ .collect::<Vec<_>>();
+
+ GitOperation::WriteGitStatuses {
+ repo_path,
+ statuses,
+ }
+ }
+ _ => unreachable!(),
+ }
+ }
+
fn next_root_dir_name(&mut self, user_id: UserId) -> String {
let user_ix = self
.users
@@ -259,7 +259,7 @@ impl RegisteredBuffer {
#[derive(Debug)]
pub struct Completion {
- uuid: String,
+ pub uuid: String,
pub range: Range<Anchor>,
pub text: String,
}
@@ -1499,6 +1499,7 @@ mod tests {
cx.set_global(Settings::test(cx));
cx.set_global(SettingsStore::test(cx));
language::init(cx);
+ client::init_settings(cx);
workspace::init_settings(cx);
});
}
@@ -1256,6 +1256,16 @@ impl Editor {
let soft_wrap_mode_override =
(mode == EditorMode::SingleLine).then(|| language_settings::SoftWrap::None);
+
+ let mut project_subscription = None;
+ if mode == EditorMode::Full && buffer.read(cx).is_singleton() {
+ if let Some(project) = project.as_ref() {
+ project_subscription = Some(cx.observe(project, |_, _, cx| {
+ cx.emit(Event::TitleChanged);
+ }))
+ }
+ }
+
let mut this = Self {
handle: cx.weak_handle(),
buffer: buffer.clone(),
@@ -1312,6 +1322,11 @@ impl Editor {
cx.observe_global::<Settings, _>(Self::settings_changed),
],
};
+
+ if let Some(project_subscription) = project_subscription {
+ this._subscriptions.push(project_subscription);
+ }
+
this.end_selection(cx);
this.scroll_manager.show_scrollbar(cx);
@@ -1323,7 +1338,7 @@ impl Editor {
cx.set_global(ScrollbarAutoHide(should_auto_hide_scrollbars));
}
- this.report_editor_event("open", cx);
+ this.report_editor_event("open", None, cx);
this
}
@@ -3090,6 +3105,8 @@ impl Editor {
copilot
.update(cx, |copilot, cx| copilot.accept_completion(completion, cx))
.detach_and_log_err(cx);
+
+ self.report_copilot_event(Some(completion.uuid.clone()), true, cx)
}
self.insert_with_autoindent_mode(&suggestion.text.to_string(), None, cx);
cx.notify();
@@ -3107,6 +3124,8 @@ impl Editor {
copilot.discard_completions(&self.copilot_state.completions, cx)
})
.detach_and_log_err(cx);
+
+ self.report_copilot_event(None, false, cx)
}
self.display_map
@@ -6853,48 +6872,88 @@ impl Editor {
.collect()
}
- fn report_editor_event(&self, name: &'static str, cx: &AppContext) {
- if let Some((project, file)) = self.project.as_ref().zip(
- self.buffer
- .read(cx)
- .as_singleton()
- .and_then(|b| b.read(cx).file()),
- ) {
- let vim_mode = cx
- .global::<SettingsStore>()
- .untyped_user_settings()
- .get("vim_mode")
- == Some(&serde_json::Value::Bool(true));
- let telemetry_settings = *settings::get_setting::<TelemetrySettings>(None, cx);
- let copilot_enabled = all_language_settings(None, cx).copilot_enabled(None, None);
- let copilot_enabled_for_language = self
- .buffer
- .read(cx)
- .settings_at(0, cx)
- .show_copilot_suggestions;
-
- let extension = Path::new(file.file_name(cx))
- .extension()
- .and_then(|e| e.to_str());
- let telemetry = project.read(cx).client().telemetry().clone();
- telemetry.report_mixpanel_event(
- match name {
- "open" => "open editor",
- "save" => "save editor",
- _ => name,
- },
- json!({ "File Extension": extension, "Vim Mode": vim_mode, "In Clickhouse": true }),
- telemetry_settings,
- );
- let event = ClickhouseEvent::Editor {
- file_extension: extension.map(ToString::to_string),
- vim_mode,
- operation: name,
- copilot_enabled,
- copilot_enabled_for_language,
- };
- telemetry.report_clickhouse_event(event, telemetry_settings)
- }
+ fn report_copilot_event(
+ &self,
+ suggestion_id: Option<String>,
+ suggestion_accepted: bool,
+ cx: &AppContext,
+ ) {
+ let Some(project) = &self.project else {
+ return
+ };
+
+ // If None, we are either getting suggestions in a new, unsaved file, or in a file without an extension
+ let file_extension = self
+ .buffer
+ .read(cx)
+ .as_singleton()
+ .and_then(|b| b.read(cx).file())
+ .and_then(|file| Path::new(file.file_name(cx)).extension())
+ .and_then(|e| e.to_str())
+ .map(|a| a.to_string());
+
+ let telemetry = project.read(cx).client().telemetry().clone();
+ let telemetry_settings = *settings::get_setting::<TelemetrySettings>(None, cx);
+
+ let event = ClickhouseEvent::Copilot {
+ suggestion_id,
+ suggestion_accepted,
+ file_extension,
+ };
+ telemetry.report_clickhouse_event(event, telemetry_settings);
+ }
+
+ fn report_editor_event(
+ &self,
+ name: &'static str,
+ file_extension: Option<String>,
+ cx: &AppContext,
+ ) {
+ let Some(project) = &self.project else {
+ return
+ };
+
+ // If None, we are in a file without an extension
+ let file_extension = file_extension.or(self
+ .buffer
+ .read(cx)
+ .as_singleton()
+ .and_then(|b| b.read(cx).file())
+ .and_then(|file| Path::new(file.file_name(cx)).extension())
+ .and_then(|e| e.to_str())
+ .map(|a| a.to_string()));
+
+ let vim_mode = cx
+ .global::<SettingsStore>()
+ .untyped_user_settings()
+ .get("vim_mode")
+ == Some(&serde_json::Value::Bool(true));
+ let telemetry_settings = *settings::get_setting::<TelemetrySettings>(None, cx);
+ let copilot_enabled = all_language_settings(None, cx).copilot_enabled(None, None);
+ let copilot_enabled_for_language = self
+ .buffer
+ .read(cx)
+ .settings_at(0, cx)
+ .show_copilot_suggestions;
+
+ let telemetry = project.read(cx).client().telemetry().clone();
+ telemetry.report_mixpanel_event(
+ match name {
+ "open" => "open editor",
+ "save" => "save editor",
+ _ => name,
+ },
+ json!({ "File Extension": file_extension, "Vim Mode": vim_mode, "In Clickhouse": true }),
+ telemetry_settings,
+ );
+ let event = ClickhouseEvent::Editor {
+ file_extension,
+ vim_mode,
+ operation: name,
+ copilot_enabled,
+ copilot_enabled_for_language,
+ };
+ telemetry.report_clickhouse_event(event, telemetry_settings)
}
/// Copy the highlighted chunks to the clipboard as JSON. The format is an array of lines,
@@ -6682,10 +6682,11 @@ pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsC
cx.update(|cx| {
cx.set_global(SettingsStore::test(cx));
cx.set_global(Settings::test(cx));
+ client::init_settings(cx);
language::init(cx);
- crate::init(cx);
Project::init_settings(cx);
workspace::init_settings(cx);
+ crate::init(cx);
});
update_test_settings(cx, f);
@@ -1013,8 +1013,7 @@ mod tests {
.zip(expected_styles.iter().cloned())
.collect::<Vec<_>>();
assert_eq!(
- rendered.text,
- dbg!(expected_text),
+ rendered.text, expected_text,
"wrong text for input {blocks:?}"
);
assert_eq!(
@@ -27,7 +27,7 @@ use std::{
path::{Path, PathBuf},
};
use text::Selection;
-use util::{ResultExt, TryFutureExt};
+use util::{paths::FILE_ROW_COLUMN_DELIMITER, ResultExt, TryFutureExt};
use workspace::item::{BreadcrumbText, FollowableItemHandle};
use workspace::{
item::{FollowableItem, Item, ItemEvent, ItemHandle, ProjectItem},
@@ -566,7 +566,7 @@ impl Item for Editor {
cx: &AppContext,
) -> AnyElement<T> {
Flex::row()
- .with_child(Label::new(self.title(cx).to_string(), style.label.clone()).aligned())
+ .with_child(Label::new(self.title(cx).to_string(), style.label.clone()).into_any())
.with_children(detail.and_then(|detail| {
let path = path_for_buffer(&self.buffer, detail, false, cx)?;
let description = path.to_string_lossy();
@@ -580,6 +580,7 @@ impl Item for Editor {
.aligned(),
)
}))
+ .align_children_center()
.into_any()
}
@@ -636,7 +637,7 @@ impl Item for Editor {
project: ModelHandle<Project>,
cx: &mut ViewContext<Self>,
) -> Task<Result<()>> {
- self.report_editor_event("save", cx);
+ self.report_editor_event("save", None, cx);
let format = self.perform_format(project.clone(), FormatTrigger::Save, cx);
let buffers = self.buffer().clone().read(cx).all_buffers();
cx.spawn(|_, mut cx| async move {
@@ -685,6 +686,11 @@ impl Item for Editor {
.as_singleton()
.expect("cannot call save_as on an excerpt list");
+ let file_extension = abs_path
+ .extension()
+ .map(|a| a.to_string_lossy().to_string());
+ self.report_editor_event("save", file_extension, cx);
+
project.update(cx, |project, cx| {
project.save_buffer_as(buffer, abs_path, cx)
})
@@ -1111,7 +1117,11 @@ impl View for CursorPosition {
fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
if let Some(position) = self.position {
let theme = &cx.global::<Settings>().theme.workspace.status_bar;
- let mut text = format!("{},{}", position.row + 1, position.column + 1);
+ let mut text = format!(
+ "{}{FILE_ROW_COLUMN_DELIMITER}{}",
+ position.row + 1,
+ position.column + 1
+ );
if self.selected_count > 0 {
write!(text, " ({} selected)", self.selected_count).unwrap();
}
@@ -16,6 +16,7 @@ menu = { path = "../menu" }
picker = { path = "../picker" }
project = { path = "../project" }
settings = { path = "../settings" }
+text = { path = "../text" }
util = { path = "../util" }
theme = { path = "../theme" }
workspace = { path = "../workspace" }
@@ -1,3 +1,4 @@
+use editor::{scroll::autoscroll::Autoscroll, Bias, Editor};
use fuzzy::PathMatch;
use gpui::{
actions, elements::*, AppContext, ModelHandle, MouseState, Task, ViewContext, WeakViewHandle,
@@ -12,7 +13,8 @@ use std::{
Arc,
},
};
-use util::{post_inc, ResultExt};
+use text::Point;
+use util::{paths::PathLikeWithPosition, post_inc, ResultExt};
use workspace::Workspace;
pub type FileFinder = Picker<FileFinderDelegate>;
@@ -23,7 +25,7 @@ pub struct FileFinderDelegate {
search_count: usize,
latest_search_id: usize,
latest_search_did_cancel: bool,
- latest_search_query: String,
+ latest_search_query: Option<PathLikeWithPosition<FileSearchQuery>>,
relative_to: Option<Arc<Path>>,
matches: Vec<PathMatch>,
selected: Option<(usize, Arc<Path>)>,
@@ -60,6 +62,21 @@ pub enum Event {
Dismissed,
}
+#[derive(Debug, Clone)]
+struct FileSearchQuery {
+ raw_query: String,
+ file_query_end: Option<usize>,
+}
+
+impl FileSearchQuery {
+ fn path_query(&self) -> &str {
+ match self.file_query_end {
+ Some(file_path_end) => &self.raw_query[..file_path_end],
+ None => &self.raw_query,
+ }
+ }
+}
+
impl FileFinderDelegate {
fn labels_for_match(&self, path_match: &PathMatch) -> (String, Vec<usize>, String, Vec<usize>) {
let path = &path_match.path;
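To make the split concrete, here is roughly what `update_matches` (in the hunks below) produces for a query such as `first.rs:1:3` — a sketch only; the row/column extraction itself happens inside `PathLikeWithPosition::parse_str`, and the expected values match the `test_row_column_numbers_*` tests further down.

```rust
fn demo() {
    let raw_query = "first.rs:1:3";
    let query = PathLikeWithPosition::parse_str(raw_query, |path_like_str| {
        // `path_like_str` is the query with the `:1:3` suffix stripped.
        Ok::<_, std::convert::Infallible>(FileSearchQuery {
            raw_query: raw_query.to_owned(),
            file_query_end: if path_like_str == raw_query {
                None
            } else {
                Some(path_like_str.len())
            },
        })
    })
    .expect("infallible");

    // The fuzzy matcher only ever sees the path part...
    assert_eq!(query.path_like.path_query(), "first.rs");
    // ...while row and column are kept to place the caret on confirm.
    assert_eq!(query.row, Some(1));
    assert_eq!(query.column, Some(3));
}
```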
@@ -103,7 +120,7 @@ impl FileFinderDelegate {
search_count: 0,
latest_search_id: 0,
latest_search_did_cancel: false,
- latest_search_query: String::new(),
+ latest_search_query: None,
relative_to,
matches: Vec::new(),
selected: None,
@@ -111,7 +128,11 @@ impl FileFinderDelegate {
}
}
- fn spawn_search(&mut self, query: String, cx: &mut ViewContext<FileFinder>) -> Task<()> {
+ fn spawn_search(
+ &mut self,
+ query: PathLikeWithPosition<FileSearchQuery>,
+ cx: &mut ViewContext<FileFinder>,
+ ) -> Task<()> {
let relative_to = self.relative_to.clone();
let worktrees = self
.project
@@ -140,7 +161,7 @@ impl FileFinderDelegate {
cx.spawn(|picker, mut cx| async move {
let matches = fuzzy::match_path_sets(
candidate_sets.as_slice(),
- &query,
+ query.path_like.path_query(),
relative_to,
false,
100,
@@ -163,18 +184,24 @@ impl FileFinderDelegate {
&mut self,
search_id: usize,
did_cancel: bool,
- query: String,
+ query: PathLikeWithPosition<FileSearchQuery>,
matches: Vec<PathMatch>,
cx: &mut ViewContext<FileFinder>,
) {
if search_id >= self.latest_search_id {
self.latest_search_id = search_id;
- if self.latest_search_did_cancel && query == self.latest_search_query {
+ if self.latest_search_did_cancel
+ && Some(query.path_like.path_query())
+ == self
+ .latest_search_query
+ .as_ref()
+ .map(|query| query.path_like.path_query())
+ {
util::extend_sorted(&mut self.matches, matches.into_iter(), 100, |a, b| b.cmp(a));
} else {
self.matches = matches;
}
- self.latest_search_query = query;
+ self.latest_search_query = Some(query);
self.latest_search_did_cancel = did_cancel;
cx.notify();
}
@@ -209,13 +236,25 @@ impl PickerDelegate for FileFinderDelegate {
cx.notify();
}
- fn update_matches(&mut self, query: String, cx: &mut ViewContext<FileFinder>) -> Task<()> {
- if query.is_empty() {
+ fn update_matches(&mut self, raw_query: String, cx: &mut ViewContext<FileFinder>) -> Task<()> {
+ if raw_query.is_empty() {
self.latest_search_id = post_inc(&mut self.search_count);
self.matches.clear();
cx.notify();
Task::ready(())
} else {
+ let raw_query = &raw_query;
+ let query = PathLikeWithPosition::parse_str(raw_query, |path_like_str| {
+ Ok::<_, std::convert::Infallible>(FileSearchQuery {
+ raw_query: raw_query.to_owned(),
+ file_query_end: if path_like_str == raw_query {
+ None
+ } else {
+ Some(path_like_str.len())
+ },
+ })
+ })
+ .expect("infallible");
self.spawn_search(query, cx)
}
}
@@ -228,12 +267,49 @@ impl PickerDelegate for FileFinderDelegate {
path: m.path.clone(),
};
- workspace.update(cx, |workspace, cx| {
+ let open_task = workspace.update(cx, |workspace, cx| {
+ workspace.open_path(project_path.clone(), None, true, cx)
+ });
+
+ let workspace = workspace.downgrade();
+
+ let row = self
+ .latest_search_query
+ .as_ref()
+ .and_then(|query| query.row)
+ .map(|row| row.saturating_sub(1));
+ let col = self
+ .latest_search_query
+ .as_ref()
+ .and_then(|query| query.column)
+ .unwrap_or(0)
+ .saturating_sub(1);
+ cx.spawn(|_, mut cx| async move {
+ let item = open_task.await.log_err()?;
+ if let Some(row) = row {
+ if let Some(active_editor) = item.downcast::<Editor>() {
+ active_editor
+ .downgrade()
+ .update(&mut cx, |editor, cx| {
+ let snapshot = editor.snapshot(cx).display_snapshot;
+ let point = snapshot
+ .buffer_snapshot
+ .clip_point(Point::new(row, col), Bias::Left);
+ editor.change_selections(Some(Autoscroll::center()), cx, |s| {
+ s.select_ranges([point..point])
+ });
+ })
+ .log_err();
+ }
+ }
+
workspace
- .open_path(project_path.clone(), None, true, cx)
- .detach_and_log_err(cx);
- workspace.dismiss_modal(cx);
+ .update(&mut cx, |workspace, cx| workspace.dismiss_modal(cx))
+ .log_err();
+
+ Some(())
})
+ .detach();
}
}
}
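One detail worth calling out above: finder queries are 1-based (`file.rs:1:3`), while buffer `Point`s are 0-based, hence the `saturating_sub(1)` calls; out-of-range positions are then snapped by `clip_point`. A tiny illustration:

```rust
/// Convert the finder's 1-based row/column into 0-based buffer coordinates.
fn one_based_to_zero_based(row: u32, column: u32) -> (u32, u32) {
    // saturating_sub keeps a degenerate "file.rs:0:0" query from underflowing.
    (row.saturating_sub(1), column.saturating_sub(1))
}

fn main() {
    assert_eq!(one_based_to_zero_based(1, 3), (0, 2));
    assert_eq!(one_based_to_zero_based(0, 0), (0, 0));
    // Anything past the end of the buffer is clipped later by `clip_point`,
    // as exercised by `test_row_column_numbers_query_outside_file` below.
}
```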
@@ -268,6 +344,8 @@ impl PickerDelegate for FileFinderDelegate {
#[cfg(test)]
mod tests {
+ use std::time::Duration;
+
use super::*;
use editor::Editor;
use gpui::TestAppContext;
@@ -283,7 +361,7 @@ mod tests {
}
#[gpui::test]
- async fn test_matching_paths(cx: &mut gpui::TestAppContext) {
+ async fn test_matching_paths(cx: &mut TestAppContext) {
let app_state = init_test(cx);
app_state
.fs
@@ -334,7 +412,173 @@ mod tests {
}
#[gpui::test]
- async fn test_matching_cancellation(cx: &mut gpui::TestAppContext) {
+ async fn test_row_column_numbers_query_inside_file(cx: &mut TestAppContext) {
+ let app_state = init_test(cx);
+
+ let first_file_name = "first.rs";
+ let first_file_contents = "// First Rust file";
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ "/src",
+ json!({
+ "test": {
+ first_file_name: first_file_contents,
+ "second.rs": "// Second Rust file",
+ }
+ }),
+ )
+ .await;
+
+ let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+ let (window_id, workspace) = cx.add_window(|cx| Workspace::test_new(project, cx));
+ cx.dispatch_action(window_id, Toggle);
+ let finder = cx.read(|cx| workspace.read(cx).modal::<FileFinder>().unwrap());
+
+ let file_query = &first_file_name[..3];
+ let file_row = 1;
+ let file_column = 3;
+ assert!(file_column <= first_file_contents.len());
+ let query_inside_file = format!("{file_query}:{file_row}:{file_column}");
+ finder
+ .update(cx, |finder, cx| {
+ finder
+ .delegate_mut()
+ .update_matches(query_inside_file.to_string(), cx)
+ })
+ .await;
+ finder.read_with(cx, |finder, _| {
+ let finder = finder.delegate();
+ assert_eq!(finder.matches.len(), 1);
+ let latest_search_query = finder
+ .latest_search_query
+ .as_ref()
+ .expect("Finder should have a query after the update_matches call");
+ assert_eq!(latest_search_query.path_like.raw_query, query_inside_file);
+ assert_eq!(
+ latest_search_query.path_like.file_query_end,
+ Some(file_query.len())
+ );
+ assert_eq!(latest_search_query.row, Some(file_row));
+ assert_eq!(latest_search_query.column, Some(file_column as u32));
+ });
+
+ let active_pane = cx.read(|cx| workspace.read(cx).active_pane().clone());
+ cx.dispatch_action(window_id, SelectNext);
+ cx.dispatch_action(window_id, Confirm);
+ active_pane
+ .condition(cx, |pane, _| pane.active_item().is_some())
+ .await;
+ let editor = cx.update(|cx| {
+ let active_item = active_pane.read(cx).active_item().unwrap();
+ active_item.downcast::<Editor>().unwrap()
+ });
+ cx.foreground().advance_clock(Duration::from_secs(2));
+ cx.foreground().start_waiting();
+ cx.foreground().finish_waiting();
+ editor.update(cx, |editor, cx| {
+ let all_selections = editor.selections.all_adjusted(cx);
+ assert_eq!(
+ all_selections.len(),
+ 1,
+ "Expected to have 1 selection (caret) after file finder confirm, but got: {all_selections:?}"
+ );
+ let caret_selection = all_selections.into_iter().next().unwrap();
+ assert_eq!(caret_selection.start, caret_selection.end,
+ "Caret selection should have its start and end at the same position");
+ assert_eq!(file_row, caret_selection.start.row + 1,
+ "Query inside file should get caret with the same focus row");
+ assert_eq!(file_column, caret_selection.start.column as usize + 1,
+ "Query inside file should get caret with the same focus column");
+ });
+ }
+
+ #[gpui::test]
+ async fn test_row_column_numbers_query_outside_file(cx: &mut TestAppContext) {
+ let app_state = init_test(cx);
+
+ let first_file_name = "first.rs";
+ let first_file_contents = "// First Rust file";
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ "/src",
+ json!({
+ "test": {
+ first_file_name: first_file_contents,
+ "second.rs": "// Second Rust file",
+ }
+ }),
+ )
+ .await;
+
+ let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+ let (window_id, workspace) = cx.add_window(|cx| Workspace::test_new(project, cx));
+ cx.dispatch_action(window_id, Toggle);
+ let finder = cx.read(|cx| workspace.read(cx).modal::<FileFinder>().unwrap());
+
+ let file_query = &first_file_name[..3];
+ let file_row = 200;
+ let file_column = 300;
+ assert!(file_column > first_file_contents.len());
+ let query_outside_file = format!("{file_query}:{file_row}:{file_column}");
+ finder
+ .update(cx, |finder, cx| {
+ finder
+ .delegate_mut()
+ .update_matches(query_outside_file.to_string(), cx)
+ })
+ .await;
+ finder.read_with(cx, |finder, _| {
+ let finder = finder.delegate();
+ assert_eq!(finder.matches.len(), 1);
+ let latest_search_query = finder
+ .latest_search_query
+ .as_ref()
+ .expect("Finder should have a query after the update_matches call");
+ assert_eq!(latest_search_query.path_like.raw_query, query_outside_file);
+ assert_eq!(
+ latest_search_query.path_like.file_query_end,
+ Some(file_query.len())
+ );
+ assert_eq!(latest_search_query.row, Some(file_row));
+ assert_eq!(latest_search_query.column, Some(file_column as u32));
+ });
+
+ let active_pane = cx.read(|cx| workspace.read(cx).active_pane().clone());
+ cx.dispatch_action(window_id, SelectNext);
+ cx.dispatch_action(window_id, Confirm);
+ active_pane
+ .condition(cx, |pane, _| pane.active_item().is_some())
+ .await;
+ let editor = cx.update(|cx| {
+ let active_item = active_pane.read(cx).active_item().unwrap();
+ active_item.downcast::<Editor>().unwrap()
+ });
+ cx.foreground().advance_clock(Duration::from_secs(2));
+ cx.foreground().start_waiting();
+ cx.foreground().finish_waiting();
+ editor.update(cx, |editor, cx| {
+ let all_selections = editor.selections.all_adjusted(cx);
+ assert_eq!(
+ all_selections.len(),
+ 1,
+ "Expected to have 1 selection (caret) after file finder confirm, but got: {all_selections:?}"
+ );
+ let caret_selection = all_selections.into_iter().next().unwrap();
+ assert_eq!(caret_selection.start, caret_selection.end,
+ "Caret selection should have its start and end at the same position");
+ assert_eq!(0, caret_selection.start.row,
+ "Rows past the end of the file should be clamped to the file's last row");
+ assert_eq!(first_file_contents.len(), caret_selection.start.column as usize,
+ "Columns past the end of the line should be clamped to that row's last column");
+ });
+ }
+
+ #[gpui::test]
+ async fn test_matching_cancellation(cx: &mut TestAppContext) {
let app_state = init_test(cx);
app_state
.fs
@@ -367,7 +611,7 @@ mod tests {
)
});
- let query = "hi".to_string();
+ let query = test_path_like("hi");
finder
.update(cx, |f, cx| f.delegate_mut().spawn_search(query.clone(), cx))
.await;
@@ -403,7 +647,7 @@ mod tests {
}
#[gpui::test]
- async fn test_ignored_files(cx: &mut gpui::TestAppContext) {
+ async fn test_ignored_files(cx: &mut TestAppContext) {
let app_state = init_test(cx);
app_state
.fs
@@ -451,13 +695,15 @@ mod tests {
)
});
finder
- .update(cx, |f, cx| f.delegate_mut().spawn_search("hi".into(), cx))
+ .update(cx, |f, cx| {
+ f.delegate_mut().spawn_search(test_path_like("hi"), cx)
+ })
.await;
finder.read_with(cx, |f, _| assert_eq!(f.delegate().matches.len(), 7));
}
#[gpui::test]
- async fn test_single_file_worktrees(cx: &mut gpui::TestAppContext) {
+ async fn test_single_file_worktrees(cx: &mut TestAppContext) {
let app_state = init_test(cx);
app_state
.fs
@@ -487,7 +733,9 @@ mod tests {
// Even though there is only one worktree, that worktree's filename
// is included in the matching, because the worktree is a single file.
finder
- .update(cx, |f, cx| f.delegate_mut().spawn_search("thf".into(), cx))
+ .update(cx, |f, cx| {
+ f.delegate_mut().spawn_search(test_path_like("thf"), cx)
+ })
.await;
cx.read(|cx| {
let finder = finder.read(cx);
@@ -505,13 +753,15 @@ mod tests {
// Since the worktree root is a file, searching for its name followed by a slash does
// not match anything.
finder
- .update(cx, |f, cx| f.delegate_mut().spawn_search("thf/".into(), cx))
+ .update(cx, |f, cx| {
+ f.delegate_mut().spawn_search(test_path_like("thf/"), cx)
+ })
.await;
finder.read_with(cx, |f, _| assert_eq!(f.delegate().matches.len(), 0));
}
#[gpui::test]
- async fn test_multiple_matches_with_same_relative_path(cx: &mut gpui::TestAppContext) {
+ async fn test_multiple_matches_with_same_relative_path(cx: &mut TestAppContext) {
let app_state = init_test(cx);
app_state
.fs
@@ -547,7 +797,9 @@ mod tests {
// Run a search that matches two files with the same relative path.
finder
- .update(cx, |f, cx| f.delegate_mut().spawn_search("a.t".into(), cx))
+ .update(cx, |f, cx| {
+ f.delegate_mut().spawn_search(test_path_like("a.t"), cx)
+ })
.await;
// Can switch between different matches with the same relative path.
@@ -563,7 +815,7 @@ mod tests {
}
#[gpui::test]
- async fn test_path_distance_ordering(cx: &mut gpui::TestAppContext) {
+ async fn test_path_distance_ordering(cx: &mut TestAppContext) {
let app_state = init_test(cx);
app_state
.fs
@@ -601,7 +853,7 @@ mod tests {
finder
.update(cx, |f, cx| {
- f.delegate_mut().spawn_search("a.txt".into(), cx)
+ f.delegate_mut().spawn_search(test_path_like("a.txt"), cx)
})
.await;
@@ -613,7 +865,7 @@ mod tests {
}
#[gpui::test]
- async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
+ async fn test_search_worktree_without_files(cx: &mut TestAppContext) {
let app_state = init_test(cx);
app_state
.fs
@@ -643,7 +895,9 @@ mod tests {
)
});
finder
- .update(cx, |f, cx| f.delegate_mut().spawn_search("dir".into(), cx))
+ .update(cx, |f, cx| {
+ f.delegate_mut().spawn_search(test_path_like("dir"), cx)
+ })
.await;
cx.read(|cx| {
let finder = finder.read(cx);
@@ -662,4 +916,18 @@ mod tests {
state
})
}
+
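+ // Builds a `PathLikeWithPosition` for tests: the full input is kept as the raw
+ // query, and `file_query_end` marks where the path part ends when a row/column
+ // suffix was stripped (`None` when the whole input is the path).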
+ fn test_path_like(test_str: &str) -> PathLikeWithPosition<FileSearchQuery> {
+ PathLikeWithPosition::parse_str(test_str, |path_like_str| {
+ Ok::<_, std::convert::Infallible>(FileSearchQuery {
+ raw_query: test_str.to_owned(),
+ file_query_end: if path_like_str == test_str {
+ None
+ } else {
+ Some(path_like_str.len())
+ },
+ })
+ })
+ .unwrap()
+ }
}
@@ -13,6 +13,7 @@ gpui = { path = "../gpui" }
lsp = { path = "../lsp" }
rope = { path = "../rope" }
util = { path = "../util" }
+sum_tree = { path = "../sum_tree" }
anyhow.workspace = true
async-trait.workspace = true
futures.workspace = true
@@ -27,7 +27,7 @@ use util::ResultExt;
#[cfg(any(test, feature = "test-support"))]
use collections::{btree_map, BTreeMap};
#[cfg(any(test, feature = "test-support"))]
-use repository::FakeGitRepositoryState;
+use repository::{FakeGitRepositoryState, GitFileStatus};
#[cfg(any(test, feature = "test-support"))]
use std::sync::Weak;
@@ -572,15 +572,15 @@ impl FakeFs {
Ok(())
}
- pub async fn pause_events(&self) {
+ pub fn pause_events(&self) {
self.state.lock().events_paused = true;
}
- pub async fn buffered_event_count(&self) -> usize {
+ pub fn buffered_event_count(&self) -> usize {
self.state.lock().buffered_events.len()
}
- pub async fn flush_events(&self, count: usize) {
+ pub fn flush_events(&self, count: usize) {
self.state.lock().flush_events(count);
}
@@ -654,6 +654,17 @@ impl FakeFs {
});
}
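+ /// Test-only helper: replaces the fake repository's status map with the given
+ /// `(path, status)` pairs.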
+ pub async fn set_status_for_repo(&self, dot_git: &Path, statuses: &[(&Path, GitFileStatus)]) {
+ self.with_git_state(dot_git, |state| {
+ state.worktree_statuses.clear();
+ state.worktree_statuses.extend(
+ statuses
+ .iter()
+ .map(|(path, status)| ((**path).into(), status.clone())),
+ );
+ });
+ }
+
pub fn paths(&self) -> Vec<PathBuf> {
let mut result = Vec::new();
let mut queue = collections::VecDeque::new();
@@ -821,14 +832,16 @@ impl Fs for FakeFs {
let old_path = normalize_path(old_path);
let new_path = normalize_path(new_path);
+
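+ // Clone the moved entry instead of removing it up front; the old path is
+ // cleared only after the entry has been written to the new path, so a failed
+ // write cannot lose the entry.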
let mut state = self.state.lock();
let moved_entry = state.write_path(&old_path, |e| {
if let btree_map::Entry::Occupied(e) = e {
- Ok(e.remove())
+ Ok(e.get().clone())
} else {
Err(anyhow!("path does not exist: {}", &old_path.display()))
}
})?;
+
state.write_path(&new_path, |e| {
match e {
btree_map::Entry::Occupied(mut e) => {
@@ -844,6 +857,17 @@ impl Fs for FakeFs {
}
Ok(())
})?;
+
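+ // The new entry is in place, so the old path can now be removed.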
+ state
+ .write_path(&old_path, |e| {
+ if let btree_map::Entry::Occupied(e) = e {
+ Ok(e.remove())
+ } else {
+ unreachable!()
+ }
+ })
+ .unwrap();
+
state.emit_event(&[old_path, new_path]);
Ok(())
}
@@ -1,10 +1,15 @@
use anyhow::Result;
use collections::HashMap;
use parking_lot::Mutex;
+use serde_derive::{Deserialize, Serialize};
use std::{
+ cmp::Ordering,
+ ffi::OsStr,
+ os::unix::prelude::OsStrExt,
path::{Component, Path, PathBuf},
sync::Arc,
};
+use sum_tree::{MapSeekTarget, TreeMap};
use util::ResultExt;
pub use git2::Repository as LibGitRepository;
@@ -16,6 +21,10 @@ pub trait GitRepository: Send {
fn load_index_text(&self, relative_file_path: &Path) -> Option<String>;
fn branch_name(&self) -> Option<String>;
+
+ fn statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>>;
+
+ fn status(&self, path: &RepoPath) -> Option<GitFileStatus>;
}
impl std::fmt::Debug for dyn GitRepository {
@@ -61,6 +70,48 @@ impl GitRepository for LibGitRepository {
let branch = String::from_utf8_lossy(head.shorthand_bytes());
Some(branch.to_string())
}
+
+ fn statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>> {
+ let statuses = self.statuses(None).log_err()?;
+
+ let mut map = TreeMap::default();
+
+ for status in statuses
+ .iter()
+ .filter(|status| !status.status().contains(git2::Status::IGNORED))
+ {
+ let path = RepoPath(PathBuf::from(OsStr::from_bytes(status.path_bytes())));
+ let Some(status) = read_status(status.status()) else {
+ continue
+ };
+
+ map.insert(path, status)
+ }
+
+ Some(map)
+ }
+
+ fn status(&self, path: &RepoPath) -> Option<GitFileStatus> {
+ let status = self.status_file(path).log_err()?;
+ read_status(status)
+ }
+}
+
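+/// Collapses libgit2's status bitflags into the three states Zed tracks:
+/// conflicts take precedence over modifications, which take precedence over
+/// additions; all other states map to `None`.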
+fn read_status(status: git2::Status) -> Option<GitFileStatus> {
+ if status.contains(git2::Status::CONFLICTED) {
+ Some(GitFileStatus::Conflict)
+ } else if status.intersects(
+ git2::Status::WT_MODIFIED
+ | git2::Status::WT_RENAMED
+ | git2::Status::INDEX_MODIFIED
+ | git2::Status::INDEX_RENAMED,
+ ) {
+ Some(GitFileStatus::Modified)
+ } else if status.intersects(git2::Status::WT_NEW | git2::Status::INDEX_NEW) {
+ Some(GitFileStatus::Added)
+ } else {
+ None
+ }
}
#[derive(Debug, Clone, Default)]
@@ -71,6 +122,7 @@ pub struct FakeGitRepository {
#[derive(Debug, Clone, Default)]
pub struct FakeGitRepositoryState {
pub index_contents: HashMap<PathBuf, String>,
+ pub worktree_statuses: HashMap<RepoPath, GitFileStatus>,
pub branch_name: Option<String>,
}
@@ -93,6 +145,20 @@ impl GitRepository for FakeGitRepository {
let state = self.state.lock();
state.branch_name.clone()
}
+
+ fn statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>> {
+ let state = self.state.lock();
+ let mut map = TreeMap::default();
+ for (repo_path, status) in state.worktree_statuses.iter() {
+ map.insert(repo_path.to_owned(), status.to_owned());
+ }
+ Some(map)
+ }
+
+ fn status(&self, path: &RepoPath) -> Option<GitFileStatus> {
+ let state = self.state.lock();
+ state.worktree_statuses.get(path).cloned()
+ }
}
fn check_path_to_repo_path_errors(relative_file_path: &Path) -> Result<()> {
@@ -123,3 +189,66 @@ fn check_path_to_repo_path_errors(relative_file_path: &Path) -> Result<()> {
_ => Ok(()),
}
}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
+pub enum GitFileStatus {
+ Added,
+ Modified,
+ Conflict,
+}
+
+#[derive(Clone, Debug, Ord, Hash, PartialOrd, Eq, PartialEq)]
+pub struct RepoPath(PathBuf);
+
+impl RepoPath {
+ pub fn new(path: PathBuf) -> Self {
+ debug_assert!(path.is_relative(), "Repo paths must be relative");
+
+ RepoPath(path)
+ }
+}
+
+impl From<&Path> for RepoPath {
+ fn from(value: &Path) -> Self {
+ RepoPath::new(value.to_path_buf())
+ }
+}
+
+impl From<PathBuf> for RepoPath {
+ fn from(value: PathBuf) -> Self {
+ RepoPath::new(value)
+ }
+}
+
+impl Default for RepoPath {
+ fn default() -> Self {
+ RepoPath(PathBuf::new())
+ }
+}
+
+impl AsRef<Path> for RepoPath {
+ fn as_ref(&self) -> &Path {
+ self.0.as_ref()
+ }
+}
+
+impl std::ops::Deref for RepoPath {
+ type Target = PathBuf;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
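+/// A seek target matching every path that descends from the wrapped path:
+/// descendants compare as `Greater`, so a cursor seek lands just past the
+/// entire subtree.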
+#[derive(Debug)]
+pub struct RepoPathDescendants<'a>(pub &'a Path);
+
+impl<'a> MapSeekTarget<RepoPath> for RepoPathDescendants<'a> {
+ fn cmp_cursor(&self, key: &RepoPath) -> Ordering {
+ if key.starts_with(&self.0) {
+ Ordering::Greater
+ } else {
+ self.0.cmp(key)
+ }
+ }
+}
@@ -16,3 +16,4 @@ settings = { path = "../settings" }
text = { path = "../text" }
workspace = { path = "../workspace" }
postage.workspace = true
+util = { path = "../util" }
@@ -1,6 +1,6 @@
use std::sync::Arc;
-use editor::{display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, DisplayPoint, Editor};
+use editor::{display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, Editor};
use gpui::{
actions, elements::*, geometry::vector::Vector2F, AnyViewHandle, AppContext, Axis, Entity,
View, ViewContext, ViewHandle,
@@ -8,6 +8,7 @@ use gpui::{
use menu::{Cancel, Confirm};
use settings::Settings;
use text::{Bias, Point};
+use util::paths::FILE_ROW_COLUMN_DELIMITER;
use workspace::{Modal, Workspace};
actions!(go_to_line, [Toggle]);
@@ -75,15 +76,16 @@ impl GoToLine {
fn confirm(&mut self, _: &Confirm, cx: &mut ViewContext<Self>) {
self.prev_scroll_position.take();
- self.active_editor.update(cx, |active_editor, cx| {
- if let Some(rows) = active_editor.highlighted_rows() {
+ if let Some(point) = self.point_from_query(cx) {
+ self.active_editor.update(cx, |active_editor, cx| {
let snapshot = active_editor.snapshot(cx).display_snapshot;
- let position = DisplayPoint::new(rows.start, 0).to_point(&snapshot);
+ let point = snapshot.buffer_snapshot.clip_point(point, Bias::Left);
active_editor.change_selections(Some(Autoscroll::center()), cx, |s| {
- s.select_ranges([position..position])
+ s.select_ranges([point..point])
});
- }
- });
+ });
+ }
+
cx.emit(Event::Dismissed);
}
@@ -96,16 +98,7 @@ impl GoToLine {
match event {
editor::Event::Blurred => cx.emit(Event::Dismissed),
editor::Event::BufferEdited { .. } => {
- let line_editor = self.line_editor.read(cx).text(cx);
- let mut components = line_editor.trim().split(&[',', ':'][..]);
- let row = components.next().and_then(|row| row.parse::<u32>().ok());
- let column = components.next().and_then(|row| row.parse::<u32>().ok());
- if let Some(point) = row.map(|row| {
- Point::new(
- row.saturating_sub(1),
- column.map(|column| column.saturating_sub(1)).unwrap_or(0),
- )
- }) {
+ if let Some(point) = self.point_from_query(cx) {
self.active_editor.update(cx, |active_editor, cx| {
let snapshot = active_editor.snapshot(cx).display_snapshot;
let point = snapshot.buffer_snapshot.clip_point(point, Bias::Left);
@@ -120,6 +113,20 @@ impl GoToLine {
_ => {}
}
}
+
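+ // Parses the query as a row and an optional column (separated by
+ // FILE_ROW_COLUMN_DELIMITER), converting the 1-based input to a 0-based Point.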
+ fn point_from_query(&self, cx: &ViewContext<Self>) -> Option<Point> {
+ let line_editor = self.line_editor.read(cx).text(cx);
+ let mut components = line_editor
+ .splitn(2, FILE_ROW_COLUMN_DELIMITER)
+ .map(str::trim)
+ .fuse();
+ let row = components.next().and_then(|row| row.parse::<u32>().ok())?;
+ let column = components.next().and_then(|col| col.parse::<u32>().ok());
+ Some(Point::new(
+ row.saturating_sub(1),
+ column.unwrap_or(0).saturating_sub(1),
+ ))
+ }
}
impl Entity for GoToLine {
@@ -147,7 +154,7 @@ impl View for GoToLine {
let theme = &cx.global::<Settings>().theme.picker;
let label = format!(
- "{},{} of {} lines",
+ "{}{FILE_ROW_COLUMN_DELIMITER}{} of {} lines",
self.cursor_point.row + 1,
self.cursor_point.column + 1,
self.max_point.row + 1
@@ -48,7 +48,7 @@ smallvec.workspace = true
smol.workspace = true
time.workspace = true
tiny-skia = "0.5"
-usvg = "0.14"
+usvg = { version = "0.14", features = [] }
uuid = { version = "1.1.2", features = ["v4"] }
waker-fn = "1.1.0"
@@ -42,7 +42,7 @@ impl Color {
}
pub fn yellow() -> Self {
- Self(ColorU::from_u32(0x00ffffff))
+ Self(ColorU::from_u32(0xffff00ff))
}
pub fn new(r: u8, g: u8, b: u8, a: u8) -> Self {
@@ -578,6 +578,15 @@ pub struct ComponentHost<V: View, C: Component<V>> {
view_type: PhantomData<V>,
}
+impl<V: View, C: Component<V>> ComponentHost<V, C> {
+ pub fn new(c: C) -> Self {
+ Self {
+ component: c,
+ view_type: PhantomData,
+ }
+ }
+}
+
impl<V: View, C: Component<V>> Deref for ComponentHost<V, C> {
type Target = C;
@@ -11,6 +11,19 @@ pub struct Binding {
context_predicate: Option<KeymapContextPredicate>,
}
+impl std::fmt::Debug for Binding {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(
+ f,
+ "Binding {{ keystrokes: {:?}, action: {}::{}, context_predicate: {:?} }}",
+ self.keystrokes,
+ self.action.namespace(),
+ self.action.name(),
+ self.context_predicate
+ )
+ }
+}
+
impl Clone for Binding {
fn clone(&self) -> Self {
Self {
@@ -755,7 +755,7 @@ impl platform::Window for Window {
let _ = postage::sink::Sink::try_send(&mut done_tx, answer.try_into().unwrap());
}
});
-
+ let block = block.copy();
let native_window = self.0.borrow().native_window;
self.0
.borrow()
@@ -75,5 +75,6 @@ lsp = { path = "../lsp", features = ["test-support"] }
settings = { path = "../settings", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
+git2 = { version = "0.15", default-features = false }
tempdir.workspace = true
unindent.workspace = true
@@ -123,6 +123,8 @@ pub struct Project {
loading_local_worktrees:
HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
opened_buffers: HashMap<u64, OpenBuffer>,
+ local_buffer_ids_by_path: HashMap<ProjectPath, u64>,
+ local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, u64>,
/// A mapping from a buffer ID to None means that we've started waiting for an ID but haven't finished loading it.
/// Used for re-issuing buffer requests when peers temporarily disconnect
incomplete_remote_buffers: HashMap<u64, Option<ModelHandle<Buffer>>>,
@@ -456,6 +458,8 @@ impl Project {
incomplete_remote_buffers: Default::default(),
loading_buffers_by_path: Default::default(),
loading_local_worktrees: Default::default(),
+ local_buffer_ids_by_path: Default::default(),
+ local_buffer_ids_by_entry_id: Default::default(),
buffer_snapshots: Default::default(),
join_project_response_message_id: 0,
client_state: None,
@@ -526,6 +530,8 @@ impl Project {
shared_buffers: Default::default(),
incomplete_remote_buffers: Default::default(),
loading_local_worktrees: Default::default(),
+ local_buffer_ids_by_path: Default::default(),
+ local_buffer_ids_by_entry_id: Default::default(),
active_entry: None,
collaborators: Default::default(),
join_project_response_message_id: response.message_id,
@@ -1643,6 +1649,21 @@ impl Project {
})
.detach();
+ if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
+ if file.is_local {
+ self.local_buffer_ids_by_path.insert(
+ ProjectPath {
+ worktree_id: file.worktree_id(cx),
+ path: file.path.clone(),
+ },
+ remote_id,
+ );
+
+ self.local_buffer_ids_by_entry_id
+ .insert(file.entry_id, remote_id);
+ }
+ }
+
self.detect_language_for_buffer(buffer, cx);
self.register_buffer_with_language_servers(buffer, cx);
self.register_buffer_with_copilot(buffer, cx);
@@ -4544,7 +4565,7 @@ impl Project {
if worktree.read(cx).is_local() {
cx.subscribe(worktree, |this, worktree, event, cx| match event {
worktree::Event::UpdatedEntries(changes) => {
- this.update_local_worktree_buffers(&worktree, cx);
+ this.update_local_worktree_buffers(&worktree, &changes, cx);
this.update_local_worktree_language_servers(&worktree, changes, cx);
}
worktree::Event::UpdatedGitRepositories(updated_repos) => {
@@ -4578,80 +4599,106 @@ impl Project {
fn update_local_worktree_buffers(
&mut self,
worktree_handle: &ModelHandle<Worktree>,
+ changes: &HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
cx: &mut ModelContext<Self>,
) {
let snapshot = worktree_handle.read(cx).snapshot();
- let mut buffers_to_delete = Vec::new();
let mut renamed_buffers = Vec::new();
+ for (path, entry_id) in changes.keys() {
+ let worktree_id = worktree_handle.read(cx).id();
+ let project_path = ProjectPath {
+ worktree_id,
+ path: path.clone(),
+ };
- for (buffer_id, buffer) in &self.opened_buffers {
- if let Some(buffer) = buffer.upgrade(cx) {
- buffer.update(cx, |buffer, cx| {
- if let Some(old_file) = File::from_dyn(buffer.file()) {
- if old_file.worktree != *worktree_handle {
- return;
- }
+ let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
+ Some(&buffer_id) => buffer_id,
+ None => match self.local_buffer_ids_by_path.get(&project_path) {
+ Some(&buffer_id) => buffer_id,
+ None => continue,
+ },
+ };
- let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id)
- {
- File {
- is_local: true,
- entry_id: entry.id,
- mtime: entry.mtime,
- path: entry.path.clone(),
- worktree: worktree_handle.clone(),
- is_deleted: false,
- }
- } else if let Some(entry) =
- snapshot.entry_for_path(old_file.path().as_ref())
- {
- File {
- is_local: true,
- entry_id: entry.id,
- mtime: entry.mtime,
- path: entry.path.clone(),
- worktree: worktree_handle.clone(),
- is_deleted: false,
- }
- } else {
- File {
- is_local: true,
- entry_id: old_file.entry_id,
- path: old_file.path().clone(),
- mtime: old_file.mtime(),
- worktree: worktree_handle.clone(),
- is_deleted: true,
- }
- };
+ let open_buffer = self.opened_buffers.get(&buffer_id);
+ let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade(cx)) {
+ buffer
+ } else {
+ self.opened_buffers.remove(&buffer_id);
+ self.local_buffer_ids_by_path.remove(&project_path);
+ self.local_buffer_ids_by_entry_id.remove(entry_id);
+ continue;
+ };
- let old_path = old_file.abs_path(cx);
- if new_file.abs_path(cx) != old_path {
- renamed_buffers.push((cx.handle(), old_file.clone()));
+ buffer.update(cx, |buffer, cx| {
+ if let Some(old_file) = File::from_dyn(buffer.file()) {
+ if old_file.worktree != *worktree_handle {
+ return;
+ }
+
+ let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
+ File {
+ is_local: true,
+ entry_id: entry.id,
+ mtime: entry.mtime,
+ path: entry.path.clone(),
+ worktree: worktree_handle.clone(),
+ is_deleted: false,
}
+ } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
+ File {
+ is_local: true,
+ entry_id: entry.id,
+ mtime: entry.mtime,
+ path: entry.path.clone(),
+ worktree: worktree_handle.clone(),
+ is_deleted: false,
+ }
+ } else {
+ File {
+ is_local: true,
+ entry_id: old_file.entry_id,
+ path: old_file.path().clone(),
+ mtime: old_file.mtime(),
+ worktree: worktree_handle.clone(),
+ is_deleted: true,
+ }
+ };
- if new_file != *old_file {
- if let Some(project_id) = self.remote_id() {
- self.client
- .send(proto::UpdateBufferFile {
- project_id,
- buffer_id: *buffer_id as u64,
- file: Some(new_file.to_proto()),
- })
- .log_err();
- }
+ let old_path = old_file.abs_path(cx);
+ if new_file.abs_path(cx) != old_path {
+ renamed_buffers.push((cx.handle(), old_file.clone()));
+ self.local_buffer_ids_by_path.remove(&project_path);
+ self.local_buffer_ids_by_path.insert(
+ ProjectPath {
+ worktree_id,
+ path: path.clone(),
+ },
+ buffer_id,
+ );
+ }
- buffer.file_updated(Arc::new(new_file), cx).detach();
- }
+ if new_file.entry_id != *entry_id {
+ self.local_buffer_ids_by_entry_id.remove(entry_id);
+ self.local_buffer_ids_by_entry_id
+ .insert(new_file.entry_id, buffer_id);
}
- });
- } else {
- buffers_to_delete.push(*buffer_id);
- }
- }
- for buffer_id in buffers_to_delete {
- self.opened_buffers.remove(&buffer_id);
+ if new_file != *old_file {
+ if let Some(project_id) = self.remote_id() {
+ self.client
+ .send(proto::UpdateBufferFile {
+ project_id,
+ buffer_id: buffer_id as u64,
+ file: Some(new_file.to_proto()),
+ })
+ .log_err();
+ }
+
+ buffer.file_updated(Arc::new(new_file), cx).detach();
+ }
+ }
+ });
}
for (buffer, old_file) in renamed_buffers {
@@ -4664,7 +4711,7 @@ impl Project {
fn update_local_worktree_language_servers(
&mut self,
worktree_handle: &ModelHandle<Worktree>,
- changes: &HashMap<Arc<Path>, PathChange>,
+ changes: &HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
cx: &mut ModelContext<Self>,
) {
let worktree_id = worktree_handle.read(cx).id();
@@ -4681,7 +4728,7 @@ impl Project {
let params = lsp::DidChangeWatchedFilesParams {
changes: changes
.iter()
- .filter_map(|(path, change)| {
+ .filter_map(|((path, _), change)| {
let path = abs_path.join(path);
if watched_paths.matches(&path) {
Some(lsp::FileEvent {
@@ -6,7 +6,10 @@ use anyhow::{anyhow, Context, Result};
use client::{proto, Client};
use clock::ReplicaId;
use collections::{HashMap, VecDeque};
-use fs::{repository::GitRepository, Fs, LineEnding};
+use fs::{
+ repository::{GitFileStatus, GitRepository, RepoPath, RepoPathDescendants},
+ Fs, LineEnding,
+};
use futures::{
channel::{
mpsc::{self, UnboundedSender},
@@ -52,7 +55,7 @@ use std::{
time::{Duration, SystemTime},
};
use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
-use util::{paths::HOME, ResultExt, TryFutureExt};
+use util::{paths::HOME, ResultExt, TakeUntilExt, TryFutureExt};
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
pub struct WorktreeId(usize);
@@ -117,10 +120,38 @@ pub struct Snapshot {
completed_scan_id: usize,
}
-#[derive(Clone, Debug, Eq, PartialEq)]
+impl Snapshot {
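+ /// Returns the repository entry whose work directory contains `path`,
+ /// preferring the deepest work directory when repositories are nested.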
+ pub fn repo_for(&self, path: &Path) -> Option<RepositoryEntry> {
+ let mut max_len = 0;
+ let mut current_candidate = None;
+ for (work_directory, repo) in self.repository_entries.iter() {
+ if repo.contains(self, path) {
+ if work_directory.0.as_os_str().len() >= max_len {
+ current_candidate = Some(repo);
+ max_len = work_directory.0.as_os_str().len();
+ } else {
+ break;
+ }
+ }
+ }
+
+ current_candidate.map(|entry| entry.to_owned())
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositoryEntry {
pub(crate) work_directory: WorkDirectoryEntry,
pub(crate) branch: Option<Arc<str>>,
+ pub(crate) statuses: TreeMap<RepoPath, GitFileStatus>,
+}
+
+fn read_git_status(git_status: i32) -> Option<GitFileStatus> {
+ proto::GitStatus::from_i32(git_status).map(|status| match status {
+ proto::GitStatus::Added => GitFileStatus::Added,
+ proto::GitStatus::Modified => GitFileStatus::Modified,
+ proto::GitStatus::Conflict => GitFileStatus::Conflict,
+ })
}
impl RepositoryEntry {
@@ -141,6 +172,102 @@ impl RepositoryEntry {
pub(crate) fn contains(&self, snapshot: &Snapshot, path: &Path) -> bool {
self.work_directory.contains(snapshot, path)
}
+
+ pub fn status_for_file(&self, snapshot: &Snapshot, path: &Path) -> Option<GitFileStatus> {
+ self.work_directory
+ .relativize(snapshot, path)
+ .and_then(|repo_path| self.statuses.get(&repo_path))
+ .cloned()
+ }
+
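+ /// Returns the combined status of everything under `path`: `Conflict`
+ /// outranks `Modified`, which outranks `Added`.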
+ pub fn status_for_path(&self, snapshot: &Snapshot, path: &Path) -> Option<GitFileStatus> {
+ self.work_directory
+ .relativize(snapshot, path)
+ .and_then(|repo_path| {
+ self.statuses
+ .iter_from(&repo_path)
+ .take_while(|(key, _)| key.starts_with(&repo_path))
+ // Short-circuit once we've found the highest-precedence status (a conflict)
+ .take_until(|(_, status)| status == &&GitFileStatus::Conflict)
+ .map(|(_, status)| status)
+ .reduce(
+ |status_first, status_second| match (status_first, status_second) {
+ (GitFileStatus::Conflict, _) | (_, GitFileStatus::Conflict) => {
+ &GitFileStatus::Conflict
+ }
+ (GitFileStatus::Modified, _) | (_, GitFileStatus::Modified) => {
+ &GitFileStatus::Modified
+ }
+ _ => &GitFileStatus::Added,
+ },
+ )
+ .copied()
+ })
+ }
+
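+ /// Builds a proto update that transforms `other` (the previous state of this
+ /// repository) into `self`, walking both sorted status maps in lock-step and
+ /// emitting only the changed and removed paths.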
+ pub fn build_update(&self, other: &Self) -> proto::RepositoryEntry {
+ let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
+ let mut removed_statuses: Vec<String> = Vec::new();
+
+ let mut self_statuses = self.statuses.iter().peekable();
+ let mut other_statuses = other.statuses.iter().peekable();
+ loop {
+ match (self_statuses.peek(), other_statuses.peek()) {
+ (Some((self_repo_path, self_status)), Some((other_repo_path, other_status))) => {
+ match Ord::cmp(self_repo_path, other_repo_path) {
+ Ordering::Less => {
+ updated_statuses.push(make_status_entry(self_repo_path, self_status));
+ self_statuses.next();
+ }
+ Ordering::Equal => {
+ if self_status != other_status {
+ updated_statuses
+ .push(make_status_entry(self_repo_path, self_status));
+ }
+
+ self_statuses.next();
+ other_statuses.next();
+ }
+ Ordering::Greater => {
+ removed_statuses.push(make_repo_path(other_repo_path));
+ other_statuses.next();
+ }
+ }
+ }
+ (Some((self_repo_path, self_status)), None) => {
+ updated_statuses.push(make_status_entry(self_repo_path, self_status));
+ self_statuses.next();
+ }
+ (None, Some((other_repo_path, _))) => {
+ removed_statuses.push(make_repo_path(other_repo_path));
+ other_statuses.next();
+ }
+ (None, None) => break,
+ }
+ }
+
+ proto::RepositoryEntry {
+ work_directory_id: self.work_directory_id().to_proto(),
+ branch: self.branch.as_ref().map(|str| str.to_string()),
+ removed_repo_paths: removed_statuses,
+ updated_statuses,
+ }
+ }
+}
+
+fn make_repo_path(path: &RepoPath) -> String {
+ path.as_os_str().to_string_lossy().to_string()
+}
+
+fn make_status_entry(path: &RepoPath, status: &GitFileStatus) -> proto::StatusEntry {
+ proto::StatusEntry {
+ repo_path: make_repo_path(path),
+ status: match status {
+ GitFileStatus::Added => proto::GitStatus::Added.into(),
+ GitFileStatus::Modified => proto::GitStatus::Modified.into(),
+ GitFileStatus::Conflict => proto::GitStatus::Conflict.into(),
+ },
+ }
}
impl From<&RepositoryEntry> for proto::RepositoryEntry {
@@ -148,6 +275,12 @@ impl From<&RepositoryEntry> for proto::RepositoryEntry {
proto::RepositoryEntry {
work_directory_id: value.work_directory.to_proto(),
branch: value.branch.as_ref().map(|str| str.to_string()),
+ updated_statuses: value
+ .statuses
+ .iter()
+ .map(|(repo_path, status)| make_status_entry(repo_path, status))
+ .collect(),
+ removed_repo_paths: Default::default(),
}
}
}
@@ -162,6 +295,12 @@ impl Default for RepositoryWorkDirectory {
}
}
+impl AsRef<Path> for RepositoryWorkDirectory {
+ fn as_ref(&self) -> &Path {
+ self.0.as_ref()
+ }
+}
+
#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
pub struct WorkDirectoryEntry(ProjectEntryId);
@@ -178,7 +317,7 @@ impl WorkDirectoryEntry {
worktree.entry_for_id(self.0).and_then(|entry| {
path.strip_prefix(&entry.path)
.ok()
- .map(move |path| RepoPath(path.to_owned()))
+ .map(move |path| path.into())
})
}
}
@@ -197,32 +336,9 @@ impl<'a> From<ProjectEntryId> for WorkDirectoryEntry {
}
}
-#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
-pub struct RepoPath(PathBuf);
-
-impl AsRef<Path> for RepoPath {
- fn as_ref(&self) -> &Path {
- self.0.as_ref()
- }
-}
-
-impl Deref for RepoPath {
- type Target = PathBuf;
-
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-
-impl AsRef<Path> for RepositoryWorkDirectory {
- fn as_ref(&self) -> &Path {
- self.0.as_ref()
- }
-}
-
#[derive(Debug, Clone)]
pub struct LocalSnapshot {
- ignores_by_parent_abs_path: HashMap<Arc<Path>, (Arc<Gitignore>, usize)>,
+ ignores_by_parent_abs_path: HashMap<Arc<Path>, (Arc<Gitignore>, bool)>, // (gitignore, needs_update)
// The ProjectEntryId corresponds to the entry for the .git dir
// work_directory_id
git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
@@ -234,6 +350,7 @@ pub struct LocalSnapshot {
#[derive(Debug, Clone)]
pub struct LocalRepositoryEntry {
pub(crate) scan_id: usize,
+ pub(crate) full_scan_id: usize,
pub(crate) repo_ptr: Arc<Mutex<dyn GitRepository>>,
/// Path to the actual .git folder.
/// Note: if .git is a file, this points to the folder indicated by the .git file
@@ -265,7 +382,7 @@ enum ScanState {
Started,
Updated {
snapshot: LocalSnapshot,
- changes: HashMap<Arc<Path>, PathChange>,
+ changes: HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
barrier: Option<barrier::Sender>,
scanning: bool,
},
@@ -279,7 +396,7 @@ struct ShareState {
}
pub enum Event {
- UpdatedEntries(HashMap<Arc<Path>, PathChange>),
+ UpdatedEntries(HashMap<(Arc<Path>, ProjectEntryId), PathChange>),
UpdatedGitRepositories(HashMap<Arc<Path>, LocalRepositoryEntry>),
}
@@ -1424,13 +1541,41 @@ impl Snapshot {
});
for repository in update.updated_repositories {
- let repository = RepositoryEntry {
- work_directory: ProjectEntryId::from_proto(repository.work_directory_id).into(),
- branch: repository.branch.map(Into::into),
- };
- if let Some(entry) = self.entry_for_id(repository.work_directory_id()) {
- self.repository_entries
- .insert(RepositoryWorkDirectory(entry.path.clone()), repository)
+ let work_directory_entry: WorkDirectoryEntry =
+ ProjectEntryId::from_proto(repository.work_directory_id).into();
+
+ if let Some(entry) = self.entry_for_id(*work_directory_entry) {
+ let mut statuses = TreeMap::default();
+ for status_entry in repository.updated_statuses {
+ let Some(git_file_status) = read_git_status(status_entry.status) else {
+ continue;
+ };
+
+ let repo_path = RepoPath::new(status_entry.repo_path.into());
+ statuses.insert(repo_path, git_file_status);
+ }
+
+ let work_directory = RepositoryWorkDirectory(entry.path.clone());
+ if self.repository_entries.get(&work_directory).is_some() {
+ self.repository_entries.update(&work_directory, |repo| {
+ repo.branch = repository.branch.map(Into::into);
+ repo.statuses.insert_tree(statuses);
+
+ for repo_path in repository.removed_repo_paths {
+ let repo_path = RepoPath::new(repo_path.into());
+ repo.statuses.remove(&repo_path);
+ }
+ });
+ } else {
+ self.repository_entries.insert(
+ work_directory,
+ RepositoryEntry {
+ work_directory: work_directory_entry,
+ branch: repository.branch.map(Into::into),
+ statuses,
+ },
+ )
+ }
} else {
log::error!("no work directory entry for repository {:?}", repository)
}
@@ -1524,6 +1669,30 @@ impl Snapshot {
}
}
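+ /// Iterates all entries under `parent_path`, optionally including
+ /// directories and ignored entries.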
+ fn descendent_entries<'a>(
+ &'a self,
+ include_dirs: bool,
+ include_ignored: bool,
+ parent_path: &'a Path,
+ ) -> DescendentEntriesIter<'a> {
+ let mut cursor = self.entries_by_path.cursor();
+ cursor.seek(&TraversalTarget::Path(parent_path), Bias::Left, &());
+ let mut traversal = Traversal {
+ cursor,
+ include_dirs,
+ include_ignored,
+ };
+
+ if traversal.end_offset() == traversal.start_offset() {
+ traversal.advance();
+ }
+
+ DescendentEntriesIter {
+ traversal,
+ parent_path,
+ }
+ }
+
pub fn root_entry(&self) -> Option<&Entry> {
self.entry_for_path("")
}
@@ -1570,32 +1739,17 @@ impl Snapshot {
}
impl LocalSnapshot {
- pub(crate) fn repo_for(&self, path: &Path) -> Option<RepositoryEntry> {
- let mut max_len = 0;
- let mut current_candidate = None;
- for (work_directory, repo) in (&self.repository_entries).iter() {
- if repo.contains(self, path) {
- if work_directory.0.as_os_str().len() >= max_len {
- current_candidate = Some(repo);
- max_len = work_directory.0.as_os_str().len();
- } else {
- break;
- }
- }
- }
-
- current_candidate.map(|entry| entry.to_owned())
+ pub(crate) fn get_local_repo(&self, repo: &RepositoryEntry) -> Option<&LocalRepositoryEntry> {
+ self.git_repositories.get(&repo.work_directory.0)
}
pub(crate) fn repo_for_metadata(
&self,
path: &Path,
- ) -> Option<(ProjectEntryId, Arc<Mutex<dyn GitRepository>>)> {
- let (entry_id, local_repo) = self
- .git_repositories
+ ) -> Option<(&ProjectEntryId, &LocalRepositoryEntry)> {
+ self.git_repositories
.iter()
- .find(|(_, repo)| repo.in_dot_git(path))?;
- Some((*entry_id, local_repo.repo_ptr.to_owned()))
+ .find(|(_, repo)| repo.in_dot_git(path))
}
#[cfg(test)]
@@ -1685,7 +1839,7 @@ impl LocalSnapshot {
}
Ordering::Equal => {
if self_repo != other_repo {
- updated_repositories.push((*self_repo).into());
+ updated_repositories.push(self_repo.build_update(other_repo));
}
self_repos.next();
@@ -1728,10 +1882,8 @@ impl LocalSnapshot {
let abs_path = self.abs_path.join(&entry.path);
match smol::block_on(build_gitignore(&abs_path, fs)) {
Ok(ignore) => {
- self.ignores_by_parent_abs_path.insert(
- abs_path.parent().unwrap().into(),
- (Arc::new(ignore), self.scan_id),
- );
+ self.ignores_by_parent_abs_path
+ .insert(abs_path.parent().unwrap().into(), (Arc::new(ignore), true));
}
Err(error) => {
log::error!(
@@ -1801,10 +1953,8 @@ impl LocalSnapshot {
}
if let Some(ignore) = ignore {
- self.ignores_by_parent_abs_path.insert(
- self.abs_path.join(&parent_path).into(),
- (ignore, self.scan_id),
- );
+ self.ignores_by_parent_abs_path
+ .insert(self.abs_path.join(&parent_path).into(), (ignore, false));
}
if parent_path.file_name() == Some(&DOT_GIT) {
@@ -1852,11 +2002,13 @@ impl LocalSnapshot {
let scan_id = self.scan_id;
let repo_lock = repo.lock();
+
self.repository_entries.insert(
work_directory,
RepositoryEntry {
work_directory: work_dir_id.into(),
branch: repo_lock.branch_name().map(Into::into),
+ statuses: repo_lock.statuses().unwrap_or_default(),
},
);
drop(repo_lock);
@@ -1865,6 +2017,7 @@ impl LocalSnapshot {
work_dir_id,
LocalRepositoryEntry {
scan_id,
+ full_scan_id: scan_id,
repo_ptr: repo,
git_dir_path: parent_path.clone(),
},
@@ -1905,11 +2058,11 @@ impl LocalSnapshot {
if path.file_name() == Some(&GITIGNORE) {
let abs_parent_path = self.abs_path.join(path.parent().unwrap());
- if let Some((_, scan_id)) = self
+ if let Some((_, needs_update)) = self
.ignores_by_parent_abs_path
.get_mut(abs_parent_path.as_path())
{
- *scan_id = self.snapshot.scan_id;
+ *needs_update = true;
}
}
}
@@ -2399,10 +2552,15 @@ struct BackgroundScanner {
status_updates_tx: UnboundedSender<ScanState>,
executor: Arc<executor::Background>,
refresh_requests_rx: channel::Receiver<(Vec<PathBuf>, barrier::Sender)>,
- prev_state: Mutex<(Snapshot, Vec<Arc<Path>>)>,
+ prev_state: Mutex<BackgroundScannerState>,
finished_initial_scan: bool,
}
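+/// The snapshot sent with the previous status update, plus the event paths
+/// accumulated since then; diffed against the current snapshot when building
+/// change sets.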
+struct BackgroundScannerState {
+ snapshot: Snapshot,
+ event_paths: Vec<Arc<Path>>,
+}
+
impl BackgroundScanner {
fn new(
snapshot: LocalSnapshot,
@@ -2416,7 +2574,10 @@ impl BackgroundScanner {
status_updates_tx,
executor,
refresh_requests_rx,
- prev_state: Mutex::new((snapshot.snapshot.clone(), Vec::new())),
+ prev_state: Mutex::new(BackgroundScannerState {
+ snapshot: snapshot.snapshot.clone(),
+ event_paths: Default::default(),
+ }),
snapshot: Mutex::new(snapshot),
finished_initial_scan: false,
}
@@ -2444,7 +2605,7 @@ impl BackgroundScanner {
self.snapshot
.lock()
.ignores_by_parent_abs_path
- .insert(ancestor.into(), (ignore.into(), 0));
+ .insert(ancestor.into(), (ignore.into(), false));
}
}
{
@@ -2497,7 +2658,7 @@ impl BackgroundScanner {
// these before handling changes reported by the filesystem.
request = self.refresh_requests_rx.recv().fuse() => {
let Ok((paths, barrier)) = request else { break };
- if !self.process_refresh_request(paths, barrier).await {
+ if !self.process_refresh_request(paths.clone(), barrier).await {
return;
}
}
@@ -2508,25 +2669,37 @@ impl BackgroundScanner {
while let Poll::Ready(Some(more_events)) = futures::poll!(events_rx.next()) {
paths.extend(more_events.into_iter().map(|e| e.path));
}
- self.process_events(paths).await;
+ self.process_events(paths.clone()).await;
}
}
}
}
async fn process_refresh_request(&self, paths: Vec<PathBuf>, barrier: barrier::Sender) -> bool {
- self.reload_entries_for_paths(paths, None).await;
+ if let Some(mut paths) = self.reload_entries_for_paths(paths, None).await {
+ paths.sort_unstable();
+ util::extend_sorted(
+ &mut self.prev_state.lock().event_paths,
+ paths,
+ usize::MAX,
+ Ord::cmp,
+ );
+ }
self.send_status_update(false, Some(barrier))
}
async fn process_events(&mut self, paths: Vec<PathBuf>) {
let (scan_job_tx, scan_job_rx) = channel::unbounded();
- if let Some(mut paths) = self
+ let paths = self
.reload_entries_for_paths(paths, Some(scan_job_tx.clone()))
- .await
- {
- paths.sort_unstable();
- util::extend_sorted(&mut self.prev_state.lock().1, paths, usize::MAX, Ord::cmp);
+ .await;
+ if let Some(paths) = &paths {
+ util::extend_sorted(
+ &mut self.prev_state.lock().event_paths,
+ paths.iter().cloned(),
+ usize::MAX,
+ Ord::cmp,
+ );
}
drop(scan_job_tx);
self.scan_dirs(false, scan_job_rx).await;
@@ -2535,6 +2708,12 @@ impl BackgroundScanner {
let mut snapshot = self.snapshot.lock();
+ if let Some(paths) = paths {
+ for path in paths {
+ self.reload_repo_for_file_path(&path, &mut *snapshot, self.fs.as_ref());
+ }
+ }
+
let mut git_repositories = mem::take(&mut snapshot.git_repositories);
git_repositories.retain(|work_directory_id, _| {
snapshot
@@ -2560,6 +2739,7 @@ impl BackgroundScanner {
drop(snapshot);
self.send_status_update(false, None);
+ self.prev_state.lock().event_paths.clear();
}
async fn scan_dirs(
@@ -2637,14 +2817,18 @@ impl BackgroundScanner {
fn send_status_update(&self, scanning: bool, barrier: Option<barrier::Sender>) -> bool {
let mut prev_state = self.prev_state.lock();
- let snapshot = self.snapshot.lock().clone();
- let mut old_snapshot = snapshot.snapshot.clone();
- mem::swap(&mut old_snapshot, &mut prev_state.0);
- let changed_paths = mem::take(&mut prev_state.1);
- let changes = self.build_change_set(&old_snapshot, &snapshot.snapshot, changed_paths);
+ let new_snapshot = self.snapshot.lock().clone();
+ let old_snapshot = mem::replace(&mut prev_state.snapshot, new_snapshot.snapshot.clone());
+
+ let changes = self.build_change_set(
+ &old_snapshot,
+ &new_snapshot.snapshot,
+ &prev_state.event_paths,
+ );
+
self.status_updates_tx
.unbounded_send(ScanState::Updated {
- snapshot,
+ snapshot: new_snapshot,
changes,
scanning,
barrier,
@@ -2840,27 +3024,6 @@ impl BackgroundScanner {
fs_entry.is_ignored = ignore_stack.is_all();
snapshot.insert_entry(fs_entry, self.fs.as_ref());
- let scan_id = snapshot.scan_id;
-
- let repo_with_path_in_dotgit = snapshot.repo_for_metadata(&path);
- if let Some((entry_id, repo)) = repo_with_path_in_dotgit {
- let work_dir = snapshot
- .entry_for_id(entry_id)
- .map(|entry| RepositoryWorkDirectory(entry.path.clone()))?;
-
- let repo = repo.lock();
- repo.reload_index();
- let branch = repo.branch_name();
-
- snapshot.git_repositories.update(&entry_id, |entry| {
- entry.scan_id = scan_id;
- });
-
- snapshot
- .repository_entries
- .update(&work_dir, |entry| entry.branch = branch.map(Into::into));
- }
-
if let Some(scan_queue_tx) = &scan_queue_tx {
let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path);
if metadata.is_dir && !ancestor_inodes.contains(&metadata.inode) {
@@ -2876,7 +3039,9 @@ impl BackgroundScanner {
}
}
}
- Ok(None) => {}
+ Ok(None) => {
+ self.remove_repo_path(&path, &mut snapshot);
+ }
Err(err) => {
// TODO - create a special 'error' entry in the entries tree to mark this
log::error!("error reading file on event {:?}", err);
@@ -2887,22 +3052,143 @@ impl BackgroundScanner {
Some(event_paths)
}
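+ // Called for a removed path outside of any `.git` directory: drops the git
+ // statuses for that path and everything beneath it, using
+ // `RepoPathDescendants` to clear the whole subtree in one range removal.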
+ fn remove_repo_path(&self, path: &Path, snapshot: &mut LocalSnapshot) -> Option<()> {
+ if !path
+ .components()
+ .any(|component| component.as_os_str() == *DOT_GIT)
+ {
+ let scan_id = snapshot.scan_id;
+ let repo = snapshot.repo_for(&path)?;
+
+ let repo_path = repo.work_directory.relativize(&snapshot, &path)?;
+
+ let work_dir = repo.work_directory(snapshot)?;
+ let work_dir_id = repo.work_directory;
+
+ snapshot
+ .git_repositories
+ .update(&work_dir_id, |entry| entry.scan_id = scan_id);
+
+ snapshot.repository_entries.update(&work_dir, |entry| {
+ entry
+ .statuses
+ .remove_range(&repo_path, &RepoPathDescendants(&repo_path))
+ });
+ }
+
+ Some(())
+ }
+
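+ // Refreshes git state for a changed path. Paths inside a `.git` directory
+ // trigger a full reload of that repository (index, branch, and statuses,
+ // at most once per scan); other paths have only their descendants'
+ // statuses re-queried.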
+ fn reload_repo_for_file_path(
+ &self,
+ path: &Path,
+ snapshot: &mut LocalSnapshot,
+ fs: &dyn Fs,
+ ) -> Option<()> {
+ let scan_id = snapshot.scan_id;
+
+ if path
+ .components()
+ .any(|component| component.as_os_str() == *DOT_GIT)
+ {
+ let (entry_id, repo_ptr) = {
+ let Some((entry_id, repo)) = snapshot.repo_for_metadata(&path) else {
+ let dot_git_dir = path
+ .ancestors()
+ .find(|ancestor| ancestor.file_name() == Some(&*DOT_GIT))?;
+
+ snapshot.build_repo(dot_git_dir.into(), fs);
+ return None;
+ };
+ if repo.full_scan_id == scan_id {
+ return None;
+ }
+ (*entry_id, repo.repo_ptr.to_owned())
+ };
+
+ let work_dir = snapshot
+ .entry_for_id(entry_id)
+ .map(|entry| RepositoryWorkDirectory(entry.path.clone()))?;
+
+ let repo = repo_ptr.lock();
+ repo.reload_index();
+ let branch = repo.branch_name();
+ let statuses = repo.statuses().unwrap_or_default();
+
+ snapshot.git_repositories.update(&entry_id, |entry| {
+ entry.scan_id = scan_id;
+ entry.full_scan_id = scan_id;
+ });
+
+ snapshot.repository_entries.update(&work_dir, |entry| {
+ entry.branch = branch.map(Into::into);
+ entry.statuses = statuses;
+ });
+ } else {
+ if snapshot
+ .entry_for_path(&path)
+ .map(|entry| entry.is_ignored)
+ .unwrap_or(false)
+ {
+ self.remove_repo_path(&path, snapshot);
+ return None;
+ }
+
+ let repo = snapshot.repo_for(&path)?;
+
+ let work_dir = repo.work_directory(snapshot)?;
+ let work_dir_id = repo.work_directory.clone();
+
+ snapshot
+ .git_repositories
+ .update(&work_dir_id, |entry| entry.scan_id = scan_id);
+
+ let local_repo = snapshot.get_local_repo(&repo)?.to_owned();
+
+ // Short circuit if we've already scanned everything
+ if local_repo.full_scan_id == scan_id {
+ return None;
+ }
+
+ let mut repository = snapshot.repository_entries.remove(&work_dir)?;
+
+ for entry in snapshot.descendent_entries(false, false, path) {
+ let Some(repo_path) = repo.work_directory.relativize(snapshot, &entry.path) else {
+ continue;
+ };
+
+ let status = local_repo.repo_ptr.lock().status(&repo_path);
+ if let Some(status) = status {
+ repository.statuses.insert(repo_path.clone(), status);
+ } else {
+ repository.statuses.remove(&repo_path);
+ }
+ }
+
+ snapshot.repository_entries.insert(work_dir, repository)
+ }
+
+ Some(())
+ }
+
async fn update_ignore_statuses(&self) {
use futures::FutureExt as _;
let mut snapshot = self.snapshot.lock().clone();
let mut ignores_to_update = Vec::new();
let mut ignores_to_delete = Vec::new();
- for (parent_abs_path, (_, scan_id)) in &snapshot.ignores_by_parent_abs_path {
- if let Ok(parent_path) = parent_abs_path.strip_prefix(&snapshot.abs_path) {
- if *scan_id > snapshot.completed_scan_id
- && snapshot.entry_for_path(parent_path).is_some()
- {
- ignores_to_update.push(parent_abs_path.clone());
+ let abs_path = snapshot.abs_path.clone();
+ for (parent_abs_path, (_, needs_update)) in &mut snapshot.ignores_by_parent_abs_path {
+ if let Ok(parent_path) = parent_abs_path.strip_prefix(&abs_path) {
+ if *needs_update {
+ *needs_update = false;
+ if snapshot.snapshot.entry_for_path(parent_path).is_some() {
+ ignores_to_update.push(parent_abs_path.clone());
+ }
}
let ignore_path = parent_path.join(&*GITIGNORE);
- if snapshot.entry_for_path(ignore_path).is_none() {
+ if snapshot.snapshot.entry_for_path(ignore_path).is_none() {
ignores_to_delete.push(parent_abs_path.clone());
}
}
@@ -3012,8 +3298,8 @@ impl BackgroundScanner {
&self,
old_snapshot: &Snapshot,
new_snapshot: &Snapshot,
- event_paths: Vec<Arc<Path>>,
- ) -> HashMap<Arc<Path>, PathChange> {
+ event_paths: &[Arc<Path>],
+ ) -> HashMap<(Arc<Path>, ProjectEntryId), PathChange> {
use PathChange::{Added, AddedOrUpdated, Removed, Updated};
let mut changes = HashMap::default();
@@ -3022,7 +3308,7 @@ impl BackgroundScanner {
let received_before_initialized = !self.finished_initial_scan;
for path in event_paths {
- let path = PathKey(path);
+ let path = PathKey(path.clone());
old_paths.seek(&path, Bias::Left, &());
new_paths.seek(&path, Bias::Left, &());
@@ -3039,7 +3325,7 @@ impl BackgroundScanner {
match Ord::cmp(&old_entry.path, &new_entry.path) {
Ordering::Less => {
- changes.insert(old_entry.path.clone(), Removed);
+ changes.insert((old_entry.path.clone(), old_entry.id), Removed);
old_paths.next(&());
}
Ordering::Equal => {
@@ -3047,31 +3333,35 @@ impl BackgroundScanner {
// If the worktree was not fully initialized when this event was generated,
// we can't know whether this entry was added during the scan or whether
// it was merely updated.
- changes.insert(new_entry.path.clone(), AddedOrUpdated);
+ changes.insert(
+ (new_entry.path.clone(), new_entry.id),
+ AddedOrUpdated,
+ );
} else if old_entry.mtime != new_entry.mtime {
- changes.insert(new_entry.path.clone(), Updated);
+ changes.insert((new_entry.path.clone(), new_entry.id), Updated);
}
old_paths.next(&());
new_paths.next(&());
}
Ordering::Greater => {
- changes.insert(new_entry.path.clone(), Added);
+ changes.insert((new_entry.path.clone(), new_entry.id), Added);
new_paths.next(&());
}
}
}
(Some(old_entry), None) => {
- changes.insert(old_entry.path.clone(), Removed);
+ changes.insert((old_entry.path.clone(), old_entry.id), Removed);
old_paths.next(&());
}
(None, Some(new_entry)) => {
- changes.insert(new_entry.path.clone(), Added);
+ changes.insert((new_entry.path.clone(), new_entry.id), Added);
new_paths.next(&());
}
(None, None) => break,
}
}
}
+
changes
}
@@ -3212,17 +3502,13 @@ pub struct Traversal<'a> {
impl<'a> Traversal<'a> {
pub fn advance(&mut self) -> bool {
- self.advance_to_offset(self.offset() + 1)
- }
-
- pub fn advance_to_offset(&mut self, offset: usize) -> bool {
self.cursor.seek_forward(
&TraversalTarget::Count {
- count: offset,
+ count: self.end_offset() + 1,
include_dirs: self.include_dirs,
include_ignored: self.include_ignored,
},
- Bias::Right,
+ Bias::Left,
&(),
)
}
@@ -3249,11 +3535,17 @@ impl<'a> Traversal<'a> {
self.cursor.item()
}
- pub fn offset(&self) -> usize {
+ pub fn start_offset(&self) -> usize {
self.cursor
.start()
.count(self.include_dirs, self.include_ignored)
}
+
+ pub fn end_offset(&self) -> usize {
+ self.cursor
+ .end(&())
+ .count(self.include_dirs, self.include_ignored)
+ }
}
impl<'a> Iterator for Traversal<'a> {
@@ -3322,6 +3614,25 @@ impl<'a> Iterator for ChildEntriesIter<'a> {
}
}
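+/// Yields the contiguous run of entries whose paths start with `parent_path`,
+/// stopping at the first entry outside that subtree.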
+struct DescendentEntriesIter<'a> {
+ parent_path: &'a Path,
+ traversal: Traversal<'a>,
+}
+
+impl<'a> Iterator for DescendentEntriesIter<'a> {
+ type Item = &'a Entry;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(item) = self.traversal.entry() {
+ if item.path.starts_with(&self.parent_path) {
+ self.traversal.advance();
+ return Some(item);
+ }
+ }
+ None
+ }
+}
+
impl<'a> From<&'a Entry> for proto::Entry {
fn from(entry: &'a Entry) -> Self {
Self {
@@ -3436,6 +3747,105 @@ mod tests {
})
}
+ #[gpui::test]
+ async fn test_descendent_entries(cx: &mut TestAppContext) {
+ let fs = FakeFs::new(cx.background());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "a": "",
+ "b": {
+ "c": {
+ "d": ""
+ },
+ "e": {}
+ },
+ "f": "",
+ "g": {
+ "h": {}
+ },
+ "i": {
+ "j": {
+ "k": ""
+ },
+ "l": {
+
+ }
+ },
+ ".gitignore": "i/j\n",
+ }),
+ )
+ .await;
+
+ let http_client = FakeHttpClient::with_404_response();
+ let client = cx.read(|cx| Client::new(http_client, cx));
+
+ let tree = Worktree::local(
+ client,
+ Path::new("/root"),
+ true,
+ fs,
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+
+ tree.read_with(cx, |tree, _| {
+ assert_eq!(
+ tree.descendent_entries(false, false, Path::new("b"))
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ vec![Path::new("b/c/d"),]
+ );
+ assert_eq!(
+ tree.descendent_entries(true, false, Path::new("b"))
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ vec![
+ Path::new("b"),
+ Path::new("b/c"),
+ Path::new("b/c/d"),
+ Path::new("b/e"),
+ ]
+ );
+
+ assert_eq!(
+ tree.descendent_entries(false, false, Path::new("g"))
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ Vec::<PathBuf>::new()
+ );
+ assert_eq!(
+ tree.descendent_entries(true, false, Path::new("g"))
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ vec![Path::new("g"), Path::new("g/h"),]
+ );
+
+ assert_eq!(
+ tree.descendent_entries(false, false, Path::new("i"))
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ Vec::<PathBuf>::new()
+ );
+ assert_eq!(
+ tree.descendent_entries(false, true, Path::new("i"))
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ vec![Path::new("i/j/k")]
+ );
+ assert_eq!(
+ tree.descendent_entries(true, false, Path::new("i"))
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ vec![Path::new("i"), Path::new("i/l"),]
+ );
+ })
+ }
+
#[gpui::test(iterations = 10)]
async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
let fs = FakeFs::new(cx.background());
@@ -6,7 +6,7 @@ use gpui::{
actions,
anyhow::{anyhow, Result},
elements::{
- AnchorCorner, ChildView, ContainerStyle, Empty, Flex, Label, MouseEventHandler,
+ AnchorCorner, ChildView, ComponentHost, ContainerStyle, Empty, Flex, MouseEventHandler,
ParentElement, ScrollTarget, Stack, Svg, UniformList, UniformListState,
},
geometry::vector::Vector2F,
@@ -16,7 +16,10 @@ use gpui::{
ViewHandle, WeakViewHandle,
};
use menu::{Confirm, SelectNext, SelectPrev};
-use project::{Entry, EntryKind, Project, ProjectEntryId, ProjectPath, Worktree, WorktreeId};
+use project::{
+ repository::GitFileStatus, Entry, EntryKind, Project, ProjectEntryId, ProjectPath, Worktree,
+ WorktreeId,
+};
use settings::Settings;
use std::{
cmp::Ordering,
@@ -26,7 +29,7 @@ use std::{
path::Path,
sync::Arc,
};
-use theme::ProjectPanelEntry;
+use theme::{ui::FileName, ProjectPanelEntry};
use unicase::UniCase;
use workspace::Workspace;
@@ -86,6 +89,7 @@ pub struct EntryDetails {
is_editing: bool,
is_processing: bool,
is_cut: bool,
+ git_status: Option<GitFileStatus>,
}
actions!(
@@ -1008,6 +1012,15 @@ impl ProjectPanel {
let entry_range = range.start.saturating_sub(ix)..end_ix - ix;
for entry in &visible_worktree_entries[entry_range] {
+ let path = &entry.path;
+ let status = (entry.path.parent().is_some() && !entry.is_ignored)
+ .then(|| {
+ snapshot
+ .repo_for(path)
+ .and_then(|repo| repo.status_for_path(&snapshot, path))
+ })
+ .flatten();
+
let mut details = EntryDetails {
filename: entry
.path
@@ -1028,6 +1041,7 @@ impl ProjectPanel {
is_cut: self
.clipboard_entry
.map_or(false, |e| e.is_cut() && e.entry_id() == entry.id),
+ git_status: status,
};
if let Some(edit_state) = &self.edit_state {
@@ -1096,12 +1110,16 @@ impl ProjectPanel {
.flex(1.0, true)
.into_any()
} else {
- Label::new(details.filename.clone(), style.text.clone())
- .contained()
- .with_margin_left(style.icon_spacing)
- .aligned()
- .left()
- .into_any()
+ ComponentHost::new(FileName::new(
+ details.filename.clone(),
+ details.git_status,
+ FileName::style(style.text.clone(), &cx.global::<Settings>().theme),
+ ))
+ .contained()
+ .with_margin_left(style.icon_spacing)
+ .aligned()
+ .left()
+ .into_any()
})
.constrained()
.with_height(style.height)
@@ -986,8 +986,22 @@ message Entry {
message RepositoryEntry {
uint64 work_directory_id = 1;
optional string branch = 2;
+ repeated string removed_repo_paths = 3;
+ repeated StatusEntry updated_statuses = 4;
}
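+// A repository-relative path paired with its git status.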
+message StatusEntry {
+ string repo_path = 1;
+ GitStatus status = 2;
+}
+
+enum GitStatus {
+ Added = 0;
+ Modified = 1;
+ Conflict = 2;
+}
+
message BufferState {
uint64 id = 1;
optional File file = 2;
@@ -1,6 +1,7 @@
use super::{entity_messages, messages, request_messages, ConnectionId, TypedEnvelope};
use anyhow::{anyhow, Result};
use async_tungstenite::tungstenite::Message as WebSocketMessage;
+use collections::HashMap;
use futures::{SinkExt as _, StreamExt as _};
use prost::Message as _;
use serde::Serialize;
@@ -484,14 +485,21 @@ pub fn split_worktree_update(
mut message: UpdateWorktree,
max_chunk_size: usize,
) -> impl Iterator<Item = UpdateWorktree> {
- let mut done = false;
+ let mut done_files = false;
+
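+ // Index repositories by work-directory id so that each repository is sent no
+ // earlier than the chunk containing its work-directory entry; any leftovers
+ // are flushed with the final chunk.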
+ let mut repository_map = message
+ .updated_repositories
+ .into_iter()
+ .map(|repo| (repo.work_directory_id, repo))
+ .collect::<HashMap<_, _>>();
+
iter::from_fn(move || {
- if done {
+ if done_files {
return None;
}
let updated_entries_chunk_size = cmp::min(message.updated_entries.len(), max_chunk_size);
- let updated_entries = message
+ let updated_entries: Vec<_> = message
.updated_entries
.drain(..updated_entries_chunk_size)
.collect();
@@ -502,22 +510,28 @@ pub fn split_worktree_update(
.drain(..removed_entries_chunk_size)
.collect();
- done = message.updated_entries.is_empty() && message.removed_entries.is_empty();
+ done_files = message.updated_entries.is_empty() && message.removed_entries.is_empty();
- // Wait to send repositories until after we've guaranteed that their associated entries
- // will be read
- let updated_repositories = if done {
- mem::take(&mut message.updated_repositories)
- } else {
- Default::default()
- };
+ let mut updated_repositories = Vec::new();
- let removed_repositories = if done {
+ if !repository_map.is_empty() {
+ for entry in &updated_entries {
+ if let Some(repo) = repository_map.remove(&entry.id) {
+ updated_repositories.push(repo)
+ }
+ }
+ }
+
+ let removed_repositories = if done_files {
mem::take(&mut message.removed_repositories)
} else {
Default::default()
};
+ if done_files {
+ updated_repositories.extend(mem::take(&mut repository_map).into_values());
+ }
+
Some(UpdateWorktree {
project_id: message.project_id,
worktree_id: message.worktree_id,
@@ -526,7 +540,7 @@ pub fn split_worktree_update(
updated_entries,
removed_entries,
scan_id: message.scan_id,
- is_last_update: done && message.is_last_update,
+ is_last_update: done_files && message.is_last_update,
updated_repositories,
removed_repositories,
})
@@ -6,4 +6,4 @@ pub use conn::Connection;
pub use peer::*;
mod macros;
-pub const PROTOCOL_VERSION: u32 = 54;
+pub const PROTOCOL_VERSION: u32 = 55;
@@ -30,6 +30,7 @@ smol.workspace = true
glob.workspace = true
[dev-dependencies]
+client = { path = "../client", features = ["test-support"] }
editor = { path = "../editor", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
serde_json.workspace = true
@@ -1287,6 +1287,7 @@ pub mod tests {
cx.set_global(settings);
language::init(cx);
+ client::init_settings(cx);
editor::init_settings(cx);
workspace::init_settings(cx);
});
@@ -5,7 +5,7 @@ use arrayvec::ArrayVec;
pub use cursor::{Cursor, FilterCursor, Iter};
use std::marker::PhantomData;
use std::{cmp::Ordering, fmt, iter::FromIterator, sync::Arc};
-pub use tree_map::{TreeMap, TreeSet};
+pub use tree_map::{MapSeekTarget, TreeMap, TreeSet};
#[cfg(test)]
const TREE_BASE: usize = 2;
@@ -1,14 +1,14 @@
use std::{cmp::Ordering, fmt::Debug};
-use crate::{Bias, Dimension, Item, KeyedItem, SeekTarget, SumTree, Summary};
+use crate::{Bias, Dimension, Edit, Item, KeyedItem, SeekTarget, SumTree, Summary};
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, PartialEq, Eq)]
pub struct TreeMap<K, V>(SumTree<MapEntry<K, V>>)
where
K: Clone + Debug + Default + Ord,
V: Clone + Debug;
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, PartialEq, Eq)]
pub struct MapEntry<K, V> {
key: K,
value: V,
@@ -73,6 +73,17 @@ impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
removed
}
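+ /// Removes every entry in the range `[start, end)`, as defined by the given
+ /// seek targets, by splicing together the tree before `start` and the suffix
+ /// beginning at `end`.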
+ pub fn remove_range(&mut self, start: &impl MapSeekTarget<K>, end: &impl MapSeekTarget<K>) {
+ let start = MapSeekTargetAdaptor(start);
+ let end = MapSeekTargetAdaptor(end);
+ let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
+ let mut new_tree = cursor.slice(&start, Bias::Left, &());
+ cursor.seek(&end, Bias::Left, &());
+ new_tree.push_tree(cursor.suffix(&()), &());
+ drop(cursor);
+ self.0 = new_tree;
+ }
+
/// Returns the key-value pair with the greatest key less than or equal to the given key.
pub fn closest(&self, key: &K) -> Option<(&K, &V)> {
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
@@ -82,6 +93,16 @@ impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
cursor.item().map(|item| (&item.key, &item.value))
}
+ pub fn iter_from<'a>(&'a self, from: &'a K) -> impl Iterator<Item = (&K, &V)> + '_ {
+ let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
+ let from_key = MapKeyRef(Some(from));
+ cursor.seek(&from_key, Bias::Left, &());
+
+ cursor
+ .into_iter()
+ .map(|map_entry| (&map_entry.key, &map_entry.value))
+ }
+
pub fn update<F, T>(&mut self, key: &K, f: F) -> Option<T>
where
F: FnOnce(&mut V) -> T,
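`iter_from` seeks with `Bias::Left`, so iteration begins at the first key greater than or equal to `from` and runs to the end of the map; callers bound it themselves, e.g. with `take_while`, as the new `test_iter_from` below does. A minimal numeric sketch:

```rust
let mut map = TreeMap::default();
for key in [1, 3, 5, 7] {
    map.insert(key, key * 10);
}

// Seeks to the first key >= 2, which is 3.
let tail: Vec<_> = map.iter_from(&2).map(|(k, v)| (*k, *v)).collect();
assert_eq!(tail, [(3, 30), (5, 50), (7, 70)]);
```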
@@ -125,6 +146,45 @@ impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
pub fn values(&self) -> impl Iterator<Item = &V> + '_ {
self.0.iter().map(|entry| &entry.value)
}
+
+ pub fn insert_tree(&mut self, other: TreeMap<K, V>) {
+ let edits = other
+ .iter()
+ .map(|(key, value)| {
+ Edit::Insert(MapEntry {
+ key: key.to_owned(),
+ value: value.to_owned(),
+ })
+ })
+ .collect();
+
+ self.0.edit(edits, &());
+ }
+}
+
+#[derive(Debug)]
+struct MapSeekTargetAdaptor<'a, T>(&'a T);
+
+impl<'a, K: Debug + Clone + Default + Ord, T: MapSeekTarget<K>>
+ SeekTarget<'a, MapKey<K>, MapKeyRef<'a, K>> for MapSeekTargetAdaptor<'_, T>
+{
+ fn cmp(&self, cursor_location: &MapKeyRef<K>, _: &()) -> Ordering {
+ if let Some(key) = &cursor_location.0 {
+ MapSeekTarget::cmp_cursor(self.0, key)
+ } else {
+ Ordering::Greater
+ }
+ }
+}
+
+pub trait MapSeekTarget<K>: Debug {
+ fn cmp_cursor(&self, cursor_location: &K) -> Ordering;
+}
+
+impl<K: Debug + Ord> MapSeekTarget<K> for K {
+ fn cmp_cursor(&self, cursor_location: &K) -> Ordering {
+ self.cmp(cursor_location)
+ }
}
impl<K, V> Default for TreeMap<K, V>
@@ -186,7 +246,7 @@ where
K: Clone + Debug + Default + Ord,
{
fn cmp(&self, cursor_location: &MapKeyRef<K>, _: &()) -> Ordering {
- self.0.cmp(&cursor_location.0)
+ Ord::cmp(&self.0, &cursor_location.0)
}
}
@@ -272,4 +332,112 @@ mod tests {
map.retain(|key, _| *key % 2 == 0);
assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&4, &"d"), (&6, &"f")]);
}
+
+ #[test]
+ fn test_iter_from() {
+ let mut map = TreeMap::default();
+
+ map.insert("a", 1);
+ map.insert("b", 2);
+ map.insert("baa", 3);
+ map.insert("baaab", 4);
+ map.insert("c", 5);
+
+ let result = map
+ .iter_from(&"ba")
+ .take_while(|(key, _)| key.starts_with(&"ba"))
+ .collect::<Vec<_>>();
+
+ assert_eq!(result.len(), 2);
+ assert!(result.iter().find(|(k, _)| k == &&"baa").is_some());
+ assert!(result.iter().find(|(k, _)| k == &&"baaab").is_some());
+
+ let result = map
+ .iter_from(&"c")
+ .take_while(|(key, _)| key.starts_with(&"c"))
+ .collect::<Vec<_>>();
+
+ assert_eq!(result.len(), 1);
+ assert!(result.iter().find(|(k, _)| k == &&"c").is_some());
+ }
+
+ #[test]
+ fn test_insert_tree() {
+ let mut map = TreeMap::default();
+ map.insert("a", 1);
+ map.insert("b", 2);
+ map.insert("c", 3);
+
+ let mut other = TreeMap::default();
+ other.insert("a", 2);
+ other.insert("b", 2);
+ other.insert("d", 4);
+
+ map.insert_tree(other);
+
+ assert_eq!(map.iter().count(), 4);
+ assert_eq!(map.get(&"a"), Some(&2));
+ assert_eq!(map.get(&"b"), Some(&2));
+ assert_eq!(map.get(&"c"), Some(&3));
+ assert_eq!(map.get(&"d"), Some(&4));
+ }
+
+ #[test]
+ fn test_remove_between_and_path_successor() {
+ use std::path::{Path, PathBuf};
+
+ #[derive(Debug)]
+ pub struct PathDescendants<'a>(&'a Path);
+
+ impl MapSeekTarget<PathBuf> for PathDescendants<'_> {
+ fn cmp_cursor(&self, key: &PathBuf) -> Ordering {
+ if key.starts_with(&self.0) {
+ Ordering::Greater
+ } else {
+ self.0.cmp(key)
+ }
+ }
+ }
+
+ let mut map = TreeMap::default();
+
+ map.insert(PathBuf::from("a"), 1);
+ map.insert(PathBuf::from("a/a"), 1);
+ map.insert(PathBuf::from("b"), 2);
+ map.insert(PathBuf::from("b/a/a"), 3);
+ map.insert(PathBuf::from("b/a/a/a/b"), 4);
+ map.insert(PathBuf::from("c"), 5);
+ map.insert(PathBuf::from("c/a"), 6);
+
+ map.remove_range(
+ &PathBuf::from("b/a"),
+ &PathDescendants(&PathBuf::from("b/a")),
+ );
+
+ assert_eq!(map.get(&PathBuf::from("a")), Some(&1));
+ assert_eq!(map.get(&PathBuf::from("a/a")), Some(&1));
+ assert_eq!(map.get(&PathBuf::from("b")), Some(&2));
+ assert_eq!(map.get(&PathBuf::from("b/a/a")), None);
+ assert_eq!(map.get(&PathBuf::from("b/a/a/a/b")), None);
+ assert_eq!(map.get(&PathBuf::from("c")), Some(&5));
+ assert_eq!(map.get(&PathBuf::from("c/a")), Some(&6));
+
+ map.remove_range(&PathBuf::from("c"), &PathDescendants(&PathBuf::from("c")));
+
+ assert_eq!(map.get(&PathBuf::from("a")), Some(&1));
+ assert_eq!(map.get(&PathBuf::from("a/a")), Some(&1));
+ assert_eq!(map.get(&PathBuf::from("b")), Some(&2));
+ assert_eq!(map.get(&PathBuf::from("c")), None);
+ assert_eq!(map.get(&PathBuf::from("c/a")), None);
+
+ map.remove_range(&PathBuf::from("a"), &PathDescendants(&PathBuf::from("a")));
+
+ assert_eq!(map.get(&PathBuf::from("a")), None);
+ assert_eq!(map.get(&PathBuf::from("a/a")), None);
+ assert_eq!(map.get(&PathBuf::from("b")), Some(&2));
+
+ map.remove_range(&PathBuf::from("b"), &PathDescendants(&PathBuf::from("b")));
+
+ assert_eq!(map.get(&PathBuf::from("b")), None);
+ }
}
@@ -13,6 +13,7 @@ doctest = false
[dependencies]
gpui = { path = "../gpui" }
+fs = { path = "../fs" }
anyhow.workspace = true
indexmap = "1.6.2"
parking_lot.workspace = true
@@ -1,9 +1,10 @@
use std::borrow::Cow;
+use fs::repository::GitFileStatus;
use gpui::{
color::Color,
elements::{
- ConstrainedBox, Container, ContainerStyle, Empty, Flex, KeystrokeLabel, Label,
+ ConstrainedBox, Container, ContainerStyle, Empty, Flex, KeystrokeLabel, Label, LabelStyle,
MouseEventHandler, ParentElement, Stack, Svg,
},
fonts::TextStyle,
@@ -11,11 +12,11 @@ use gpui::{
platform,
platform::MouseButton,
scene::MouseClick,
- Action, Element, EventContext, MouseState, View, ViewContext,
+ Action, AnyElement, Element, EventContext, MouseState, View, ViewContext,
};
use serde::Deserialize;
-use crate::{ContainedText, Interactive};
+use crate::{ContainedText, Interactive, Theme};
#[derive(Clone, Deserialize, Default)]
pub struct CheckboxStyle {
@@ -252,3 +253,53 @@ where
.constrained()
.with_height(style.dimensions().y())
}
+
+pub struct FileName {
+ filename: String,
+ git_status: Option<GitFileStatus>,
+ style: FileNameStyle,
+}
+
+pub struct FileNameStyle {
+ template_style: LabelStyle,
+ git_inserted: Color,
+ git_modified: Color,
+ git_deleted: Color,
+}
+
+impl FileName {
+ pub fn new(filename: String, git_status: Option<GitFileStatus>, style: FileNameStyle) -> Self {
+ FileName {
+ filename,
+ git_status,
+ style,
+ }
+ }
+
+ pub fn style<I: Into<LabelStyle>>(style: I, theme: &Theme) -> FileNameStyle {
+ FileNameStyle {
+ template_style: style.into(),
+ git_inserted: theme.editor.diff.inserted,
+ git_modified: theme.editor.diff.modified,
+ git_deleted: theme.editor.diff.deleted,
+ }
+ }
+}
+
+impl<V: View> gpui::elements::Component<V> for FileName {
+ fn render(&self, _: &mut V, _: &mut ViewContext<V>) -> AnyElement<V> {
+ // Prepare colors for git statuses
+ let mut filename_text_style = self.style.template_style.text.clone();
+ filename_text_style.color = self
+ .git_status
+ .as_ref()
+ .map(|status| match status {
+ GitFileStatus::Added => self.style.git_inserted,
+ GitFileStatus::Modified => self.style.git_modified,
+ GitFileStatus::Conflict => self.style.git_deleted,
+ })
+ .unwrap_or(self.style.template_style.text.color);
+
+ Label::new(self.filename.clone(), filename_text_style).into_any()
+ }
+}
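A sketch of how a caller might use the new component, assuming a `LabelStyle`, a `&Theme`, and a view context are already in scope (those names are illustrative and do not come from the hunk above; only `FileName::style`, `FileName::new`, and the `Component::render` signature do):

```rust
// Hypothetical call site: render a file label tinted by its git status.
// `label_style`, `theme`, `filename`, `git_status`, `view`, and `cx` are
// assumed bindings at the caller; `Component` must be in scope for render().
let style = FileName::style(label_style, theme);
let element = FileName::new(filename, git_status, style).render(view, cx);
```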
@@ -26,6 +26,7 @@ serde.workspace = true
serde_json.workspace = true
git2 = { version = "0.15", default-features = false, optional = true }
dirs = "3.0"
+take-until = "0.2.0"
[dev-dependencies]
tempdir.workspace = true
@@ -1,5 +1,7 @@
use std::path::{Path, PathBuf};
+use serde::{Deserialize, Serialize};
+
lazy_static::lazy_static! {
pub static ref HOME: PathBuf = dirs::home_dir().expect("failed to determine home directory");
pub static ref CONFIG_DIR: PathBuf = HOME.join(".config").join("zed");
@@ -70,3 +72,208 @@ pub fn compact(path: &Path) -> PathBuf {
path.to_path_buf()
}
}
+
+/// The delimiter used when parsing `path_query:row_number:column_number` strings.
+pub const FILE_ROW_COLUMN_DELIMITER: char = ':';
+
+/// A representation of a path-like string with optional row and column numbers.
+/// Example matching values: `te`, `test.rs:22`, `te:22:5`, etc.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+pub struct PathLikeWithPosition<P> {
+ pub path_like: P,
+ pub row: Option<u32>,
+ // Absent if row is absent.
+ pub column: Option<u32>,
+}
+
+impl<P> PathLikeWithPosition<P> {
+ /// Parses a string that possibly has `:row:column` suffix.
+ /// Ignores trailing `:`s, so `test.rs:22:` is parsed as `test.rs:22`.
+    /// If parsing of either the row or the column component fails, the whole string is treated as the path-like value.
+ pub fn parse_str<E>(
+ s: &str,
+ parse_path_like_str: impl Fn(&str) -> Result<P, E>,
+ ) -> Result<Self, E> {
+ let fallback = |fallback_str| {
+ Ok(Self {
+ path_like: parse_path_like_str(fallback_str)?,
+ row: None,
+ column: None,
+ })
+ };
+
+ match s.trim().split_once(FILE_ROW_COLUMN_DELIMITER) {
+ Some((path_like_str, maybe_row_and_col_str)) => {
+ let path_like_str = path_like_str.trim();
+ let maybe_row_and_col_str = maybe_row_and_col_str.trim();
+ if path_like_str.is_empty() {
+ fallback(s)
+ } else if maybe_row_and_col_str.is_empty() {
+ fallback(path_like_str)
+ } else {
+ let (row_parse_result, maybe_col_str) =
+ match maybe_row_and_col_str.split_once(FILE_ROW_COLUMN_DELIMITER) {
+ Some((maybe_row_str, maybe_col_str)) => {
+ (maybe_row_str.parse::<u32>(), maybe_col_str.trim())
+ }
+ None => (maybe_row_and_col_str.parse::<u32>(), ""),
+ };
+
+ match row_parse_result {
+ Ok(row) => {
+ if maybe_col_str.is_empty() {
+ Ok(Self {
+ path_like: parse_path_like_str(path_like_str)?,
+ row: Some(row),
+ column: None,
+ })
+ } else {
+ match maybe_col_str.parse::<u32>() {
+ Ok(col) => Ok(Self {
+ path_like: parse_path_like_str(path_like_str)?,
+ row: Some(row),
+ column: Some(col),
+ }),
+ Err(_) => fallback(s),
+ }
+ }
+ }
+ Err(_) => fallback(s),
+ }
+ }
+ }
+ None => fallback(s),
+ }
+ }
+
+ pub fn map_path_like<P2, E>(
+ self,
+ mapping: impl FnOnce(P) -> Result<P2, E>,
+ ) -> Result<PathLikeWithPosition<P2>, E> {
+ Ok(PathLikeWithPosition {
+ path_like: mapping(self.path_like)?,
+ row: self.row,
+ column: self.column,
+ })
+ }
+
+ pub fn to_string(&self, path_like_to_string: impl Fn(&P) -> String) -> String {
+ let path_like_string = path_like_to_string(&self.path_like);
+ if let Some(row) = self.row {
+ if let Some(column) = self.column {
+ format!("{path_like_string}:{row}:{column}")
+ } else {
+ format!("{path_like_string}:{row}")
+ }
+ } else {
+ path_like_string
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ type TestPath = PathLikeWithPosition<String>;
+
+ fn parse_str(s: &str) -> TestPath {
+ TestPath::parse_str(s, |s| Ok::<_, std::convert::Infallible>(s.to_string()))
+ .expect("infallible")
+ }
+
+ #[test]
+ fn path_with_position_parsing_positive() {
+ let input_and_expected = [
+ (
+ "test_file.rs",
+ PathLikeWithPosition {
+ path_like: "test_file.rs".to_string(),
+ row: None,
+ column: None,
+ },
+ ),
+ (
+ "test_file.rs:1",
+ PathLikeWithPosition {
+ path_like: "test_file.rs".to_string(),
+ row: Some(1),
+ column: None,
+ },
+ ),
+ (
+ "test_file.rs:1:2",
+ PathLikeWithPosition {
+ path_like: "test_file.rs".to_string(),
+ row: Some(1),
+ column: Some(2),
+ },
+ ),
+ ];
+
+ for (input, expected) in input_and_expected {
+ let actual = parse_str(input);
+ assert_eq!(
+ actual, expected,
+ "For positive case input str '{input}', got a parse mismatch"
+ );
+ }
+ }
+
+ #[test]
+ fn path_with_position_parsing_negative() {
+ for input in [
+ "test_file.rs:a",
+ "test_file.rs:a:b",
+ "test_file.rs::",
+ "test_file.rs::1",
+ "test_file.rs:1::",
+ "test_file.rs::1:2",
+ "test_file.rs:1::2",
+ "test_file.rs:1:2:",
+ "test_file.rs:1:2:3",
+ ] {
+ let actual = parse_str(input);
+ assert_eq!(
+ actual,
+ PathLikeWithPosition {
+ path_like: input.to_string(),
+ row: None,
+ column: None,
+ },
+ "For negative case input str '{input}', got a parse mismatch"
+ );
+ }
+ }
+
+ // Trim off trailing `:`s for otherwise valid input.
+ #[test]
+ fn path_with_position_parsing_special() {
+ let input_and_expected = [
+ (
+ "test_file.rs:",
+ PathLikeWithPosition {
+ path_like: "test_file.rs".to_string(),
+ row: None,
+ column: None,
+ },
+ ),
+ (
+ "test_file.rs:1:",
+ PathLikeWithPosition {
+ path_like: "test_file.rs".to_string(),
+ row: Some(1),
+ column: None,
+ },
+ ),
+ ];
+
+ for (input, expected) in input_and_expected {
+ let actual = parse_str(input);
+ assert_eq!(
+ actual, expected,
+ "For special case input str '{input}', got a parse mismatch"
+ );
+ }
+ }
+}
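Round-tripping through a real `PathBuf` is the typical use; a minimal sketch built on the API above:

```rust
use std::{convert::Infallible, path::PathBuf};
use util::paths::PathLikeWithPosition;

let path = PathLikeWithPosition::parse_str("src/main.rs:10:5", |s| {
    Ok::<_, Infallible>(PathBuf::from(s))
})
.expect("infallible");

assert_eq!(path.path_like, PathBuf::from("src/main.rs"));
assert_eq!((path.row, path.column), (Some(10), Some(5)));
assert_eq!(
    path.to_string(|p| p.display().to_string()),
    "src/main.rs:10:5"
);
```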
@@ -17,6 +17,8 @@ pub use backtrace::Backtrace;
use futures::Future;
use rand::{seq::SliceRandom, Rng};
+pub use take_until::*;
+
#[macro_export]
macro_rules! debug_panic {
( $($fmt_arg:tt)* ) => {
@@ -464,7 +464,6 @@ mod tests {
let (_, _workspace) = cx.add_window(|cx| {
Workspace::new(
- Some(serialized_workspace),
0,
project.clone(),
Arc::new(AppState {
@@ -482,6 +481,11 @@ mod tests {
)
});
+ cx.update(|cx| {
+ Workspace::load_workspace(_workspace.downgrade(), serialized_workspace, Vec::new(), cx)
+ })
+ .await;
+
cx.foreground().run_until_parked();
//Should terminate
}
@@ -607,7 +611,6 @@ mod tests {
let project = Project::test(fs, [], cx).await;
let (window_id, workspace) = cx.add_window(|cx| {
Workspace::new(
- None,
0,
project.clone(),
Arc::new(AppState {
@@ -1,6 +1,6 @@
use crate::{
- dock::DockPosition, DockAnchor, ItemDeserializers, Member, Pane, PaneAxis, Workspace,
- WorkspaceId,
+ dock::DockPosition, item::ItemHandle, DockAnchor, ItemDeserializers, Member, Pane, PaneAxis,
+ Workspace, WorkspaceId,
};
use anyhow::{anyhow, Context, Result};
use async_recursion::async_recursion;
@@ -97,17 +97,23 @@ impl SerializedPaneGroup {
workspace_id: WorkspaceId,
workspace: &WeakViewHandle<Workspace>,
cx: &mut AsyncAppContext,
- ) -> Option<(Member, Option<ViewHandle<Pane>>)> {
+ ) -> Option<(
+ Member,
+ Option<ViewHandle<Pane>>,
+ Vec<Option<Box<dyn ItemHandle>>>,
+ )> {
match self {
SerializedPaneGroup::Group { axis, children } => {
let mut current_active_pane = None;
let mut members = Vec::new();
+ let mut items = Vec::new();
for child in children {
- if let Some((new_member, active_pane)) = child
+ if let Some((new_member, active_pane, new_items)) = child
.deserialize(project, workspace_id, workspace, cx)
.await
{
members.push(new_member);
+ items.extend(new_items);
current_active_pane = current_active_pane.or(active_pane);
}
}
@@ -117,7 +123,7 @@ impl SerializedPaneGroup {
}
if members.len() == 1 {
- return Some((members.remove(0), current_active_pane));
+ return Some((members.remove(0), current_active_pane, items));
}
Some((
@@ -126,6 +132,7 @@ impl SerializedPaneGroup {
members,
}),
current_active_pane,
+ items,
))
}
SerializedPaneGroup::Pane(serialized_pane) => {
@@ -133,7 +140,7 @@ impl SerializedPaneGroup {
.update(cx, |workspace, cx| workspace.add_pane(cx).downgrade())
.log_err()?;
let active = serialized_pane.active;
- serialized_pane
+ let new_items = serialized_pane
.deserialize_to(project, &pane, workspace_id, workspace, cx)
.await
.log_err()?;
@@ -143,7 +150,7 @@ impl SerializedPaneGroup {
.log_err()?
{
let pane = pane.upgrade(cx)?;
- Some((Member::Pane(pane.clone()), active.then(|| pane)))
+ Some((Member::Pane(pane.clone()), active.then(|| pane), new_items))
} else {
let pane = pane.upgrade(cx)?;
workspace
@@ -174,7 +181,8 @@ impl SerializedPane {
workspace_id: WorkspaceId,
workspace: &WeakViewHandle<Workspace>,
cx: &mut AsyncAppContext,
- ) -> Result<()> {
+ ) -> Result<Vec<Option<Box<dyn ItemHandle>>>> {
+ let mut items = Vec::new();
let mut active_item_index = None;
for (index, item) in self.children.iter().enumerate() {
let project = project.clone();
@@ -192,6 +200,8 @@ impl SerializedPane {
.await
.log_err();
+ items.push(item_handle.clone());
+
if let Some(item_handle) = item_handle {
workspace.update(cx, |workspace, cx| {
let pane_handle = pane_handle
@@ -213,7 +223,7 @@ impl SerializedPane {
})?;
}
- anyhow::Ok(())
+ anyhow::Ok(items)
}
}
@@ -83,7 +83,7 @@ use status_bar::StatusBar;
pub use status_bar::StatusItemView;
use theme::{Theme, ThemeRegistry};
pub use toolbar::{ToolbarItemLocation, ToolbarItemView};
-use util::{paths, ResultExt};
+use util::{async_iife, paths, ResultExt};
pub use workspace_settings::{AutosaveSetting, DockAnchor, GitGutterSetting, WorkspaceSettings};
lazy_static! {
@@ -241,7 +241,6 @@ pub fn init(app_state: Arc<AppState>, cx: &mut AppContext) {
},
);
cx.add_action(Workspace::toggle_sidebar_item);
- cx.add_action(Workspace::focus_center);
cx.add_action(|workspace: &mut Workspace, _: &ActivatePreviousPane, cx| {
workspace.activate_previous_pane(cx)
});
@@ -509,7 +508,6 @@ struct FollowerState {
impl Workspace {
pub fn new(
- serialized_workspace: Option<SerializedWorkspace>,
workspace_id: WorkspaceId,
project: ModelHandle<Project>,
app_state: Arc<AppState>,
@@ -675,18 +673,6 @@ impl Workspace {
this.project_remote_id_changed(project.read(cx).remote_id(), cx);
cx.defer(|this, cx| this.update_window_title(cx));
- if let Some(serialized_workspace) = serialized_workspace {
- cx.defer(move |_, cx| {
- Self::load_from_serialized_workspace(weak_handle, serialized_workspace, cx)
- });
- } else if project.read(cx).is_local() {
- if settings::get_setting::<WorkspaceSettings>(None, cx).default_dock_anchor
- != DockAnchor::Expanded
- {
- Dock::show(&mut this, false, cx);
- }
- }
-
this
}
@@ -708,18 +694,15 @@ impl Workspace {
);
cx.spawn(|mut cx| async move {
- let mut serialized_workspace =
- persistence::DB.workspace_for_roots(&abs_paths.as_slice());
+ let serialized_workspace = persistence::DB.workspace_for_roots(&abs_paths.as_slice());
- let paths_to_open = serialized_workspace
- .as_ref()
- .map(|workspace| workspace.location.paths())
- .unwrap_or(Arc::new(abs_paths));
+ let paths_to_open = Arc::new(abs_paths);
// Get project paths for all of the abs_paths
let mut worktree_roots: HashSet<Arc<Path>> = Default::default();
- let mut project_paths = Vec::new();
- for path in paths_to_open.iter() {
+ let mut project_paths: Vec<(PathBuf, Option<ProjectPath>)> =
+ Vec::with_capacity(paths_to_open.len());
+ for path in paths_to_open.iter().cloned() {
if let Some((worktree, project_entry)) = cx
.update(|cx| {
Workspace::project_path_for_path(project_handle.clone(), &path, true, cx)
@@ -728,9 +711,9 @@ impl Workspace {
.log_err()
{
worktree_roots.insert(worktree.read_with(&mut cx, |tree, _| tree.abs_path()));
- project_paths.push(Some(project_entry));
+ project_paths.push((path, Some(project_entry)));
} else {
- project_paths.push(None);
+ project_paths.push((path, None));
}
}
@@ -750,27 +733,17 @@ impl Workspace {
))
});
- let build_workspace =
- |cx: &mut ViewContext<Workspace>,
- serialized_workspace: Option<SerializedWorkspace>| {
- let mut workspace = Workspace::new(
- serialized_workspace,
- workspace_id,
- project_handle.clone(),
- app_state.clone(),
- cx,
- );
- (app_state.initialize_workspace)(&mut workspace, &app_state, cx);
- workspace
- };
+ let build_workspace = |cx: &mut ViewContext<Workspace>| {
+ let mut workspace =
+ Workspace::new(workspace_id, project_handle.clone(), app_state.clone(), cx);
+ (app_state.initialize_workspace)(&mut workspace, &app_state, cx);
+
+ workspace
+ };
let workspace = requesting_window_id
.and_then(|window_id| {
- cx.update(|cx| {
- cx.replace_root_view(window_id, |cx| {
- build_workspace(cx, serialized_workspace.take())
- })
- })
+ cx.update(|cx| cx.replace_root_view(window_id, |cx| build_workspace(cx)))
})
.unwrap_or_else(|| {
let (bounds, display) = if let Some(bounds) = window_bounds_override {
@@ -808,44 +781,21 @@ impl Workspace {
// Use the serialized workspace to construct the new window
cx.add_window(
(app_state.build_window_options)(bounds, display, cx.platform().as_ref()),
- |cx| build_workspace(cx, serialized_workspace),
+ |cx| build_workspace(cx),
)
.1
});
let workspace = workspace.downgrade();
notify_if_database_failed(&workspace, &mut cx);
-
- // Call open path for each of the project paths
- // (this will bring them to the front if they were in the serialized workspace)
- debug_assert!(paths_to_open.len() == project_paths.len());
- let tasks = paths_to_open
- .iter()
- .cloned()
- .zip(project_paths.into_iter())
- .map(|(abs_path, project_path)| {
- let workspace = workspace.clone();
- cx.spawn(|mut cx| {
- let fs = app_state.fs.clone();
- async move {
- let project_path = project_path?;
- if fs.is_file(&abs_path).await {
- Some(
- workspace
- .update(&mut cx, |workspace, cx| {
- workspace.open_path(project_path, None, true, cx)
- })
- .log_err()?
- .await,
- )
- } else {
- None
- }
- }
- })
- });
-
- let opened_items = futures::future::join_all(tasks.into_iter()).await;
+ let opened_items = open_items(
+ serialized_workspace,
+ &workspace,
+ project_paths,
+ app_state,
+ cx,
+ )
+ .await;
(workspace, opened_items)
})
@@ -1136,6 +1086,8 @@ impl Workspace {
visible: bool,
cx: &mut ViewContext<Self>,
) -> Task<Vec<Option<Result<Box<dyn ItemHandle>, anyhow::Error>>>> {
+ log::info!("open paths {:?}", abs_paths);
+
let fs = self.app_state.fs.clone();
// Sort the paths to ensure we add worktrees for parents before their children.
@@ -1432,11 +1384,6 @@ impl Workspace {
cx.notify();
}
- pub fn focus_center(&mut self, _: &menu::Cancel, cx: &mut ViewContext<Self>) {
- cx.focus_self();
- cx.notify();
- }
-
fn add_pane(&mut self, cx: &mut ViewContext<Self>) -> ViewHandle<Pane> {
let pane = cx.add_view(|cx| {
Pane::new(
@@ -2559,13 +2506,15 @@ impl Workspace {
}
}
- fn load_from_serialized_workspace(
+ pub(crate) fn load_workspace(
workspace: WeakViewHandle<Workspace>,
serialized_workspace: SerializedWorkspace,
+ paths_to_open: Vec<Option<ProjectPath>>,
cx: &mut AppContext,
- ) {
+ ) -> Task<Vec<Option<Result<Box<dyn ItemHandle>, anyhow::Error>>>> {
cx.spawn(|mut cx| async move {
- let (project, dock_pane_handle, old_center_pane) =
+ let result = async_iife! {{
+ let (project, dock_pane_handle, old_center_pane) =
workspace.read_with(&cx, |workspace, _| {
(
workspace.project().clone(),
@@ -2574,74 +2523,107 @@ impl Workspace {
)
})?;
- serialized_workspace
- .dock_pane
- .deserialize_to(
- &project,
- &dock_pane_handle,
- serialized_workspace.id,
- &workspace,
- &mut cx,
- )
- .await?;
+ let dock_items = serialized_workspace
+ .dock_pane
+ .deserialize_to(
+ &project,
+ &dock_pane_handle,
+ serialized_workspace.id,
+ &workspace,
+ &mut cx,
+ )
+ .await?;
+
+ let mut center_items = None;
+ let mut center_group = None;
+            // Traverse the splits tree, rebuilding the center pane group and collecting its items
+ if let Some((group, active_pane, items)) = serialized_workspace
+ .center_group
+ .deserialize(&project, serialized_workspace.id, &workspace, &mut cx)
+ .await {
+ center_items = Some(items);
+ center_group = Some((group, active_pane))
+ }
+
+ let resulting_list = cx.read(|cx| {
+ let mut opened_items = center_items
+ .unwrap_or_default()
+ .into_iter()
+ .chain(dock_items.into_iter())
+ .filter_map(|item| {
+ let item = item?;
+ let project_path = item.project_path(cx)?;
+ Some((project_path, item))
+ })
+ .collect::<HashMap<_, _>>();
- // Traverse the splits tree and add to things
- let center_group = serialized_workspace
- .center_group
- .deserialize(&project, serialized_workspace.id, &workspace, &mut cx)
- .await;
+ paths_to_open
+ .into_iter()
+ .map(|path_to_open| {
+ path_to_open.map(|path_to_open| {
+ Ok(opened_items.remove(&path_to_open))
+ })
+ .transpose()
+ .map(|item| item.flatten())
+ .transpose()
+ })
+ .collect::<Vec<_>>()
+ });
- // Remove old panes from workspace panes list
- workspace.update(&mut cx, |workspace, cx| {
- if let Some((center_group, active_pane)) = center_group {
- workspace.remove_panes(workspace.center.root.clone(), cx);
+ // Remove old panes from workspace panes list
+ workspace.update(&mut cx, |workspace, cx| {
+ if let Some((center_group, active_pane)) = center_group {
+ workspace.remove_panes(workspace.center.root.clone(), cx);
- // Swap workspace center group
- workspace.center = PaneGroup::with_root(center_group);
+ // Swap workspace center group
+ workspace.center = PaneGroup::with_root(center_group);
- // Change the focus to the workspace first so that we retrigger focus in on the pane.
- cx.focus_self();
+ // Change the focus to the workspace first so that we retrigger focus in on the pane.
+ cx.focus_self();
- if let Some(active_pane) = active_pane {
- cx.focus(&active_pane);
+ if let Some(active_pane) = active_pane {
+ cx.focus(&active_pane);
+ } else {
+ cx.focus(workspace.panes.last().unwrap());
+ }
} else {
- cx.focus(workspace.panes.last().unwrap());
+ let old_center_handle = old_center_pane.and_then(|weak| weak.upgrade(cx));
+ if let Some(old_center_handle) = old_center_handle {
+ cx.focus(&old_center_handle)
+ } else {
+ cx.focus_self()
+ }
}
- } else {
- let old_center_handle = old_center_pane.and_then(|weak| weak.upgrade(cx));
- if let Some(old_center_handle) = old_center_handle {
- cx.focus(&old_center_handle)
- } else {
- cx.focus_self()
+
+ if workspace.left_sidebar().read(cx).is_open()
+ != serialized_workspace.left_sidebar_open
+ {
+ workspace.toggle_sidebar(SidebarSide::Left, cx);
}
- }
- if workspace.left_sidebar().read(cx).is_open()
- != serialized_workspace.left_sidebar_open
- {
- workspace.toggle_sidebar(SidebarSide::Left, cx);
- }
+                // Note: without after_window_update, focus_self() and the focus
+                // generated by the dock would keep alternating, because each
+                // deferred focus change retriggers the other.
+ cx.after_window_update(move |workspace, cx| {
+ Dock::set_dock_position(
+ workspace,
+ serialized_workspace.dock_position,
+ false,
+ cx,
+ );
+ });
- // Note that without after_window, the focus_self() and
- // the focus the dock generates start generating alternating
- // focus due to the deferred execution each triggering each other
- cx.after_window_update(move |workspace, cx| {
- Dock::set_dock_position(
- workspace,
- serialized_workspace.dock_position,
- false,
- cx,
- );
- });
+ cx.notify();
+ })?;
- cx.notify();
- })?;
+ // Serialize ourself to make sure our timestamps and any pane / item changes are replicated
+ workspace.read_with(&cx, |workspace, cx| workspace.serialize_workspace(cx))?;
- // Serialize ourself to make sure our timestamps and any pane / item changes are replicated
- workspace.read_with(&cx, |workspace, cx| workspace.serialize_workspace(cx))?;
- anyhow::Ok(())
+ Ok::<_, anyhow::Error>(resulting_list)
+ }};
+
+ result.await.unwrap_or_default()
})
- .detach_and_log_err(cx);
}
#[cfg(any(test, feature = "test-support"))]
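`load_workspace` wraps its body in `async_iife!` so that `?` can be used throughout while the method still returns a plain `Task`, with any error swallowed by `unwrap_or_default` at the end. A rough sketch of the shape such a macro expands to (an assumption about `util::async_iife`, not a verbatim copy): an async closure invoked on the spot, producing a single future:

```rust
// Conceptual expansion of async_iife! {{ ... }} — assumed shape, shown
// inside an async fn so the future can be awaited.
let result = (|| async move {
    let value: i32 = "42".parse()?; // `?` is legal inside the block
    Ok::<_, anyhow::Error>(value)
})();
assert_eq!(result.await.unwrap_or_default(), 42);
```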
@@ -2657,10 +2639,99 @@ impl Workspace {
dock_default_item_factory: |_, _| None,
background_actions: || &[],
});
- Self::new(None, 0, project, app_state, cx)
+ Self::new(0, project, app_state, cx)
}
}
+async fn open_items(
+ serialized_workspace: Option<SerializedWorkspace>,
+ workspace: &WeakViewHandle<Workspace>,
+ mut project_paths_to_open: Vec<(PathBuf, Option<ProjectPath>)>,
+ app_state: Arc<AppState>,
+ mut cx: AsyncAppContext,
+) -> Vec<Option<anyhow::Result<Box<dyn ItemHandle>>>> {
+ let mut opened_items = Vec::with_capacity(project_paths_to_open.len());
+
+ if let Some(serialized_workspace) = serialized_workspace {
+ let workspace = workspace.clone();
+ let restored_items = cx
+ .update(|cx| {
+ Workspace::load_workspace(
+ workspace,
+ serialized_workspace,
+ project_paths_to_open
+ .iter()
+ .map(|(_, project_path)| project_path)
+ .cloned()
+ .collect(),
+ cx,
+ )
+ })
+ .await;
+
+ let restored_project_paths = cx.read(|cx| {
+ restored_items
+ .iter()
+ .filter_map(|item| item.as_ref()?.as_ref().ok()?.project_path(cx))
+ .collect::<HashSet<_>>()
+ });
+
+ opened_items = restored_items;
+ project_paths_to_open
+ .iter_mut()
+ .for_each(|(_, project_path)| {
+ if let Some(project_path_to_open) = project_path {
+ if restored_project_paths.contains(project_path_to_open) {
+ *project_path = None;
+ }
+ }
+ });
+ } else {
+ for _ in 0..project_paths_to_open.len() {
+ opened_items.push(None);
+ }
+ }
+ assert!(opened_items.len() == project_paths_to_open.len());
+
+ let tasks =
+ project_paths_to_open
+ .into_iter()
+ .enumerate()
+ .map(|(i, (abs_path, project_path))| {
+ let workspace = workspace.clone();
+ cx.spawn(|mut cx| {
+ let fs = app_state.fs.clone();
+ async move {
+ let file_project_path = project_path?;
+ if fs.is_file(&abs_path).await {
+ Some((
+ i,
+ workspace
+ .update(&mut cx, |workspace, cx| {
+ workspace.open_path(file_project_path, None, true, cx)
+ })
+ .log_err()?
+ .await,
+ ))
+ } else {
+ None
+ }
+ }
+ })
+ });
+
+ for maybe_opened_path in futures::future::join_all(tasks.into_iter())
+ .await
+ .into_iter()
+ {
+ if let Some((i, path_open_result)) = maybe_opened_path {
+ opened_items[i] = Some(path_open_result);
+ }
+ }
+
+ opened_items
+}
+
fn notify_if_database_failed(workspace: &WeakViewHandle<Workspace>, cx: &mut AsyncAppContext) {
    const REPORT_ISSUE_URL: &str = "https://github.com/zed-industries/community/issues/new?assignees=&labels=defect%2Ctriage&template=2_bug_report.yml";
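`open_items` pre-sizes `opened_items`, fills slots for anything the serialized workspace restored, and tags each remaining open task with its index so its result lands back in the right slot. The slot-filling pattern in isolation (inside an async context; names are illustrative):

```rust
// Some slots are pre-filled (restored items); index-tagged tasks fill the rest.
let mut slots: Vec<Option<String>> = vec![Some("restored".into()), None, None];

let tasks = [(1usize, "b"), (2usize, "c")]
    .into_iter()
    .map(|(i, s)| async move { (i, s.to_uppercase()) });

for (i, value) in futures::future::join_all(tasks).await {
    slots[i] = Some(value);
}
assert_eq!(slots[1].as_deref(), Some("B"));
```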
@@ -2900,8 +2971,6 @@ pub fn open_paths(
Vec<Option<Result<Box<dyn ItemHandle>, anyhow::Error>>>,
)>,
> {
- log::info!("open paths {:?}", abs_paths);
-
let app_state = app_state.clone();
let abs_paths = abs_paths.to_vec();
cx.spawn(|mut cx| async move {
@@ -3031,8 +3100,7 @@ pub fn join_remote_project(
let (_, workspace) = cx.add_window(
(app_state.build_window_options)(None, None, cx.platform().as_ref()),
|cx| {
- let mut workspace =
- Workspace::new(Default::default(), 0, project, app_state.clone(), cx);
+ let mut workspace = Workspace::new(0, project, app_state.clone(), cx);
(app_state.initialize_workspace)(&mut workspace, &app_state, cx);
workspace
},
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathansobo@gmail.com>"]
description = "The fast, collaborative code editor."
edition = "2021"
name = "zed"
-version = "0.86.0"
+version = "0.87.0"
publish = false
[lib]
@@ -6,18 +6,18 @@ use assets::Assets;
use backtrace::Backtrace;
use cli::{
ipc::{self, IpcSender},
- CliRequest, CliResponse, IpcHandshake,
+ CliRequest, CliResponse, IpcHandshake, FORCE_CLI_MODE_ENV_VAR_NAME,
};
use client::{self, TelemetrySettings, UserStore, ZED_APP_VERSION, ZED_SECRET_CLIENT_TOKEN};
use db::kvp::KEY_VALUE_STORE;
-use editor::Editor;
+use editor::{scroll::autoscroll::Autoscroll, Editor};
use futures::{
channel::{mpsc, oneshot},
FutureExt, SinkExt, StreamExt,
};
use gpui::{Action, App, AppContext, AssetSource, AsyncAppContext, Task, ViewContext};
use isahc::{config::Configurable, Request};
-use language::LanguageRegistry;
+use language::{LanguageRegistry, Point};
use log::LevelFilter;
use node_runtime::NodeRuntime;
use parking_lot::Mutex;
@@ -30,20 +30,28 @@ use settings::{
use simplelog::ConfigBuilder;
use smol::process::Command;
use std::{
+ collections::HashMap,
env,
ffi::OsStr,
fs::OpenOptions,
io::Write as _,
os::unix::prelude::OsStrExt,
panic,
- path::PathBuf,
+ path::{Path, PathBuf},
str,
- sync::{Arc, Weak},
+ sync::{
+ atomic::{AtomicBool, Ordering},
+ Arc, Weak,
+ },
thread,
time::Duration,
};
+use sum_tree::Bias;
use terminal_view::{get_working_directory, TerminalSettings, TerminalView};
-use util::http::{self, HttpClient};
+use util::{
+ http::{self, HttpClient},
+ paths::PathLikeWithPosition,
+};
use welcome::{show_welcome_experience, FIRST_OPEN};
use fs::RealFs;
@@ -90,29 +98,17 @@ fn main() {
};
let (cli_connections_tx, mut cli_connections_rx) = mpsc::unbounded();
+ let cli_connections_tx = Arc::new(cli_connections_tx);
let (open_paths_tx, mut open_paths_rx) = mpsc::unbounded();
+ let open_paths_tx = Arc::new(open_paths_tx);
+ let urls_callback_triggered = Arc::new(AtomicBool::new(false));
+
+ let callback_cli_connections_tx = Arc::clone(&cli_connections_tx);
+ let callback_open_paths_tx = Arc::clone(&open_paths_tx);
+ let callback_urls_callback_triggered = Arc::clone(&urls_callback_triggered);
app.on_open_urls(move |urls, _| {
- if let Some(server_name) = urls.first().and_then(|url| url.strip_prefix("zed-cli://")) {
- if let Some(cli_connection) = connect_to_cli(server_name).log_err() {
- cli_connections_tx
- .unbounded_send(cli_connection)
- .map_err(|_| anyhow!("no listener for cli connections"))
- .log_err();
- };
- } else {
- let paths: Vec<_> = urls
- .iter()
- .flat_map(|url| url.strip_prefix("file://"))
- .map(|url| {
- let decoded = urlencoding::decode_binary(url.as_bytes());
- PathBuf::from(OsStr::from_bytes(decoded.as_ref()))
- })
- .collect();
- open_paths_tx
- .unbounded_send(paths)
- .map_err(|_| anyhow!("no listener for open urls requests"))
- .log_err();
- }
+ callback_urls_callback_triggered.store(true, Ordering::Release);
+ open_urls(urls, &callback_cli_connections_tx, &callback_open_paths_tx);
})
.on_reopen(move |cx| {
if cx.has_global::<Weak<AppState>>() {
@@ -228,6 +224,14 @@ fn main() {
workspace::open_paths(&paths, &app_state, None, cx).detach_and_log_err(cx);
}
} else {
+            // TODO: development runs that force CLI mode execute the Zed binary directly
+            // instead of as an *.app bundle, so the platform's open-URL callbacks never
+            // fire. Emulate them here when needed.
+ if std::env::var(FORCE_CLI_MODE_ENV_VAR_NAME).ok().is_some()
+ && !urls_callback_triggered.load(Ordering::Acquire)
+ {
+ open_urls(collect_url_args(), &cli_connections_tx, &open_paths_tx)
+ }
+
if let Ok(Some(connection)) = cli_connections_rx.try_next() {
cx.spawn(|cx| handle_cli_connection(connection, app_state.clone(), cx))
.detach();
@@ -278,6 +282,37 @@ fn main() {
});
}
+fn open_urls(
+ urls: Vec<String>,
+ cli_connections_tx: &mpsc::UnboundedSender<(
+ mpsc::Receiver<CliRequest>,
+ IpcSender<CliResponse>,
+ )>,
+ open_paths_tx: &mpsc::UnboundedSender<Vec<PathBuf>>,
+) {
+ if let Some(server_name) = urls.first().and_then(|url| url.strip_prefix("zed-cli://")) {
+ if let Some(cli_connection) = connect_to_cli(server_name).log_err() {
+ cli_connections_tx
+ .unbounded_send(cli_connection)
+ .map_err(|_| anyhow!("no listener for cli connections"))
+ .log_err();
+ };
+ } else {
+ let paths: Vec<_> = urls
+ .iter()
+ .flat_map(|url| url.strip_prefix("file://"))
+ .map(|url| {
+ let decoded = urlencoding::decode_binary(url.as_bytes());
+ PathBuf::from(OsStr::from_bytes(decoded.as_ref()))
+ })
+ .collect();
+ open_paths_tx
+ .unbounded_send(paths)
+ .map_err(|_| anyhow!("no listener for open urls requests"))
+ .log_err();
+ }
+}
+
async fn restore_or_create_workspace(app_state: &Arc<AppState>, mut cx: AsyncAppContext) {
if let Some(location) = workspace::last_opened_workspace_paths().await {
cx.update(|cx| workspace::open_paths(location.paths().as_ref(), app_state, None, cx))
@@ -508,7 +543,8 @@ async fn load_login_shell_environment() -> Result<()> {
}
fn stdout_is_a_pty() -> bool {
- unsafe { libc::isatty(libc::STDOUT_FILENO as i32) != 0 }
+ std::env::var(FORCE_CLI_MODE_ENV_VAR_NAME).ok().is_none()
+ && unsafe { libc::isatty(libc::STDOUT_FILENO as i32) != 0 }
}
fn collect_path_args() -> Vec<PathBuf> {
@@ -521,7 +557,11 @@ fn collect_path_args() -> Vec<PathBuf> {
None
}
})
- .collect::<Vec<_>>()
+ .collect()
+}
+
+fn collect_url_args() -> Vec<String> {
+ env::args().skip(1).collect()
}
fn load_embedded_fonts(app: &App) {
@@ -616,13 +656,38 @@ async fn handle_cli_connection(
if let Some(request) = requests.next().await {
match request {
CliRequest::Open { paths, wait } => {
+ let mut caret_positions = HashMap::new();
+
let paths = if paths.is_empty() {
workspace::last_opened_workspace_paths()
.await
.map(|location| location.paths().to_vec())
- .unwrap_or(paths)
+ .unwrap_or_default()
} else {
paths
+ .into_iter()
+ .filter_map(|path_with_position_string| {
+ let path_with_position = PathLikeWithPosition::parse_str(
+ &path_with_position_string,
+ |path_str| {
+ Ok::<_, std::convert::Infallible>(
+ Path::new(path_str).to_path_buf(),
+ )
+ },
+ )
+ .expect("Infallible");
+ let path = path_with_position.path_like;
+ if let Some(row) = path_with_position.row {
+ if path.is_file() {
+ let row = row.saturating_sub(1);
+ let col =
+ path_with_position.column.unwrap_or(0).saturating_sub(1);
+ caret_positions.insert(path.clone(), Point::new(row, col));
+ }
+ }
+ Some(path)
+ })
+ .collect()
};
let mut errored = false;
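Rows and columns on the command line are 1-based, while editor `Point`s are 0-based; the `saturating_sub(1)` conversion also keeps an explicit `:0` from underflowing. For instance:

```rust
// `zed foo.rs:10:5` selects row 9, column 4 in buffer coordinates.
let (row, col): (u32, u32) = (10, 5);
let point = language::Point::new(row.saturating_sub(1), col.saturating_sub(1));
assert_eq!((point.row, point.column), (9, 4));
```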
@@ -632,11 +697,32 @@ async fn handle_cli_connection(
{
Ok((workspace, items)) => {
let mut item_release_futures = Vec::new();
- cx.update(|cx| {
- for (item, path) in items.into_iter().zip(&paths) {
- match item {
- Some(Ok(item)) => {
- let released = oneshot::channel();
+
+ for (item, path) in items.into_iter().zip(&paths) {
+ match item {
+ Some(Ok(item)) => {
+ if let Some(point) = caret_positions.remove(path) {
+ if let Some(active_editor) = item.downcast::<Editor>() {
+ active_editor
+ .downgrade()
+ .update(&mut cx, |editor, cx| {
+ let snapshot =
+ editor.snapshot(cx).display_snapshot;
+ let point = snapshot
+ .buffer_snapshot
+ .clip_point(point, Bias::Left);
+ editor.change_selections(
+ Some(Autoscroll::center()),
+ cx,
+ |s| s.select_ranges([point..point]),
+ );
+ })
+ .log_err();
+ }
+ }
+
+ let released = oneshot::channel();
+ cx.update(|cx| {
item.on_release(
cx,
Box::new(move |_| {
@@ -644,23 +730,20 @@ async fn handle_cli_connection(
}),
)
.detach();
- item_release_futures.push(released.1);
- }
- Some(Err(err)) => {
- responses
- .send(CliResponse::Stderr {
- message: format!(
- "error opening {:?}: {}",
- path, err
- ),
- })
- .log_err();
- errored = true;
- }
- None => {}
+ });
+ item_release_futures.push(released.1);
}
+ Some(Err(err)) => {
+ responses
+ .send(CliResponse::Stderr {
+ message: format!("error opening {:?}: {}", path, err),
+ })
+ .log_err();
+ errored = true;
+ }
+ None => {}
}
- });
+ }
if wait {
let background = cx.background();
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+set -eu
+
+if [[ $# -lt 1 ]]; then
+ echo "usage: $0 <MAX_SIZE_IN_GB>"
+ exit 1
+fi
+
+max_size_gb=$1
+
+current_size=$(du -s target | cut -f1)
+current_size_gb=$(expr ${current_size} / 1024 / 1024)
+
+echo "target directory size: ${current_size_gb}gb. max size: ${max_size_gb}gb"
+
+if [[ ${current_size_gb} -gt ${max_size_gb} ]]; then
+ echo "clearing target directory"
+ rm -rf target
+fi