Cargo.lock 🔗
@@ -2440,6 +2440,7 @@ dependencies = [
"parking_lot 0.11.2",
"regex",
"rope",
+ "rpc",
"serde",
"serde_derive",
"serde_json",
Max Brunsfeld created
Cargo.lock | 1
crates/clock/src/clock.rs | 1
crates/collab/migrations.sqlite/20221109000000_test_schema.sql | 17
crates/collab/migrations/20230605191135_remove_repository_statuses.sql | 4
crates/collab/src/db.rs | 143
crates/collab/src/db/worktree_entry.rs | 1
crates/collab/src/tests/integration_tests.rs | 80
crates/collab/src/tests/randomized_integration_tests.rs | 64
crates/fs/Cargo.toml | 2
crates/fs/src/fs.rs | 71
crates/fs/src/repository.rs | 68
crates/project/src/project.rs | 33
crates/project/src/worktree.rs | 555
crates/project_panel/src/project_panel.rs | 14
crates/rpc/proto/zed.proto | 3
crates/sum_tree/src/sum_tree.rs | 5
crates/util/src/util.rs | 2
styles/package-lock.json | 11
styles/package.json | 4
styles/src/buildLicenses.ts | 10
styles/src/buildThemes.ts | 6
styles/src/buildTokens.ts | 39
styles/src/common.ts | 4
styles/src/styleTree/app.ts | 2
styles/src/styleTree/assistant.ts | 156
styles/src/styleTree/commandPalette.ts | 4
styles/src/styleTree/components.ts | 2
styles/src/styleTree/contactFinder.ts | 2
styles/src/styleTree/contactList.ts | 2
styles/src/styleTree/contactNotification.ts | 2
styles/src/styleTree/contactsPopover.ts | 2
styles/src/styleTree/contextMenu.ts | 2
styles/src/styleTree/copilot.ts | 2
styles/src/styleTree/editor.ts | 15
styles/src/styleTree/feedback.ts | 2
styles/src/styleTree/hoverPopover.ts | 2
styles/src/styleTree/incomingCallNotification.ts | 2
styles/src/styleTree/lspLogMenu.ts | 2
styles/src/styleTree/picker.ts | 4
styles/src/styleTree/projectDiagnostics.ts | 2
styles/src/styleTree/projectPanel.ts | 14
styles/src/styleTree/projectSharedNotification.ts | 2
styles/src/styleTree/search.ts | 6
styles/src/styleTree/sharedScreen.ts | 2
styles/src/styleTree/simpleMessageNotification.ts | 2
styles/src/styleTree/statusBar.ts | 2
styles/src/styleTree/tabBar.ts | 6
styles/src/styleTree/terminal.ts | 2
styles/src/styleTree/tooltip.ts | 2
styles/src/styleTree/updateNotification.ts | 2
styles/src/styleTree/welcome.ts | 4
styles/src/styleTree/workspace.ts | 8
styles/src/theme/color.ts | 0
styles/src/theme/colorScheme.ts | 2
styles/src/theme/index.ts | 2
styles/src/theme/ramps.ts | 2
styles/src/theme/syntax.ts | 2
styles/src/theme/themeConfig.ts | 7
styles/src/theme/tokens/colorScheme.ts | 12
styles/src/theme/tokens/players.ts | 28
styles/src/theme/tokens/token.ts | 14
styles/src/themes/atelier/common.ts | 4
styles/src/themes/ayu/common.ts | 4
styles/src/themes/gruvbox/gruvbox-common.ts | 3
styles/src/themes/index.ts | 2
styles/src/utils/slugify.ts | 1
styles/tsconfig.json | 16
67 files changed, 818 insertions(+), 676 deletions(-)
@@ -2440,6 +2440,7 @@ dependencies = [
"parking_lot 0.11.2",
"regex",
"rope",
+ "rpc",
"serde",
"serde_derive",
"serde_json",
@@ -66,6 +66,7 @@ impl<'a> AddAssign<&'a Local> for Local {
}
}
+/// A vector clock
#[derive(Clone, Default, Hash, Eq, PartialEq)]
pub struct Global(SmallVec<[u32; 8]>);
@@ -76,6 +76,7 @@ CREATE TABLE "worktree_entries" (
"is_symlink" BOOL NOT NULL,
"is_ignored" BOOL NOT NULL,
"is_deleted" BOOL NOT NULL,
+ "git_status" INTEGER,
PRIMARY KEY(project_id, worktree_id, id),
FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
);
@@ -96,22 +97,6 @@ CREATE TABLE "worktree_repositories" (
CREATE INDEX "index_worktree_repositories_on_project_id" ON "worktree_repositories" ("project_id");
CREATE INDEX "index_worktree_repositories_on_project_id_and_worktree_id" ON "worktree_repositories" ("project_id", "worktree_id");
-CREATE TABLE "worktree_repository_statuses" (
- "project_id" INTEGER NOT NULL,
- "worktree_id" INTEGER NOT NULL,
- "work_directory_id" INTEGER NOT NULL,
- "repo_path" VARCHAR NOT NULL,
- "status" INTEGER NOT NULL,
- "scan_id" INTEGER NOT NULL,
- "is_deleted" BOOL NOT NULL,
- PRIMARY KEY(project_id, worktree_id, work_directory_id, repo_path),
- FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
- FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
-);
-CREATE INDEX "index_worktree_repository_statuses_on_project_id" ON "worktree_repository_statuses" ("project_id");
-CREATE INDEX "index_worktree_repository_statuses_on_project_id_and_worktree_id" ON "worktree_repository_statuses" ("project_id", "worktree_id");
-CREATE INDEX "index_worktree_repository_statuses_on_project_id_and_worktree_id_and_work_directory_id" ON "worktree_repository_statuses" ("project_id", "worktree_id", "work_directory_id");
-
CREATE TABLE "worktree_settings_files" (
"project_id" INTEGER NOT NULL,
"worktree_id" INTEGER NOT NULL,
@@ -0,0 +1,4 @@
+DROP TABLE "worktree_repository_statuses";
+
+ALTER TABLE "worktree_entries"
+ADD "git_status" INT8;
@@ -1539,6 +1539,7 @@ impl Database {
}),
is_symlink: db_entry.is_symlink,
is_ignored: db_entry.is_ignored,
+ git_status: db_entry.git_status.map(|status| status as i32),
});
}
}
@@ -1573,54 +1574,6 @@ impl Database {
worktree.updated_repositories.push(proto::RepositoryEntry {
work_directory_id: db_repository.work_directory_id as u64,
branch: db_repository.branch,
- removed_repo_paths: Default::default(),
- updated_statuses: Default::default(),
- });
- }
- }
- }
-
- // Repository Status Entries
- for repository in worktree.updated_repositories.iter_mut() {
- let repository_status_entry_filter =
- if let Some(rejoined_worktree) = rejoined_worktree {
- worktree_repository_statuses::Column::ScanId
- .gt(rejoined_worktree.scan_id)
- } else {
- worktree_repository_statuses::Column::IsDeleted.eq(false)
- };
-
- let mut db_repository_statuses =
- worktree_repository_statuses::Entity::find()
- .filter(
- Condition::all()
- .add(
- worktree_repository_statuses::Column::ProjectId
- .eq(project.id),
- )
- .add(
- worktree_repository_statuses::Column::WorktreeId
- .eq(worktree.id),
- )
- .add(
- worktree_repository_statuses::Column::WorkDirectoryId
- .eq(repository.work_directory_id),
- )
- .add(repository_status_entry_filter),
- )
- .stream(&*tx)
- .await?;
-
- while let Some(db_status_entry) = db_repository_statuses.next().await {
- let db_status_entry = db_status_entry?;
- if db_status_entry.is_deleted {
- repository
- .removed_repo_paths
- .push(db_status_entry.repo_path);
- } else {
- repository.updated_statuses.push(proto::StatusEntry {
- repo_path: db_status_entry.repo_path,
- status: db_status_entry.status as i32,
});
}
}
@@ -2396,6 +2349,7 @@ impl Database {
mtime_nanos: ActiveValue::set(mtime.nanos as i32),
is_symlink: ActiveValue::set(entry.is_symlink),
is_ignored: ActiveValue::set(entry.is_ignored),
+ git_status: ActiveValue::set(entry.git_status.map(|status| status as i64)),
is_deleted: ActiveValue::set(false),
scan_id: ActiveValue::set(update.scan_id as i64),
}
@@ -2414,6 +2368,7 @@ impl Database {
worktree_entry::Column::MtimeNanos,
worktree_entry::Column::IsSymlink,
worktree_entry::Column::IsIgnored,
+ worktree_entry::Column::GitStatus,
worktree_entry::Column::ScanId,
])
.to_owned(),
@@ -2467,68 +2422,6 @@ impl Database {
)
.exec(&*tx)
.await?;
-
- for repository in update.updated_repositories.iter() {
- if !repository.updated_statuses.is_empty() {
- worktree_repository_statuses::Entity::insert_many(
- repository.updated_statuses.iter().map(|status_entry| {
- worktree_repository_statuses::ActiveModel {
- project_id: ActiveValue::set(project_id),
- worktree_id: ActiveValue::set(worktree_id),
- work_directory_id: ActiveValue::set(
- repository.work_directory_id as i64,
- ),
- repo_path: ActiveValue::set(status_entry.repo_path.clone()),
- status: ActiveValue::set(status_entry.status as i64),
- scan_id: ActiveValue::set(update.scan_id as i64),
- is_deleted: ActiveValue::set(false),
- }
- }),
- )
- .on_conflict(
- OnConflict::columns([
- worktree_repository_statuses::Column::ProjectId,
- worktree_repository_statuses::Column::WorktreeId,
- worktree_repository_statuses::Column::WorkDirectoryId,
- worktree_repository_statuses::Column::RepoPath,
- ])
- .update_columns([
- worktree_repository_statuses::Column::ScanId,
- worktree_repository_statuses::Column::Status,
- worktree_repository_statuses::Column::IsDeleted,
- ])
- .to_owned(),
- )
- .exec(&*tx)
- .await?;
- }
-
- if !repository.removed_repo_paths.is_empty() {
- worktree_repository_statuses::Entity::update_many()
- .filter(
- worktree_repository_statuses::Column::ProjectId
- .eq(project_id)
- .and(
- worktree_repository_statuses::Column::WorktreeId
- .eq(worktree_id),
- )
- .and(
- worktree_repository_statuses::Column::WorkDirectoryId
- .eq(repository.work_directory_id as i64),
- )
- .and(worktree_repository_statuses::Column::RepoPath.is_in(
- repository.removed_repo_paths.iter().map(String::as_str),
- )),
- )
- .set(worktree_repository_statuses::ActiveModel {
- is_deleted: ActiveValue::Set(true),
- scan_id: ActiveValue::Set(update.scan_id as i64),
- ..Default::default()
- })
- .exec(&*tx)
- .await?;
- }
- }
}
if !update.removed_repositories.is_empty() {
@@ -2812,6 +2705,7 @@ impl Database {
}),
is_symlink: db_entry.is_symlink,
is_ignored: db_entry.is_ignored,
+ git_status: db_entry.git_status.map(|status| status as i32),
});
}
}
@@ -2837,41 +2731,12 @@ impl Database {
proto::RepositoryEntry {
work_directory_id: db_repository_entry.work_directory_id as u64,
branch: db_repository_entry.branch,
- removed_repo_paths: Default::default(),
- updated_statuses: Default::default(),
},
);
}
}
}
- {
- let mut db_status_entries = worktree_repository_statuses::Entity::find()
- .filter(
- Condition::all()
- .add(worktree_repository_statuses::Column::ProjectId.eq(project_id))
- .add(worktree_repository_statuses::Column::IsDeleted.eq(false)),
- )
- .stream(&*tx)
- .await?;
-
- while let Some(db_status_entry) = db_status_entries.next().await {
- let db_status_entry = db_status_entry?;
- if let Some(worktree) = worktrees.get_mut(&(db_status_entry.worktree_id as u64))
- {
- if let Some(repository_entry) = worktree
- .repository_entries
- .get_mut(&(db_status_entry.work_directory_id as u64))
- {
- repository_entry.updated_statuses.push(proto::StatusEntry {
- repo_path: db_status_entry.repo_path,
- status: db_status_entry.status as i32,
- });
- }
- }
- }
- }
-
// Populate worktree diagnostic summaries.
{
let mut db_summaries = worktree_diagnostic_summary::Entity::find()
@@ -15,6 +15,7 @@ pub struct Model {
pub inode: i64,
pub mtime_seconds: i64,
pub mtime_nanos: i32,
+ pub git_status: Option<i64>,
pub is_symlink: bool,
pub is_ignored: bool,
pub is_deleted: bool,
@@ -2415,14 +2415,10 @@ async fn test_git_diff_base_change(
"
.unindent();
- client_a
- .fs
- .as_fake()
- .set_index_for_repo(
- Path::new("/dir/.git"),
- &[(Path::new("a.txt"), diff_base.clone())],
- )
- .await;
+ client_a.fs.as_fake().set_index_for_repo(
+ Path::new("/dir/.git"),
+ &[(Path::new("a.txt"), diff_base.clone())],
+ );
// Create the buffer
let buffer_local_a = project_local
@@ -2464,14 +2460,10 @@ async fn test_git_diff_base_change(
);
});
- client_a
- .fs
- .as_fake()
- .set_index_for_repo(
- Path::new("/dir/.git"),
- &[(Path::new("a.txt"), new_diff_base.clone())],
- )
- .await;
+ client_a.fs.as_fake().set_index_for_repo(
+ Path::new("/dir/.git"),
+ &[(Path::new("a.txt"), new_diff_base.clone())],
+ );
// Wait for buffer_local_a to receive it
deterministic.run_until_parked();
@@ -2513,14 +2505,10 @@ async fn test_git_diff_base_change(
"
.unindent();
- client_a
- .fs
- .as_fake()
- .set_index_for_repo(
- Path::new("/dir/sub/.git"),
- &[(Path::new("b.txt"), diff_base.clone())],
- )
- .await;
+ client_a.fs.as_fake().set_index_for_repo(
+ Path::new("/dir/sub/.git"),
+ &[(Path::new("b.txt"), diff_base.clone())],
+ );
// Create the buffer
let buffer_local_b = project_local
@@ -2562,14 +2550,10 @@ async fn test_git_diff_base_change(
);
});
- client_a
- .fs
- .as_fake()
- .set_index_for_repo(
- Path::new("/dir/sub/.git"),
- &[(Path::new("b.txt"), new_diff_base.clone())],
- )
- .await;
+ client_a.fs.as_fake().set_index_for_repo(
+ Path::new("/dir/sub/.git"),
+ &[(Path::new("b.txt"), new_diff_base.clone())],
+ );
// Wait for buffer_local_b to receive it
deterministic.run_until_parked();
@@ -2646,8 +2630,7 @@ async fn test_git_branch_name(
client_a
.fs
.as_fake()
- .set_branch_name(Path::new("/dir/.git"), Some("branch-1"))
- .await;
+ .set_branch_name(Path::new("/dir/.git"), Some("branch-1"));
// Wait for it to catch up to the new branch
deterministic.run_until_parked();
@@ -2673,8 +2656,7 @@ async fn test_git_branch_name(
client_a
.fs
.as_fake()
- .set_branch_name(Path::new("/dir/.git"), Some("branch-2"))
- .await;
+ .set_branch_name(Path::new("/dir/.git"), Some("branch-2"));
// Wait for buffer_local_a to receive it
deterministic.run_until_parked();
@@ -2726,17 +2708,13 @@ async fn test_git_status_sync(
const A_TXT: &'static str = "a.txt";
const B_TXT: &'static str = "b.txt";
- client_a
- .fs
- .as_fake()
- .set_status_for_repo(
- Path::new("/dir/.git"),
- &[
- (&Path::new(A_TXT), GitFileStatus::Added),
- (&Path::new(B_TXT), GitFileStatus::Added),
- ],
- )
- .await;
+ client_a.fs.as_fake().set_status_for_repo_via_git_operation(
+ Path::new("/dir/.git"),
+ &[
+ (&Path::new(A_TXT), GitFileStatus::Added),
+ (&Path::new(B_TXT), GitFileStatus::Added),
+ ],
+ );
let (project_local, _worktree_id) = client_a.build_local_project("/dir", cx_a).await;
let project_id = active_call_a
@@ -2763,8 +2741,7 @@ async fn test_git_status_sync(
assert_eq!(worktrees.len(), 1);
let worktree = worktrees[0].clone();
let snapshot = worktree.read(cx).snapshot();
- let root_entry = snapshot.root_git_entry().unwrap();
- assert_eq!(root_entry.status_for_file(&snapshot, file), status);
+ assert_eq!(snapshot.status_for_file(file), status);
}
// Smoke test status reading
@@ -2780,14 +2757,13 @@ async fn test_git_status_sync(
client_a
.fs
.as_fake()
- .set_status_for_repo(
+ .set_status_for_repo_via_working_copy_change(
Path::new("/dir/.git"),
&[
(&Path::new(A_TXT), GitFileStatus::Modified),
(&Path::new(B_TXT), GitFileStatus::Modified),
],
- )
- .await;
+ );
// Wait for buffer_local_a to receive it
deterministic.run_until_parked();
@@ -422,7 +422,7 @@ async fn apply_client_operation(
);
ensure_project_shared(&project, client, cx).await;
- if !client.fs.paths().contains(&new_root_path) {
+ if !client.fs.paths(false).contains(&new_root_path) {
client.fs.create_dir(&new_root_path).await.unwrap();
}
project
@@ -628,12 +628,13 @@ async fn apply_client_operation(
ensure_project_shared(&project, client, cx).await;
let requested_version = buffer.read_with(cx, |buffer, _| buffer.version());
- let save = project.update(cx, |project, cx| project.save_buffer(buffer, cx));
- let save = cx.background().spawn(async move {
- let (saved_version, _, _) = save
- .await
+ let save = project.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
+ let save = cx.spawn(|cx| async move {
+ save.await
.map_err(|err| anyhow!("save request failed: {:?}", err))?;
- assert!(saved_version.observed_all(&requested_version));
+ assert!(buffer
+ .read_with(&cx, |buffer, _| { buffer.saved_version().to_owned() })
+ .observed_all(&requested_version));
anyhow::Ok(())
});
if detach {
@@ -743,7 +744,7 @@ async fn apply_client_operation(
} => {
if !client
.fs
- .directories()
+ .directories(false)
.contains(&path.parent().unwrap().to_owned())
{
return Err(TestError::Inapplicable);
@@ -770,10 +771,16 @@ async fn apply_client_operation(
repo_path,
contents,
} => {
- if !client.fs.directories().contains(&repo_path) {
+ if !client.fs.directories(false).contains(&repo_path) {
return Err(TestError::Inapplicable);
}
+ for (path, _) in contents.iter() {
+ if !client.fs.files().contains(&repo_path.join(path)) {
+ return Err(TestError::Inapplicable);
+ }
+ }
+
log::info!(
"{}: writing git index for repo {:?}: {:?}",
client.username,
@@ -789,13 +796,13 @@ async fn apply_client_operation(
if client.fs.metadata(&dot_git_dir).await?.is_none() {
client.fs.create_dir(&dot_git_dir).await?;
}
- client.fs.set_index_for_repo(&dot_git_dir, &contents).await;
+ client.fs.set_index_for_repo(&dot_git_dir, &contents);
}
GitOperation::WriteGitBranch {
repo_path,
new_branch,
} => {
- if !client.fs.directories().contains(&repo_path) {
+ if !client.fs.directories(false).contains(&repo_path) {
return Err(TestError::Inapplicable);
}
@@ -810,15 +817,21 @@ async fn apply_client_operation(
if client.fs.metadata(&dot_git_dir).await?.is_none() {
client.fs.create_dir(&dot_git_dir).await?;
}
- client.fs.set_branch_name(&dot_git_dir, new_branch).await;
+ client.fs.set_branch_name(&dot_git_dir, new_branch);
}
GitOperation::WriteGitStatuses {
repo_path,
statuses,
+ git_operation,
} => {
- if !client.fs.directories().contains(&repo_path) {
+ if !client.fs.directories(false).contains(&repo_path) {
return Err(TestError::Inapplicable);
}
+ for (path, _) in statuses.iter() {
+ if !client.fs.files().contains(&repo_path.join(path)) {
+ return Err(TestError::Inapplicable);
+ }
+ }
log::info!(
"{}: writing git statuses for repo {:?}: {:?}",
@@ -838,10 +851,16 @@ async fn apply_client_operation(
client.fs.create_dir(&dot_git_dir).await?;
}
- client
- .fs
- .set_status_for_repo(&dot_git_dir, statuses.as_slice())
- .await;
+ if git_operation {
+ client
+ .fs
+ .set_status_for_repo_via_git_operation(&dot_git_dir, statuses.as_slice());
+ } else {
+ client.fs.set_status_for_repo_via_working_copy_change(
+ &dot_git_dir,
+ statuses.as_slice(),
+ );
+ }
}
},
}
@@ -913,9 +932,10 @@ fn check_consistency_between_clients(clients: &[(Rc<TestClient>, TestAppContext)
assert_eq!(
guest_snapshot.entries(false).collect::<Vec<_>>(),
host_snapshot.entries(false).collect::<Vec<_>>(),
- "{} has different snapshot than the host for worktree {:?} and project {:?}",
+ "{} has different snapshot than the host for worktree {:?} ({:?}) and project {:?}",
client.username,
host_snapshot.abs_path(),
+ id,
guest_project.remote_id(),
);
assert_eq!(guest_snapshot.repositories().collect::<Vec<_>>(), host_snapshot.repositories().collect::<Vec<_>>(),
@@ -1230,6 +1250,7 @@ enum GitOperation {
WriteGitStatuses {
repo_path: PathBuf,
statuses: Vec<(PathBuf, GitFileStatus)>,
+ git_operation: bool,
},
}
@@ -1575,7 +1596,7 @@ impl TestPlan {
.choose(&mut self.rng)
.cloned() else { continue };
let project_root_name = root_name_for_project(&project, cx);
- let mut paths = client.fs.paths();
+ let mut paths = client.fs.paths(false);
paths.remove(0);
let new_root_path = if paths.is_empty() || self.rng.gen() {
Path::new("/").join(&self.next_root_dir_name(user_id))
@@ -1755,7 +1776,7 @@ impl TestPlan {
let is_dir = self.rng.gen::<bool>();
let content;
let mut path;
- let dir_paths = client.fs.directories();
+ let dir_paths = client.fs.directories(false);
if is_dir {
content = String::new();
@@ -1809,7 +1830,7 @@ impl TestPlan {
let repo_path = client
.fs
- .directories()
+ .directories(false)
.choose(&mut self.rng)
.unwrap()
.clone();
@@ -1855,9 +1876,12 @@ impl TestPlan {
})
.collect::<Vec<_>>();
+ let git_operation = self.rng.gen::<bool>();
+
GitOperation::WriteGitStatuses {
repo_path,
statuses,
+ git_operation,
}
}
_ => unreachable!(),
@@ -14,6 +14,8 @@ lsp = { path = "../lsp" }
rope = { path = "../rope" }
util = { path = "../util" }
sum_tree = { path = "../sum_tree" }
+rpc = { path = "../rpc" }
+
anyhow.workspace = true
async-trait.workspace = true
futures.workspace = true
@@ -29,6 +29,8 @@ use collections::{btree_map, BTreeMap};
#[cfg(any(test, feature = "test-support"))]
use repository::{FakeGitRepositoryState, GitFileStatus};
#[cfg(any(test, feature = "test-support"))]
+use std::ffi::OsStr;
+#[cfg(any(test, feature = "test-support"))]
use std::sync::Weak;
lazy_static! {
@@ -501,6 +503,11 @@ impl FakeFsState {
}
}
+#[cfg(any(test, feature = "test-support"))]
+lazy_static! {
+ pub static ref FS_DOT_GIT: &'static OsStr = OsStr::new(".git");
+}
+
#[cfg(any(test, feature = "test-support"))]
impl FakeFs {
pub fn new(executor: Arc<gpui::executor::Background>) -> Arc<Self> {
@@ -619,7 +626,7 @@ impl FakeFs {
.boxed()
}
- pub fn with_git_state<F>(&self, dot_git: &Path, f: F)
+ pub fn with_git_state<F>(&self, dot_git: &Path, emit_git_event: bool, f: F)
where
F: FnOnce(&mut FakeGitRepositoryState),
{
@@ -633,18 +640,22 @@ impl FakeFs {
f(&mut repo_state);
- state.emit_event([dot_git]);
+ if emit_git_event {
+ state.emit_event([dot_git]);
+ }
} else {
panic!("not a directory");
}
}
- pub async fn set_branch_name(&self, dot_git: &Path, branch: Option<impl Into<String>>) {
- self.with_git_state(dot_git, |state| state.branch_name = branch.map(Into::into))
+ pub fn set_branch_name(&self, dot_git: &Path, branch: Option<impl Into<String>>) {
+ self.with_git_state(dot_git, true, |state| {
+ state.branch_name = branch.map(Into::into)
+ })
}
- pub async fn set_index_for_repo(&self, dot_git: &Path, head_state: &[(&Path, String)]) {
- self.with_git_state(dot_git, |state| {
+ pub fn set_index_for_repo(&self, dot_git: &Path, head_state: &[(&Path, String)]) {
+ self.with_git_state(dot_git, true, |state| {
state.index_contents.clear();
state.index_contents.extend(
head_state
@@ -654,8 +665,32 @@ impl FakeFs {
});
}
- pub async fn set_status_for_repo(&self, dot_git: &Path, statuses: &[(&Path, GitFileStatus)]) {
- self.with_git_state(dot_git, |state| {
+ pub fn set_status_for_repo_via_working_copy_change(
+ &self,
+ dot_git: &Path,
+ statuses: &[(&Path, GitFileStatus)],
+ ) {
+ self.with_git_state(dot_git, false, |state| {
+ state.worktree_statuses.clear();
+ state.worktree_statuses.extend(
+ statuses
+ .iter()
+ .map(|(path, content)| ((**path).into(), content.clone())),
+ );
+ });
+ self.state.lock().emit_event(
+ statuses
+ .iter()
+ .map(|(path, _)| dot_git.parent().unwrap().join(path)),
+ );
+ }
+
+ pub fn set_status_for_repo_via_git_operation(
+ &self,
+ dot_git: &Path,
+ statuses: &[(&Path, GitFileStatus)],
+ ) {
+ self.with_git_state(dot_git, true, |state| {
state.worktree_statuses.clear();
state.worktree_statuses.extend(
statuses
@@ -665,7 +700,7 @@ impl FakeFs {
});
}
- pub fn paths(&self) -> Vec<PathBuf> {
+ pub fn paths(&self, include_dot_git: bool) -> Vec<PathBuf> {
let mut result = Vec::new();
let mut queue = collections::VecDeque::new();
queue.push_back((PathBuf::from("/"), self.state.lock().root.clone()));
@@ -675,12 +710,18 @@ impl FakeFs {
queue.push_back((path.join(name), entry.clone()));
}
}
- result.push(path);
+ if include_dot_git
+ || !path
+ .components()
+ .any(|component| component.as_os_str() == *FS_DOT_GIT)
+ {
+ result.push(path);
+ }
}
result
}
- pub fn directories(&self) -> Vec<PathBuf> {
+ pub fn directories(&self, include_dot_git: bool) -> Vec<PathBuf> {
let mut result = Vec::new();
let mut queue = collections::VecDeque::new();
queue.push_back((PathBuf::from("/"), self.state.lock().root.clone()));
@@ -689,7 +730,13 @@ impl FakeFs {
for (name, entry) in entries {
queue.push_back((path.join(name), entry.clone()));
}
- result.push(path);
+ if include_dot_git
+ || !path
+ .components()
+ .any(|component| component.as_os_str() == *FS_DOT_GIT)
+ {
+ result.push(path);
+ }
}
}
result
@@ -1,6 +1,8 @@
use anyhow::Result;
use collections::HashMap;
+use git2::ErrorCode;
use parking_lot::Mutex;
+use rpc::proto;
use serde_derive::{Deserialize, Serialize};
use std::{
cmp::Ordering,
@@ -24,7 +26,7 @@ pub trait GitRepository: Send {
fn statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>>;
- fn status(&self, path: &RepoPath) -> Option<GitFileStatus>;
+ fn status(&self, path: &RepoPath) -> Result<Option<GitFileStatus>>;
}
impl std::fmt::Debug for dyn GitRepository {
@@ -91,9 +93,18 @@ impl GitRepository for LibGitRepository {
Some(map)
}
- fn status(&self, path: &RepoPath) -> Option<GitFileStatus> {
- let status = self.status_file(path).log_err()?;
- read_status(status)
+ fn status(&self, path: &RepoPath) -> Result<Option<GitFileStatus>> {
+ let status = self.status_file(path);
+ match status {
+ Ok(status) => Ok(read_status(status)),
+ Err(e) => {
+ if e.code() == ErrorCode::NotFound {
+ Ok(None)
+ } else {
+ Err(e.into())
+ }
+ }
+ }
}
}
@@ -155,9 +166,9 @@ impl GitRepository for FakeGitRepository {
Some(map)
}
- fn status(&self, path: &RepoPath) -> Option<GitFileStatus> {
+ fn status(&self, path: &RepoPath) -> Result<Option<GitFileStatus>> {
let state = self.state.lock();
- state.worktree_statuses.get(path).cloned()
+ Ok(state.worktree_statuses.get(path).cloned())
}
}
@@ -197,8 +208,51 @@ pub enum GitFileStatus {
Conflict,
}
+impl GitFileStatus {
+ pub fn merge(
+ this: Option<GitFileStatus>,
+ other: Option<GitFileStatus>,
+ prefer_other: bool,
+ ) -> Option<GitFileStatus> {
+ if prefer_other {
+ return other;
+ } else {
+ match (this, other) {
+ (Some(GitFileStatus::Conflict), _) | (_, Some(GitFileStatus::Conflict)) => {
+ Some(GitFileStatus::Conflict)
+ }
+ (Some(GitFileStatus::Modified), _) | (_, Some(GitFileStatus::Modified)) => {
+ Some(GitFileStatus::Modified)
+ }
+ (Some(GitFileStatus::Added), _) | (_, Some(GitFileStatus::Added)) => {
+ Some(GitFileStatus::Added)
+ }
+ _ => None,
+ }
+ }
+ }
+
+ pub fn from_proto(git_status: Option<i32>) -> Option<GitFileStatus> {
+ git_status.and_then(|status| {
+ proto::GitStatus::from_i32(status).map(|status| match status {
+ proto::GitStatus::Added => GitFileStatus::Added,
+ proto::GitStatus::Modified => GitFileStatus::Modified,
+ proto::GitStatus::Conflict => GitFileStatus::Conflict,
+ })
+ })
+ }
+
+ pub fn to_proto(self) -> i32 {
+ match self {
+ GitFileStatus::Added => proto::GitStatus::Added as i32,
+ GitFileStatus::Modified => proto::GitStatus::Modified as i32,
+ GitFileStatus::Conflict => proto::GitStatus::Conflict as i32,
+ }
+ }
+}
+
#[derive(Clone, Debug, Ord, Hash, PartialOrd, Eq, PartialEq)]
-pub struct RepoPath(PathBuf);
+pub struct RepoPath(pub PathBuf);
impl RepoPath {
pub fn new(path: PathBuf) -> Self {
@@ -37,8 +37,8 @@ use language::{
range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CodeAction, CodeLabel,
Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent, File as _,
Language, LanguageRegistry, LanguageServerName, LocalFile, OffsetRangeExt, Operation, Patch,
- PendingLanguageServer, PointUtf16, RopeFingerprint, TextBufferSnapshot, ToOffset, ToPointUtf16,
- Transaction, Unclipped,
+ PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
+ Unclipped,
};
use log::error;
use lsp::{
@@ -69,7 +69,7 @@ use std::{
atomic::{AtomicUsize, Ordering::SeqCst},
Arc,
},
- time::{Duration, Instant, SystemTime},
+ time::{Duration, Instant},
};
use terminals::Terminals;
use util::{
@@ -1618,7 +1618,7 @@ impl Project {
&self,
buffer: ModelHandle<Buffer>,
cx: &mut ModelContext<Self>,
- ) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>> {
+ ) -> Task<Result<()>> {
let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
return Task::ready(Err(anyhow!("buffer doesn't have a file")));
};
@@ -5161,9 +5161,9 @@ impl Project {
return None;
}
let path = &project_path.path;
- changed_repos.iter().find(|(work_dir, change)| {
- path.starts_with(work_dir) && change.git_dir_changed
- })?;
+ changed_repos
+ .iter()
+ .find(|(work_dir, _)| path.starts_with(work_dir))?;
let receiver = receiver.clone();
let path = path.clone();
Some(async move {
@@ -5186,9 +5186,9 @@ impl Project {
return None;
}
let path = file.path();
- changed_repos.iter().find(|(work_dir, change)| {
- path.starts_with(work_dir) && change.git_dir_changed
- })?;
+ changed_repos
+ .iter()
+ .find(|(work_dir, _)| path.starts_with(work_dir))?;
Some((buffer, path.clone()))
})
.collect::<Vec<_>>();
@@ -5989,16 +5989,15 @@ impl Project {
.await?;
let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
- let (saved_version, fingerprint, mtime) = this
- .update(&mut cx, |this, cx| this.save_buffer(buffer, cx))
+ this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))
.await?;
- Ok(proto::BufferSaved {
+ Ok(buffer.read_with(&cx, |buffer, _| proto::BufferSaved {
project_id,
buffer_id,
- version: serialize_version(&saved_version),
- mtime: Some(mtime.into()),
- fingerprint: language::proto::serialize_fingerprint(fingerprint),
- })
+ version: serialize_version(buffer.saved_version()),
+ mtime: Some(buffer.saved_mtime().into()),
+ fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()),
+ }))
}
async fn handle_reload_buffers(
@@ -7,7 +7,7 @@ use client::{proto, Client};
use clock::ReplicaId;
use collections::{HashMap, VecDeque};
use fs::{
- repository::{GitFileStatus, GitRepository, RepoPath, RepoPathDescendants},
+ repository::{GitFileStatus, GitRepository, RepoPath},
Fs, LineEnding,
};
use futures::{
@@ -45,7 +45,7 @@ use std::{
fmt,
future::Future,
mem,
- ops::{Deref, DerefMut},
+ ops::{AddAssign, Deref, DerefMut, Sub},
path::{Path, PathBuf},
pin::Pin,
sync::{
@@ -55,7 +55,7 @@ use std::{
time::{Duration, SystemTime},
};
use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
-use util::{paths::HOME, ResultExt, TakeUntilExt};
+use util::{paths::HOME, ResultExt};
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
pub struct WorktreeId(usize);
@@ -124,15 +124,6 @@ pub struct Snapshot {
pub struct RepositoryEntry {
pub(crate) work_directory: WorkDirectoryEntry,
pub(crate) branch: Option<Arc<str>>,
- pub(crate) statuses: TreeMap<RepoPath, GitFileStatus>,
-}
-
-fn read_git_status(git_status: i32) -> Option<GitFileStatus> {
- proto::GitStatus::from_i32(git_status).map(|status| match status {
- proto::GitStatus::Added => GitFileStatus::Added,
- proto::GitStatus::Modified => GitFileStatus::Modified,
- proto::GitStatus::Conflict => GitFileStatus::Conflict,
- })
}
impl RepositoryEntry {
@@ -150,115 +141,19 @@ impl RepositoryEntry {
.map(|entry| RepositoryWorkDirectory(entry.path.clone()))
}
- pub fn status_for_path(&self, snapshot: &Snapshot, path: &Path) -> Option<GitFileStatus> {
- self.work_directory
- .relativize(snapshot, path)
- .and_then(|repo_path| {
- self.statuses
- .iter_from(&repo_path)
- .take_while(|(key, _)| key.starts_with(&repo_path))
- // Short circuit once we've found the highest level
- .take_until(|(_, status)| status == &&GitFileStatus::Conflict)
- .map(|(_, status)| status)
- .reduce(
- |status_first, status_second| match (status_first, status_second) {
- (GitFileStatus::Conflict, _) | (_, GitFileStatus::Conflict) => {
- &GitFileStatus::Conflict
- }
- (GitFileStatus::Modified, _) | (_, GitFileStatus::Modified) => {
- &GitFileStatus::Modified
- }
- _ => &GitFileStatus::Added,
- },
- )
- .copied()
- })
- }
-
- #[cfg(any(test, feature = "test-support"))]
- pub fn status_for_file(&self, snapshot: &Snapshot, path: &Path) -> Option<GitFileStatus> {
- self.work_directory
- .relativize(snapshot, path)
- .and_then(|repo_path| (&self.statuses).get(&repo_path))
- .cloned()
- }
-
- pub fn build_update(&self, other: &Self) -> proto::RepositoryEntry {
- let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
- let mut removed_statuses: Vec<String> = Vec::new();
-
- let mut self_statuses = self.statuses.iter().peekable();
- let mut other_statuses = other.statuses.iter().peekable();
- loop {
- match (self_statuses.peek(), other_statuses.peek()) {
- (Some((self_repo_path, self_status)), Some((other_repo_path, other_status))) => {
- match Ord::cmp(self_repo_path, other_repo_path) {
- Ordering::Less => {
- updated_statuses.push(make_status_entry(self_repo_path, self_status));
- self_statuses.next();
- }
- Ordering::Equal => {
- if self_status != other_status {
- updated_statuses
- .push(make_status_entry(self_repo_path, self_status));
- }
-
- self_statuses.next();
- other_statuses.next();
- }
- Ordering::Greater => {
- removed_statuses.push(make_repo_path(other_repo_path));
- other_statuses.next();
- }
- }
- }
- (Some((self_repo_path, self_status)), None) => {
- updated_statuses.push(make_status_entry(self_repo_path, self_status));
- self_statuses.next();
- }
- (None, Some((other_repo_path, _))) => {
- removed_statuses.push(make_repo_path(other_repo_path));
- other_statuses.next();
- }
- (None, None) => break,
- }
- }
-
+ pub fn build_update(&self, _: &Self) -> proto::RepositoryEntry {
proto::RepositoryEntry {
work_directory_id: self.work_directory_id().to_proto(),
branch: self.branch.as_ref().map(|str| str.to_string()),
- removed_repo_paths: removed_statuses,
- updated_statuses,
}
}
}
-fn make_repo_path(path: &RepoPath) -> String {
- path.as_os_str().to_string_lossy().to_string()
-}
-
-fn make_status_entry(path: &RepoPath, status: &GitFileStatus) -> proto::StatusEntry {
- proto::StatusEntry {
- repo_path: make_repo_path(path),
- status: match status {
- GitFileStatus::Added => proto::GitStatus::Added.into(),
- GitFileStatus::Modified => proto::GitStatus::Modified.into(),
- GitFileStatus::Conflict => proto::GitStatus::Conflict.into(),
- },
- }
-}
-
impl From<&RepositoryEntry> for proto::RepositoryEntry {
fn from(value: &RepositoryEntry) -> Self {
proto::RepositoryEntry {
work_directory_id: value.work_directory.to_proto(),
branch: value.branch.as_ref().map(|str| str.to_string()),
- updated_statuses: value
- .statuses
- .iter()
- .map(|(repo_path, status)| make_status_entry(repo_path, status))
- .collect(),
- removed_repo_paths: Default::default(),
}
}
}
@@ -330,7 +225,6 @@ pub struct BackgroundScannerState {
#[derive(Debug, Clone)]
pub struct LocalRepositoryEntry {
- pub(crate) work_dir_scan_id: usize,
pub(crate) git_dir_scan_id: usize,
pub(crate) repo_ptr: Arc<Mutex<dyn GitRepository>>,
/// Path to the actual .git folder.
@@ -864,18 +758,13 @@ impl LocalWorktree {
entry.path.clone(),
GitRepositoryChange {
old_repository: None,
- git_dir_changed: true,
},
));
}
new_repos.next();
}
Ordering::Equal => {
- let git_dir_changed =
- new_repo.git_dir_scan_id != old_repo.git_dir_scan_id;
- let work_dir_changed =
- new_repo.work_dir_scan_id != old_repo.work_dir_scan_id;
- if git_dir_changed || work_dir_changed {
+ if new_repo.git_dir_scan_id != old_repo.git_dir_scan_id {
if let Some(entry) = new_snapshot.entry_for_id(new_entry_id) {
let old_repo = old_snapshot
.repository_entries
@@ -885,7 +774,6 @@ impl LocalWorktree {
entry.path.clone(),
GitRepositoryChange {
old_repository: old_repo,
- git_dir_changed,
},
));
}
@@ -903,7 +791,6 @@ impl LocalWorktree {
entry.path.clone(),
GitRepositoryChange {
old_repository: old_repo,
- git_dir_changed: true,
},
));
}
@@ -917,7 +804,6 @@ impl LocalWorktree {
entry.path.clone(),
GitRepositoryChange {
old_repository: None,
- git_dir_changed: true,
},
));
}
@@ -933,7 +819,6 @@ impl LocalWorktree {
entry.path.clone(),
GitRepositoryChange {
old_repository: old_repo,
- git_dir_changed: true,
},
));
}
@@ -1038,7 +923,7 @@ impl LocalWorktree {
path: Arc<Path>,
has_changed_file: bool,
cx: &mut ModelContext<Worktree>,
- ) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>> {
+ ) -> Task<Result<()>> {
let handle = cx.handle();
let buffer = buffer_handle.read(cx);
@@ -1094,7 +979,7 @@ impl LocalWorktree {
buffer.did_save(version.clone(), fingerprint, entry.mtime, cx);
});
- Ok((version, fingerprint, entry.mtime))
+ Ok(())
})
}
@@ -1405,7 +1290,7 @@ impl RemoteWorktree {
&self,
buffer_handle: ModelHandle<Buffer>,
cx: &mut ModelContext<Worktree>,
- ) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>> {
+ ) -> Task<Result<()>> {
let buffer = buffer_handle.read(cx);
let buffer_id = buffer.remote_id();
let version = buffer.version();
@@ -1430,7 +1315,7 @@ impl RemoteWorktree {
buffer.did_save(version.clone(), fingerprint, mtime, cx);
});
- Ok((version, fingerprint, mtime))
+ Ok(())
})
}
@@ -1574,7 +1459,7 @@ impl Snapshot {
fn delete_entry(&mut self, entry_id: ProjectEntryId) -> Option<Arc<Path>> {
let removed_entry = self.entries_by_id.remove(&entry_id, &())?;
self.entries_by_path = {
- let mut cursor = self.entries_by_path.cursor();
+ let mut cursor = self.entries_by_path.cursor::<TraversalProgress>();
let mut new_entries_by_path =
cursor.slice(&TraversalTarget::Path(&removed_entry.path), Bias::Left, &());
while let Some(entry) = cursor.item() {
@@ -1592,6 +1477,14 @@ impl Snapshot {
Some(removed_entry.path)
}
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn status_for_file(&self, path: impl Into<PathBuf>) -> Option<GitFileStatus> {
+ let path = path.into();
+ self.entries_by_path
+ .get(&PathKey(Arc::from(path)), &())
+ .and_then(|entry| entry.git_status)
+ }
+
pub(crate) fn apply_remote_update(&mut self, mut update: proto::UpdateWorktree) -> Result<()> {
let mut entries_by_path_edits = Vec::new();
let mut entries_by_id_edits = Vec::new();
@@ -1643,26 +1536,10 @@ impl Snapshot {
ProjectEntryId::from_proto(repository.work_directory_id).into();
if let Some(entry) = self.entry_for_id(*work_directory_entry) {
- let mut statuses = TreeMap::default();
- for status_entry in repository.updated_statuses {
- let Some(git_file_status) = read_git_status(status_entry.status) else {
- continue;
- };
-
- let repo_path = RepoPath::new(status_entry.repo_path.into());
- statuses.insert(repo_path, git_file_status);
- }
-
let work_directory = RepositoryWorkDirectory(entry.path.clone());
if self.repository_entries.get(&work_directory).is_some() {
self.repository_entries.update(&work_directory, |repo| {
repo.branch = repository.branch.map(Into::into);
- repo.statuses.insert_tree(statuses);
-
- for repo_path in repository.removed_repo_paths {
- let repo_path = RepoPath::new(repo_path.into());
- repo.statuses.remove(&repo_path);
- }
});
} else {
self.repository_entries.insert(
@@ -1670,7 +1547,6 @@ impl Snapshot {
RepositoryEntry {
work_directory: work_directory_entry,
branch: repository.branch.map(Into::into),
- statuses,
},
)
}
@@ -1799,6 +1675,64 @@ impl Snapshot {
})
}
+ /// Update the `git_status` of the given entries such that files'
+ /// statuses bubble up to their ancestor directories.
+ pub fn propagate_git_statuses(&self, result: &mut [Entry]) {
+ let mut cursor = self
+ .entries_by_path
+ .cursor::<(TraversalProgress, GitStatuses)>();
+ let mut entry_stack = Vec::<(usize, GitStatuses)>::new();
+
+ let mut result_ix = 0;
+ loop {
+ let next_entry = result.get(result_ix);
+ let containing_entry = entry_stack.last().map(|(ix, _)| &result[*ix]);
+
+ let entry_to_finish = match (containing_entry, next_entry) {
+ (Some(_), None) => entry_stack.pop(),
+ (Some(containing_entry), Some(next_path)) => {
+ if !next_path.path.starts_with(&containing_entry.path) {
+ entry_stack.pop()
+ } else {
+ None
+ }
+ }
+ (None, Some(_)) => None,
+ (None, None) => break,
+ };
+
+ if let Some((entry_ix, prev_statuses)) = entry_to_finish {
+ cursor.seek_forward(
+ &TraversalTarget::PathSuccessor(&result[entry_ix].path),
+ Bias::Left,
+ &(),
+ );
+
+ let statuses = cursor.start().1 - prev_statuses;
+
+ result[entry_ix].git_status = if statuses.conflict > 0 {
+ Some(GitFileStatus::Conflict)
+ } else if statuses.modified > 0 {
+ Some(GitFileStatus::Modified)
+ } else if statuses.added > 0 {
+ Some(GitFileStatus::Added)
+ } else {
+ None
+ };
+ } else {
+ if result[result_ix].is_dir() {
+ cursor.seek_forward(
+ &TraversalTarget::Path(&result[result_ix].path),
+ Bias::Left,
+ &(),
+ );
+ entry_stack.push((result_ix, cursor.start().1));
+ }
+ result_ix += 1;
+ }
+ }
+ }
+
pub fn paths(&self) -> impl Iterator<Item = &Arc<Path>> {
let empty_path = Path::new("");
self.entries_by_path
@@ -1895,6 +1829,14 @@ impl LocalSnapshot {
self.git_repositories.get(&repo.work_directory.0)
}
+ pub(crate) fn local_repo_for_path(
+ &self,
+ path: &Path,
+ ) -> Option<(RepositoryWorkDirectory, &LocalRepositoryEntry)> {
+ let (path, repo) = self.repository_and_work_directory_for_path(path)?;
+ Some((path, self.git_repositories.get(&repo.work_directory_id())?))
+ }
+
pub(crate) fn repo_for_metadata(
&self,
path: &Path,
@@ -2039,7 +1981,8 @@ impl LocalSnapshot {
entry
}
- fn build_repo(&mut self, parent_path: Arc<Path>, fs: &dyn Fs) -> Option<()> {
+ #[must_use = "Changed paths must be used for diffing later"]
+ fn build_repo(&mut self, parent_path: Arc<Path>, fs: &dyn Fs) -> Option<Vec<Arc<Path>>> {
let abs_path = self.abs_path.join(&parent_path);
let work_dir: Arc<Path> = parent_path.parent().unwrap().into();
@@ -2056,35 +1999,67 @@ impl LocalSnapshot {
.entry_for_path(work_dir.clone())
.map(|entry| entry.id)?;
- if self.git_repositories.get(&work_dir_id).is_none() {
- let repo = fs.open_repo(abs_path.as_path())?;
- let work_directory = RepositoryWorkDirectory(work_dir.clone());
- let scan_id = self.scan_id;
+ if self.git_repositories.get(&work_dir_id).is_some() {
+ return None;
+ }
- let repo_lock = repo.lock();
+ let repo = fs.open_repo(abs_path.as_path())?;
+ let work_directory = RepositoryWorkDirectory(work_dir.clone());
- self.repository_entries.insert(
- work_directory,
- RepositoryEntry {
- work_directory: work_dir_id.into(),
- branch: repo_lock.branch_name().map(Into::into),
- statuses: repo_lock.statuses().unwrap_or_default(),
- },
- );
- drop(repo_lock);
-
- self.git_repositories.insert(
- work_dir_id,
- LocalRepositoryEntry {
- work_dir_scan_id: scan_id,
- git_dir_scan_id: scan_id,
- repo_ptr: repo,
- git_dir_path: parent_path.clone(),
- },
- )
+ let repo_lock = repo.lock();
+
+ self.repository_entries.insert(
+ work_directory.clone(),
+ RepositoryEntry {
+ work_directory: work_dir_id.into(),
+ branch: repo_lock.branch_name().map(Into::into),
+ },
+ );
+
+ let changed_paths = self.scan_statuses(repo_lock.deref(), &work_directory);
+
+ drop(repo_lock);
+
+ self.git_repositories.insert(
+ work_dir_id,
+ LocalRepositoryEntry {
+ git_dir_scan_id: 0,
+ repo_ptr: repo,
+ git_dir_path: parent_path.clone(),
+ },
+ );
+
+ Some(changed_paths)
+ }
+
+ #[must_use = "Changed paths must be used for diffing later"]
+ fn scan_statuses(
+ &mut self,
+ repo_ptr: &dyn GitRepository,
+ work_directory: &RepositoryWorkDirectory,
+ ) -> Vec<Arc<Path>> {
+ let mut changes = vec![];
+ let mut edits = vec![];
+ for mut entry in self
+ .descendent_entries(false, false, &work_directory.0)
+ .cloned()
+ {
+ let Ok(repo_path) = entry.path.strip_prefix(&work_directory.0) else {
+ continue;
+ };
+ let git_file_status = repo_ptr
+ .status(&RepoPath(repo_path.into()))
+ .log_err()
+ .flatten();
+ if entry.git_status != git_file_status {
+ entry.git_status = git_file_status;
+ changes.push(entry.path.clone());
+ edits.push(Edit::Insert(entry));
+ }
}
- Some(())
+ self.entries_by_path.edit(edits, &());
+ changes
}
fn ancestor_inodes_for_path(&self, path: &Path) -> TreeSet<u64> {
@@ -2139,13 +2114,14 @@ impl BackgroundScannerState {
self.snapshot.insert_entry(entry, fs)
}
+ #[must_use = "Changed paths must be used for diffing later"]
fn populate_dir(
&mut self,
parent_path: Arc<Path>,
entries: impl IntoIterator<Item = Entry>,
ignore: Option<Arc<Gitignore>>,
fs: &dyn Fs,
- ) {
+ ) -> Option<Vec<Arc<Path>>> {
let mut parent_entry = if let Some(parent_entry) = self
.snapshot
.entries_by_path
@@ -2157,7 +2133,7 @@ impl BackgroundScannerState {
"populating a directory {:?} that has been removed",
parent_path
);
- return;
+ return None;
};
match parent_entry.kind {
@@ -2165,7 +2141,7 @@ impl BackgroundScannerState {
parent_entry.kind = EntryKind::Dir;
}
EntryKind::Dir => {}
- _ => return,
+ _ => return None,
}
if let Some(ignore) = ignore {
@@ -2175,10 +2151,6 @@ impl BackgroundScannerState {
.insert(abs_parent_path, (ignore, false));
}
- if parent_path.file_name() == Some(&DOT_GIT) {
- self.snapshot.build_repo(parent_path, fs);
- }
-
let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)];
let mut entries_by_id_edits = Vec::new();
@@ -2197,6 +2169,11 @@ impl BackgroundScannerState {
.entries_by_path
.edit(entries_by_path_edits, &());
self.snapshot.entries_by_id.edit(entries_by_id_edits, &());
+
+ if parent_path.file_name() == Some(&DOT_GIT) {
+ return self.snapshot.build_repo(parent_path, fs);
+ }
+ None
}
fn remove_path(&mut self, path: &Path) {
@@ -2518,6 +2495,7 @@ pub struct Entry {
pub mtime: SystemTime,
pub is_symlink: bool,
pub is_ignored: bool,
+ pub git_status: Option<GitFileStatus>,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
@@ -2546,9 +2524,6 @@ pub enum PathChange {
pub struct GitRepositoryChange {
/// The previous state of the repository, if it already existed.
pub old_repository: Option<RepositoryEntry>,
- /// Whether the content of the .git directory changed. This will be false
- /// if only the repository's work directory changed.
- pub git_dir_changed: bool,
}
pub type UpdatedEntriesSet = Arc<[(Arc<Path>, ProjectEntryId, PathChange)]>;
@@ -2573,6 +2548,7 @@ impl Entry {
mtime: metadata.mtime,
is_symlink: metadata.is_symlink,
is_ignored: false,
+ git_status: None,
}
}
@@ -2583,6 +2559,10 @@ impl Entry {
pub fn is_file(&self) -> bool {
matches!(self.kind, EntryKind::File(_))
}
+
+ pub fn git_status(&self) -> Option<GitFileStatus> {
+ self.git_status
+ }
}
impl sum_tree::Item for Entry {
@@ -2600,12 +2580,23 @@ impl sum_tree::Item for Entry {
visible_file_count = 0;
}
+ let mut statuses = GitStatuses::default();
+ match self.git_status {
+ Some(status) => match status {
+ GitFileStatus::Added => statuses.added = 1,
+ GitFileStatus::Modified => statuses.modified = 1,
+ GitFileStatus::Conflict => statuses.conflict = 1,
+ },
+ None => {}
+ }
+
EntrySummary {
max_path: self.path.clone(),
count: 1,
visible_count,
file_count,
visible_file_count,
+ statuses,
}
}
}
@@ -2625,6 +2616,7 @@ pub struct EntrySummary {
visible_count: usize,
file_count: usize,
visible_file_count: usize,
+ statuses: GitStatuses,
}
impl Default for EntrySummary {
@@ -2635,6 +2627,7 @@ impl Default for EntrySummary {
visible_count: 0,
file_count: 0,
visible_file_count: 0,
+ statuses: Default::default(),
}
}
}
@@ -2648,6 +2641,7 @@ impl sum_tree::Summary for EntrySummary {
self.visible_count += rhs.visible_count;
self.file_count += rhs.file_count;
self.visible_file_count += rhs.visible_file_count;
+ self.statuses += rhs.statuses;
}
}
@@ -2807,6 +2801,7 @@ impl BackgroundScanner {
let mut state = self.state.lock();
state.snapshot.completed_scan_id = state.snapshot.scan_id;
}
+
self.send_status_update(false, None);
// Process any any FS events that occurred while performing the initial scan.
@@ -2862,14 +2857,16 @@ impl BackgroundScanner {
self.update_ignore_statuses().await;
{
- let mut snapshot = &mut self.state.lock().snapshot;
+ let mut state = self.state.lock();
if let Some(paths) = paths {
for path in paths {
- self.reload_repo_for_file_path(&path, &mut *snapshot, self.fs.as_ref());
+ self.reload_git_repo(&path, &mut *state, self.fs.as_ref());
}
}
+ let mut snapshot = &mut state.snapshot;
+
let mut git_repositories = mem::take(&mut snapshot.git_repositories);
git_repositories.retain(|work_directory_id, _| {
snapshot
@@ -2993,14 +2990,18 @@ impl BackgroundScanner {
let mut new_jobs: Vec<Option<ScanJob>> = Vec::new();
let mut ignore_stack = job.ignore_stack.clone();
let mut new_ignore = None;
- let (root_abs_path, root_char_bag, next_entry_id) = {
+ let (root_abs_path, root_char_bag, next_entry_id, repository) = {
let snapshot = &self.state.lock().snapshot;
(
snapshot.abs_path().clone(),
snapshot.root_char_bag,
self.next_entry_id.clone(),
+ snapshot
+ .local_repo_for_path(&job.path)
+ .map(|(work_dir, repo)| (work_dir, repo.clone())),
)
};
+
let mut child_paths = self.fs.read_dir(&job.abs_path).await?;
while let Some(child_abs_path) = child_paths.next().await {
let child_abs_path: Arc<Path> = match child_abs_path {
@@ -3093,6 +3094,18 @@ impl BackgroundScanner {
}
} else {
child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false);
+ if !child_entry.is_ignored {
+ if let Some((repo_path, repo)) = &repository {
+ if let Ok(path) = child_path.strip_prefix(&repo_path.0) {
+ child_entry.git_status = repo
+ .repo_ptr
+ .lock()
+ .status(&RepoPath(path.into()))
+ .log_err()
+ .flatten();
+ }
+ }
+ }
}
new_entries.push(child_entry);
@@ -3100,10 +3113,19 @@ impl BackgroundScanner {
{
let mut state = self.state.lock();
- state.populate_dir(job.path.clone(), new_entries, new_ignore, self.fs.as_ref());
+ let changed_paths =
+ state.populate_dir(job.path.clone(), new_entries, new_ignore, self.fs.as_ref());
if let Err(ix) = state.changed_paths.binary_search(&job.path) {
state.changed_paths.insert(ix, job.path.clone());
}
+ if let Some(changed_paths) = changed_paths {
+ util::extend_sorted(
+ &mut state.changed_paths,
+ changed_paths,
+ usize::MAX,
+ Ord::cmp,
+ )
+ }
}
for new_job in new_jobs {
@@ -3147,12 +3169,14 @@ impl BackgroundScanner {
// refreshed. Do this before adding any new entries, so that renames can be
// detected regardless of the order of the paths.
let mut event_paths = Vec::<Arc<Path>>::with_capacity(abs_paths.len());
+ let mut event_metadata = Vec::<_>::with_capacity(abs_paths.len());
for (abs_path, metadata) in abs_paths.iter().zip(metadata.iter()) {
if let Ok(path) = abs_path.strip_prefix(&root_canonical_path) {
if matches!(metadata, Ok(None)) || doing_recursive_update {
state.remove_path(path);
}
event_paths.push(path.into());
+ event_metadata.push(metadata);
} else {
log::error!(
"unexpected event {:?} for root path {:?}",
@@ -3162,7 +3186,7 @@ impl BackgroundScanner {
}
}
- for (path, metadata) in event_paths.iter().cloned().zip(metadata.into_iter()) {
+ for (path, metadata) in event_paths.iter().cloned().zip(event_metadata.into_iter()) {
let abs_path: Arc<Path> = root_abs_path.join(&path).into();
match metadata {
@@ -3170,6 +3194,7 @@ impl BackgroundScanner {
let ignore_stack = state
.snapshot
.ignore_stack_for_abs_path(&abs_path, metadata.is_dir);
+
let mut fs_entry = Entry::new(
path.clone(),
&metadata,
@@ -3177,6 +3202,24 @@ impl BackgroundScanner {
state.snapshot.root_char_bag,
);
fs_entry.is_ignored = ignore_stack.is_all();
+
+ if !fs_entry.is_ignored {
+ if !fs_entry.is_dir() {
+ if let Some((work_dir, repo)) =
+ state.snapshot.local_repo_for_path(&path)
+ {
+ if let Ok(path) = path.strip_prefix(work_dir.0) {
+ fs_entry.git_status = repo
+ .repo_ptr
+ .lock()
+ .status(&RepoPath(path.into()))
+ .log_err()
+ .flatten()
+ }
+ }
+ }
+ }
+
state.insert_entry(fs_entry, self.fs.as_ref());
if let Some(scan_queue_tx) = &scan_queue_tx {
@@ -3219,8 +3262,6 @@ impl BackgroundScanner {
.components()
.any(|component| component.as_os_str() == *DOT_GIT)
{
- let scan_id = snapshot.scan_id;
-
if let Some(repository) = snapshot.repository_for_work_directory(path) {
let entry = repository.work_directory.0;
snapshot.git_repositories.remove(&entry);
@@ -3230,113 +3271,80 @@ impl BackgroundScanner {
.remove(&RepositoryWorkDirectory(path.into()));
return Some(());
}
-
- let repo = snapshot.repository_for_path(&path)?;
- let repo_path = repo.work_directory.relativize(&snapshot, &path)?;
- let work_dir = repo.work_directory(snapshot)?;
- let work_dir_id = repo.work_directory;
-
- snapshot
- .git_repositories
- .update(&work_dir_id, |entry| entry.work_dir_scan_id = scan_id);
-
- snapshot.repository_entries.update(&work_dir, |entry| {
- entry
- .statuses
- .remove_range(&repo_path, &RepoPathDescendants(&repo_path))
- });
}
+ // TODO statuses
+ // Track when a .git is removed and iterate over the file system there
+
Some(())
}
- fn reload_repo_for_file_path(
+ fn reload_git_repo(
&self,
path: &Path,
- snapshot: &mut LocalSnapshot,
+ state: &mut BackgroundScannerState,
fs: &dyn Fs,
) -> Option<()> {
- let scan_id = snapshot.scan_id;
+ let scan_id = state.snapshot.scan_id;
if path
.components()
.any(|component| component.as_os_str() == *DOT_GIT)
{
let (entry_id, repo_ptr) = {
- let Some((entry_id, repo)) = snapshot.repo_for_metadata(&path) else {
+ let Some((entry_id, repo)) = state.snapshot.repo_for_metadata(&path) else {
let dot_git_dir = path.ancestors()
.skip_while(|ancestor| ancestor.file_name() != Some(&*DOT_GIT))
.next()?;
- snapshot.build_repo(dot_git_dir.into(), fs);
+ let changed_paths = state.snapshot.build_repo(dot_git_dir.into(), fs);
+ if let Some(changed_paths) = changed_paths {
+ util::extend_sorted(
+ &mut state.changed_paths,
+ changed_paths,
+ usize::MAX,
+ Ord::cmp,
+ );
+ }
+
return None;
};
if repo.git_dir_scan_id == scan_id {
return None;
}
+
(*entry_id, repo.repo_ptr.to_owned())
};
- let work_dir = snapshot
+ let work_dir = state
+ .snapshot
.entry_for_id(entry_id)
.map(|entry| RepositoryWorkDirectory(entry.path.clone()))?;
let repo = repo_ptr.lock();
repo.reload_index();
let branch = repo.branch_name();
- let statuses = repo.statuses().unwrap_or_default();
- snapshot.git_repositories.update(&entry_id, |entry| {
- entry.work_dir_scan_id = scan_id;
+ state.snapshot.git_repositories.update(&entry_id, |entry| {
entry.git_dir_scan_id = scan_id;
});
- snapshot.repository_entries.update(&work_dir, |entry| {
- entry.branch = branch.map(Into::into);
- entry.statuses = statuses;
- });
- } else {
- if snapshot
- .entry_for_path(&path)
- .map(|entry| entry.is_ignored)
- .unwrap_or(false)
- {
- self.remove_repo_path(&path, snapshot);
- return None;
- }
-
- let repo = snapshot.repository_for_path(&path)?;
-
- let work_dir = repo.work_directory(snapshot)?;
- let work_dir_id = repo.work_directory.clone();
-
- let (local_repo, git_dir_scan_id) =
- snapshot.git_repositories.update(&work_dir_id, |entry| {
- entry.work_dir_scan_id = scan_id;
- (entry.repo_ptr.clone(), entry.git_dir_scan_id)
- })?;
-
- // Short circuit if we've already scanned everything
- if git_dir_scan_id == scan_id {
- return None;
- }
-
- let mut repository = snapshot.repository_entries.remove(&work_dir)?;
-
- for entry in snapshot.descendent_entries(false, false, path) {
- let Some(repo_path) = repo.work_directory.relativize(snapshot, &entry.path) else {
- continue;
- };
+ state
+ .snapshot
+ .snapshot
+ .repository_entries
+ .update(&work_dir, |entry| {
+ entry.branch = branch.map(Into::into);
+ });
- let status = local_repo.lock().status(&repo_path);
- if let Some(status) = status {
- repository.statuses.insert(repo_path.clone(), status);
- } else {
- repository.statuses.remove(&repo_path);
- }
- }
+ let changed_paths = state.snapshot.scan_statuses(repo.deref(), &work_dir);
- snapshot.repository_entries.insert(work_dir, repository)
+ util::extend_sorted(
+ &mut state.changed_paths,
+ changed_paths,
+ usize::MAX,
+ Ord::cmp,
+ )
}
Some(())
@@ -3501,7 +3509,6 @@ impl BackgroundScanner {
if new_paths.item().map_or(false, |e| e.path < path.0) {
new_paths.seek_forward(&path, Bias::Left, &());
}
-
loop {
match (old_paths.item(), new_paths.item()) {
(Some(old_entry), Some(new_entry)) => {
@@ -3520,6 +3527,13 @@ impl BackgroundScanner {
}
Ordering::Equal => {
if self.phase == EventsReceivedDuringInitialScan {
+ if old_entry.id != new_entry.id {
+ changes.push((
+ old_entry.path.clone(),
+ old_entry.id,
+ Removed,
+ ));
+ }
// If the worktree was not fully initialized when this event was generated,
// we can't know whether this entry was added during the scan or whether
// it was merely updated.
@@ -3702,6 +3716,39 @@ impl<'a> Default for TraversalProgress<'a> {
}
}
+#[derive(Clone, Debug, Default, Copy)]
+struct GitStatuses {
+ added: usize,
+ modified: usize,
+ conflict: usize,
+}
+
+impl AddAssign for GitStatuses {
+ fn add_assign(&mut self, rhs: Self) {
+ self.added += rhs.added;
+ self.modified += rhs.modified;
+ self.conflict += rhs.conflict;
+ }
+}
+
+impl Sub for GitStatuses {
+ type Output = GitStatuses;
+
+ fn sub(self, rhs: Self) -> Self::Output {
+ GitStatuses {
+ added: self.added - rhs.added,
+ modified: self.modified - rhs.modified,
+ conflict: self.conflict - rhs.conflict,
+ }
+ }
+}
+
+impl<'a> sum_tree::Dimension<'a, EntrySummary> for GitStatuses {
+ fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
+ *self += summary.statuses
+ }
+}
+
pub struct Traversal<'a> {
cursor: sum_tree::Cursor<'a, Entry, TraversalProgress<'a>>,
include_ignored: bool,
@@ -1002,6 +1002,7 @@ impl ProjectPanel {
mtime: entry.mtime,
is_symlink: false,
is_ignored: false,
+ git_status: entry.git_status,
});
}
if expanded_dir_ids.binary_search(&entry.id).is_err()
@@ -1011,6 +1012,9 @@ impl ProjectPanel {
}
entry_iter.advance();
}
+
+ snapshot.propagate_git_statuses(&mut visible_worktree_entries);
+
visible_worktree_entries.sort_by(|entry_a, entry_b| {
let mut components_a = entry_a.path.components().peekable();
let mut components_b = entry_b.path.components().peekable();
@@ -1108,14 +1112,8 @@ impl ProjectPanel {
.unwrap_or(&[]);
let entry_range = range.start.saturating_sub(ix)..end_ix - ix;
- for (entry, repo) in
- snapshot.entries_with_repositories(visible_worktree_entries[entry_range].iter())
- {
- let status = (git_status_setting
- && entry.path.parent().is_some()
- && !entry.is_ignored)
- .then(|| repo.and_then(|repo| repo.status_for_path(&snapshot, &entry.path)))
- .flatten();
+ for entry in visible_worktree_entries[entry_range].iter() {
+ let status = git_status_setting.then(|| entry.git_status).flatten();
let mut details = EntryDetails {
filename: entry
@@ -1005,13 +1005,12 @@ message Entry {
Timestamp mtime = 5;
bool is_symlink = 6;
bool is_ignored = 7;
+ optional GitStatus git_status = 8;
}
message RepositoryEntry {
uint64 work_directory_id = 1;
optional string branch = 2;
- repeated string removed_repo_paths = 3;
- repeated StatusEntry updated_statuses = 4;
}
message StatusEntry {
@@ -480,6 +480,11 @@ impl<T: Item> SumTree<T> {
} => child_trees.last().unwrap().rightmost_leaf(),
}
}
+
+ #[cfg(debug_assertions)]
+ pub fn _debug_entries(&self) -> Vec<&T> {
+ self.iter().collect::<Vec<_>>()
+ }
}
impl<T: Item + PartialEq> PartialEq for SumTree<T> {
@@ -57,7 +57,7 @@ pub fn post_inc<T: From<u8> + AddAssign<T> + Copy>(value: &mut T) -> T {
}
/// Extend a sorted vector with a sorted sequence of items, maintaining the vector's sort order and
-/// enforcing a maximum length. Sort the items according to the given callback. Before calling this,
+/// enforcing a maximum length. This also de-duplicates items. Sort the items according to the given callback. Before calling this,
/// both `vec` and `new_items` should already be sorted according to the `cmp` comparator.
pub fn extend_sorted<T, I, F>(vec: &mut Vec<T>, new_items: I, limit: usize, mut cmp: F)
where
@@ -9,6 +9,7 @@
"version": "1.0.0",
"license": "ISC",
"dependencies": {
+ "@tokens-studio/types": "^0.2.3",
"@types/chroma-js": "^2.4.0",
"@types/node": "^18.14.1",
"ayu": "^8.0.1",
@@ -53,6 +54,11 @@
"@jridgewell/sourcemap-codec": "^1.4.10"
}
},
+ "node_modules/@tokens-studio/types": {
+ "version": "0.2.3",
+ "resolved": "https://registry.npmjs.org/@tokens-studio/types/-/types-0.2.3.tgz",
+ "integrity": "sha512-2KN3V0JPf+Zh8aoVMwykJq29Lsi7vYgKGYBQ/zQ+FbDEmrH6T/Vwn8kG7cvbTmW1JAAvgxVxMIivgC9PmFelNA=="
+ },
"node_modules/@tsconfig/node10": {
"version": "1.0.9",
"resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz",
@@ -271,6 +277,11 @@
"@jridgewell/sourcemap-codec": "^1.4.10"
}
},
+ "@tokens-studio/types": {
+ "version": "0.2.3",
+ "resolved": "https://registry.npmjs.org/@tokens-studio/types/-/types-0.2.3.tgz",
+ "integrity": "sha512-2KN3V0JPf+Zh8aoVMwykJq29Lsi7vYgKGYBQ/zQ+FbDEmrH6T/Vwn8kG7cvbTmW1JAAvgxVxMIivgC9PmFelNA=="
+ },
"@tsconfig/node10": {
"version": "1.0.9",
"resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz",
@@ -5,11 +5,13 @@
"main": "index.js",
"scripts": {
"build": "ts-node ./src/buildThemes.ts",
- "build-licenses": "ts-node ./src/buildLicenses.ts"
+ "build-licenses": "ts-node ./src/buildLicenses.ts",
+ "build-tokens": "ts-node ./src/buildTokens.ts"
},
"author": "",
"license": "ISC",
"dependencies": {
+ "@tokens-studio/types": "^0.2.3",
"@types/chroma-js": "^2.4.0",
"@types/node": "^18.14.1",
"ayu": "^8.0.1",
@@ -30,17 +30,19 @@ function generateLicenseFile(themes: ThemeConfig[]) {
checkLicenses(themes)
for (const theme of themes) {
const licenseText = fs.readFileSync(theme.licenseFile).toString()
- writeLicense(theme.name, theme.licenseUrl, licenseText)
+ writeLicense(theme.name, licenseText, theme.licenseUrl)
}
}
function writeLicense(
themeName: string,
- licenseUrl: string,
- licenseText: String
+ licenseText: string,
+ licenseUrl?: string
) {
process.stdout.write(
- `## [${themeName}](${licenseUrl})\n\n${licenseText}\n********************************************************************************\n\n`
+ licenseUrl
+ ? `## [${themeName}](${licenseUrl})\n\n${licenseText}\n********************************************************************************\n\n`
+ : `## ${themeName}\n\n${licenseText}\n********************************************************************************\n\n`
)
}
@@ -2,7 +2,7 @@ import * as fs from "fs"
import { tmpdir } from "os"
import * as path from "path"
import app from "./styleTree/app"
-import { ColorScheme, createColorScheme } from "./themes/common/colorScheme"
+import { ColorScheme, createColorScheme } from "./theme/colorScheme"
import snakeCase from "./utils/snakeCase"
import { themes } from "./themes"
@@ -35,7 +35,9 @@ function writeThemes(colorSchemes: ColorScheme[], outputDirectory: string) {
}
}
-const colorSchemes: ColorScheme[] = themes.map((theme) => createColorScheme(theme))
+const colorSchemes: ColorScheme[] = themes.map((theme) =>
+ createColorScheme(theme)
+)
// Write new themes to theme directory
writeThemes(colorSchemes, `${assetsDirectory}/themes`)
@@ -0,0 +1,39 @@
+import * as fs from "fs"
+import * as path from "path"
+import { ColorScheme, createColorScheme } from "./common"
+import { themes } from "./themes"
+import { slugify } from "./utils/slugify"
+import { colorSchemeTokens } from "./theme/tokens/colorScheme"
+
+const TOKENS_DIRECTORY = path.join(__dirname, "..", "target", "tokens")
+
+function clearTokens(tokensDirectory: string) {
+ if (!fs.existsSync(tokensDirectory)) {
+ fs.mkdirSync(tokensDirectory, { recursive: true })
+ } else {
+ for (const file of fs.readdirSync(tokensDirectory)) {
+ if (file.endsWith(".json")) {
+ fs.unlinkSync(path.join(tokensDirectory, file))
+ }
+ }
+ }
+}
+
+function writeTokens(colorSchemes: ColorScheme[], tokensDirectory: string) {
+ clearTokens(tokensDirectory)
+
+ for (const colorScheme of colorSchemes) {
+ const fileName = slugify(colorScheme.name)
+ const tokens = colorSchemeTokens(colorScheme)
+ const tokensJSON = JSON.stringify(tokens, null, 2)
+ const outPath = path.join(tokensDirectory, `${fileName}.json`)
+ fs.writeFileSync(outPath, tokensJSON)
+ console.log(`- ${outPath} created`)
+ }
+}
+
+const colorSchemes: ColorScheme[] = themes.map((theme) =>
+ createColorScheme(theme)
+)
+
+writeTokens(colorSchemes, TOKENS_DIRECTORY)
@@ -1,5 +1,5 @@
import chroma from "chroma-js"
-export * from "./themes/common"
+export * from "./theme"
export { chroma }
export const fontFamilies = {
@@ -27,7 +27,7 @@ export type FontWeight =
| "bold"
| "extra_bold"
| "black"
-
+
export const fontWeights: { [key: string]: FontWeight } = {
thin: "thin",
extra_light: "extra_light",
@@ -19,7 +19,7 @@ import terminal from "./terminal"
import contactList from "./contactList"
import lspLogMenu from "./lspLogMenu"
import incomingCallNotification from "./incomingCallNotification"
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import feedback from "./feedback"
import welcome from "./welcome"
import copilot from "./copilot"
@@ -1,85 +1,85 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { text, border, background, foreground } from "./components"
import editor from "./editor"
export default function assistant(colorScheme: ColorScheme) {
- const layer = colorScheme.highest;
+ const layer = colorScheme.highest
return {
- container: {
- background: editor(colorScheme).background,
- padding: { left: 12 }
- },
- header: {
- border: border(layer, "default", { bottom: true, top: true }),
- margin: { bottom: 6, top: 6 },
- background: editor(colorScheme).background
- },
- userSender: {
- ...text(layer, "sans", "default", { size: "sm", weight: "bold" }),
- },
- assistantSender: {
- ...text(layer, "sans", "accent", { size: "sm", weight: "bold" }),
- },
- systemSender: {
- ...text(layer, "sans", "variant", { size: "sm", weight: "bold" }),
- },
- sentAt: {
- margin: { top: 2, left: 8 },
- ...text(layer, "sans", "default", { size: "2xs" }),
- },
- modelInfoContainer: {
- margin: { right: 16, top: 4 },
- },
- model: {
- background: background(layer, "on"),
- border: border(layer, "on", { overlay: true }),
- padding: 4,
- cornerRadius: 4,
- ...text(layer, "sans", "default", { size: "xs" }),
- hover: {
- background: background(layer, "on", "hovered"),
- }
- },
- remainingTokens: {
- background: background(layer, "on"),
- border: border(layer, "on", { overlay: true }),
- padding: 4,
- margin: { left: 4 },
- cornerRadius: 4,
- ...text(layer, "sans", "positive", { size: "xs" }),
- },
- noRemainingTokens: {
- background: background(layer, "on"),
- border: border(layer, "on", { overlay: true }),
- padding: 4,
- margin: { left: 4 },
- cornerRadius: 4,
- ...text(layer, "sans", "negative", { size: "xs" }),
- },
- errorIcon: {
- margin: { left: 8 },
- color: foreground(layer, "negative"),
- width: 12,
- },
- apiKeyEditor: {
- background: background(layer, "on"),
- cornerRadius: 6,
- text: text(layer, "mono", "on"),
- placeholderText: text(layer, "mono", "on", "disabled", {
- size: "xs",
- }),
- selection: colorScheme.players[0],
- border: border(layer, "on"),
- padding: {
- bottom: 4,
- left: 8,
- right: 8,
- top: 4,
- },
- },
- apiKeyPrompt: {
- padding: 10,
- ...text(layer, "sans", "default", { size: "xs" }),
- }
+ container: {
+ background: editor(colorScheme).background,
+ padding: { left: 12 },
+ },
+ header: {
+ border: border(layer, "default", { bottom: true, top: true }),
+ margin: { bottom: 6, top: 6 },
+ background: editor(colorScheme).background,
+ },
+ userSender: {
+ ...text(layer, "sans", "default", { size: "sm", weight: "bold" }),
+ },
+ assistantSender: {
+ ...text(layer, "sans", "accent", { size: "sm", weight: "bold" }),
+ },
+ systemSender: {
+ ...text(layer, "sans", "variant", { size: "sm", weight: "bold" }),
+ },
+ sentAt: {
+ margin: { top: 2, left: 8 },
+ ...text(layer, "sans", "default", { size: "2xs" }),
+ },
+ modelInfoContainer: {
+ margin: { right: 16, top: 4 },
+ },
+ model: {
+ background: background(layer, "on"),
+ border: border(layer, "on", { overlay: true }),
+ padding: 4,
+ cornerRadius: 4,
+ ...text(layer, "sans", "default", { size: "xs" }),
+ hover: {
+ background: background(layer, "on", "hovered"),
+ },
+ },
+ remainingTokens: {
+ background: background(layer, "on"),
+ border: border(layer, "on", { overlay: true }),
+ padding: 4,
+ margin: { left: 4 },
+ cornerRadius: 4,
+ ...text(layer, "sans", "positive", { size: "xs" }),
+ },
+ noRemainingTokens: {
+ background: background(layer, "on"),
+ border: border(layer, "on", { overlay: true }),
+ padding: 4,
+ margin: { left: 4 },
+ cornerRadius: 4,
+ ...text(layer, "sans", "negative", { size: "xs" }),
+ },
+ errorIcon: {
+ margin: { left: 8 },
+ color: foreground(layer, "negative"),
+ width: 12,
+ },
+ apiKeyEditor: {
+ background: background(layer, "on"),
+ cornerRadius: 6,
+ text: text(layer, "mono", "on"),
+ placeholderText: text(layer, "mono", "on", "disabled", {
+ size: "xs",
+ }),
+ selection: colorScheme.players[0],
+ border: border(layer, "on"),
+ padding: {
+ bottom: 4,
+ left: 8,
+ right: 8,
+ top: 4,
+ },
+ },
+ apiKeyPrompt: {
+ padding: 10,
+ ...text(layer, "sans", "default", { size: "xs" }),
+ },
}
}
@@ -1,5 +1,5 @@
-import { ColorScheme } from "../themes/common/colorScheme"
-import { withOpacity } from "../utils/color"
+import { ColorScheme } from "../theme/colorScheme"
+import { withOpacity } from "../theme/color"
import { text, background } from "./components"
export default function commandPalette(colorScheme: ColorScheme) {
@@ -1,5 +1,5 @@
import { fontFamilies, fontSizes, FontWeight } from "../common"
-import { Layer, Styles, StyleSets, Style } from "../themes/common/colorScheme"
+import { Layer, Styles, StyleSets, Style } from "../theme/colorScheme"
function isStyleSet(key: any): key is StyleSets {
return [
@@ -1,5 +1,5 @@
import picker from "./picker"
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { background, border, foreground, text } from "./components"
export default function contactFinder(colorScheme: ColorScheme): any {
@@ -1,4 +1,4 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { background, border, borderColor, foreground, text } from "./components"
export default function contactsPanel(colorScheme: ColorScheme) {
@@ -1,4 +1,4 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { background, foreground, text } from "./components"
const avatarSize = 12
@@ -1,4 +1,4 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { background, border, text } from "./components"
export default function contactsPopover(colorScheme: ColorScheme) {
@@ -1,4 +1,4 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { background, border, borderColor, text } from "./components"
export default function contextMenu(colorScheme: ColorScheme) {
@@ -1,4 +1,4 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { background, border, foreground, svg, text } from "./components"
export default function copilot(colorScheme: ColorScheme) {
@@ -1,9 +1,9 @@
-import { withOpacity } from "../utils/color"
-import { ColorScheme, Layer, StyleSets } from "../themes/common/colorScheme"
+import { withOpacity } from "../theme/color"
+import { ColorScheme, Layer, StyleSets } from "../theme/colorScheme"
import { background, border, borderColor, foreground, text } from "./components"
import hoverPopover from "./hoverPopover"
-import { buildSyntax } from "../themes/common/syntax"
+import { buildSyntax } from "../theme/syntax"
export default function editor(colorScheme: ColorScheme) {
const { isLight } = colorScheme
@@ -186,7 +186,10 @@ export default function editor(colorScheme: ColorScheme) {
},
},
source: {
- text: text(colorScheme.middle, "sans", { size: "sm", weight: "bold", }),
+ text: text(colorScheme.middle, "sans", {
+ size: "sm",
+ weight: "bold",
+ }),
},
message: {
highlightText: text(colorScheme.middle, "sans", {
@@ -250,7 +253,7 @@ export default function editor(colorScheme: ColorScheme) {
right: true,
left: true,
bottom: false,
- }
+ },
},
git: {
deleted: isLight
@@ -262,7 +265,7 @@ export default function editor(colorScheme: ColorScheme) {
inserted: isLight
? withOpacity(colorScheme.ramps.green(0.5).hex(), 0.8)
: withOpacity(colorScheme.ramps.green(0.4).hex(), 0.8),
- }
+ },
},
compositionMark: {
underline: {
@@ -1,4 +1,4 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { background, border, text } from "./components"
export default function feedback(colorScheme: ColorScheme) {
@@ -1,4 +1,4 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { background, border, foreground, text } from "./components"
export default function HoverPopover(colorScheme: ColorScheme) {
@@ -1,4 +1,4 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { background, border, text } from "./components"
export default function incomingCallNotification(
@@ -1,4 +1,4 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { background, border, text } from "./components"
export default function contactsPanel(colorScheme: ColorScheme) {
@@ -1,5 +1,5 @@
-import { ColorScheme } from "../themes/common/colorScheme"
-import { withOpacity } from "../utils/color"
+import { ColorScheme } from "../theme/colorScheme"
+import { withOpacity } from "../theme/color"
import { background, border, text } from "./components"
export default function picker(colorScheme: ColorScheme): any {
@@ -1,4 +1,4 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { background, text } from "./components"
export default function projectDiagnostics(colorScheme: ColorScheme) {
@@ -1,5 +1,5 @@
-import { ColorScheme } from "../themes/common/colorScheme"
-import { withOpacity } from "../utils/color"
+import { ColorScheme } from "../theme/colorScheme"
+import { withOpacity } from "../theme/color"
import { background, border, foreground, text } from "./components"
export default function projectPanel(colorScheme: ColorScheme) {
@@ -24,8 +24,8 @@ export default function projectPanel(colorScheme: ColorScheme) {
: colorScheme.ramps.green(0.5).hex(),
conflict: isLight
? colorScheme.ramps.red(0.6).hex()
- : colorScheme.ramps.red(0.5).hex()
- }
+ : colorScheme.ramps.red(0.5).hex(),
+ },
}
let entry = {
@@ -44,7 +44,7 @@ export default function projectPanel(colorScheme: ColorScheme) {
background: background(layer, "active"),
text: text(layer, "mono", "active", { size: "sm" }),
},
- status
+ status,
}
return {
@@ -79,7 +79,7 @@ export default function projectPanel(colorScheme: ColorScheme) {
text: text(layer, "mono", "on", { size: "sm" }),
background: withOpacity(background(layer, "on"), 0.9),
border: border(layer),
- status
+ status,
},
ignoredEntry: {
...entry,
@@ -88,7 +88,7 @@ export default function projectPanel(colorScheme: ColorScheme) {
active: {
...entry.active,
iconColor: foreground(layer, "variant"),
- }
+ },
},
cutEntry: {
...entry,
@@ -1,4 +1,4 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { background, border, text } from "./components"
export default function projectSharedNotification(
@@ -1,5 +1,5 @@
-import { ColorScheme } from "../themes/common/colorScheme"
-import { withOpacity } from "../utils/color"
+import { ColorScheme } from "../theme/colorScheme"
+import { withOpacity } from "../theme/color"
import { background, border, foreground, text } from "./components"
export default function search(colorScheme: ColorScheme) {
@@ -30,7 +30,7 @@ export default function search(colorScheme: ColorScheme) {
...editor,
minWidth: 100,
maxWidth: 250,
- };
+ }
return {
// TODO: Add an activeMatchBackground on the rust side to differentiate between active and inactive
@@ -1,4 +1,4 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { background } from "./components"
export default function sharedScreen(colorScheme: ColorScheme) {
@@ -1,4 +1,4 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { background, border, foreground, text } from "./components"
const headerPadding = 8
@@ -1,4 +1,4 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { background, border, foreground, text } from "./components"
export default function statusBar(colorScheme: ColorScheme) {
@@ -1,5 +1,5 @@
-import { ColorScheme } from "../themes/common/colorScheme"
-import { withOpacity } from "../utils/color"
+import { ColorScheme } from "../theme/colorScheme"
+import { withOpacity } from "../theme/color"
import { text, border, background, foreground } from "./components"
export default function tabBar(colorScheme: ColorScheme) {
@@ -96,7 +96,7 @@ export default function tabBar(colorScheme: ColorScheme) {
},
active: {
color: foreground(layer, "accent"),
- }
+ },
},
paneButtonContainer: {
background: tab.background,
@@ -1,4 +1,4 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
export default function terminal(colorScheme: ColorScheme) {
/**
@@ -1,4 +1,4 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { background, border, text } from "./components"
export default function tooltip(colorScheme: ColorScheme) {
@@ -1,4 +1,4 @@
-import { ColorScheme } from "../themes/common/colorScheme"
+import { ColorScheme } from "../theme/colorScheme"
import { foreground, text } from "./components"
const headerPadding = 8
@@ -1,5 +1,5 @@
-import { ColorScheme } from "../themes/common/colorScheme"
-import { withOpacity } from "../utils/color"
+import { ColorScheme } from "../theme/colorScheme"
+import { withOpacity } from "../theme/color"
import {
border,
background,
@@ -1,5 +1,5 @@
-import { ColorScheme } from "../themes/common/colorScheme"
-import { withOpacity } from "../utils/color"
+import { ColorScheme } from "../theme/colorScheme"
+import { withOpacity } from "../theme/color"
import {
background,
border,
@@ -123,7 +123,7 @@ export default function workspace(colorScheme: ColorScheme) {
cursor: "Arrow",
background: isLight
? withOpacity(background(colorScheme.lowest), 0.8)
- : withOpacity(background(colorScheme.highest), 0.6)
+ : withOpacity(background(colorScheme.highest), 0.6),
},
zoomedPaneForeground: {
margin: 16,
@@ -143,7 +143,7 @@ export default function workspace(colorScheme: ColorScheme) {
},
right: {
border: border(layer, { left: true }),
- }
+ },
},
paneDivider: {
color: borderColor(layer),
@@ -5,7 +5,7 @@ import {
ThemeConfig,
ThemeAppearance,
ThemeConfigInputColors,
-} from "../../themeConfig"
+} from "./themeConfig"
import { getRamps } from "./ramps"
export interface ColorScheme {
@@ -1,4 +1,4 @@
export * from "./colorScheme"
export * from "./ramps"
export * from "./syntax"
-export * from "../../themeConfig"
+export * from "./themeConfig"
@@ -3,7 +3,7 @@ import { RampSet } from "./colorScheme"
import {
ThemeConfigInputColors,
ThemeConfigInputColorsKeys,
-} from "../../themeConfig"
+} from "./themeConfig"
export function colorRamp(color: Color): Scale {
let endColor = color.desaturate(1).brighten(5)
@@ -1,5 +1,5 @@
import deepmerge from "deepmerge"
-import { FontWeight, fontWeights } from "../../common"
+import { FontWeight, fontWeights } from "../common"
import { ColorScheme } from "./colorScheme"
import chroma from "chroma-js"
@@ -1,5 +1,5 @@
import { Scale, Color } from "chroma-js"
-import { Syntax } from "./themes/common/syntax"
+import { Syntax } from "./syntax"
interface ThemeMeta {
/** The name of the theme */
@@ -23,6 +23,11 @@ interface ThemeMeta {
themeUrl?: string
}
+export type ThemeFamilyMeta = Pick<
+ ThemeMeta,
+ "name" | "author" | "licenseType" | "licenseUrl"
+>
+
export interface ThemeConfigInputColors {
neutral: Scale<Color>
red: Scale<Color>
@@ -0,0 +1,12 @@
+import { ColorScheme } from "../colorScheme"
+import { PlayerTokens, players } from "./players"
+
+interface ColorSchemeTokens {
+ players: PlayerTokens
+}
+
+export function colorSchemeTokens(colorScheme: ColorScheme): ColorSchemeTokens {
+ return {
+ players: players(colorScheme),
+ }
+}
@@ -0,0 +1,33 @@
+import { SingleColorToken } from "@tokens-studio/types"
+import { ColorScheme, Players } from "../../common"
+import { colorToken } from "./token"
+
+export type PlayerToken = Record<"selection" | "cursor", SingleColorToken>
+
+export type PlayerTokens = Record<keyof Players, PlayerToken>
+
+function buildPlayerToken(colorScheme: ColorScheme, index: number): PlayerToken {
+    const playerNumber = index.toString() as keyof Players
+
+    return {
+        selection: colorToken(
+            `player${index}Selection`,
+            colorScheme.players[playerNumber].selection
+        ),
+        cursor: colorToken(
+            `player${index}Cursor`,
+            colorScheme.players[playerNumber].cursor
+        ),
+    }
+}
+
+export const players = (colorScheme: ColorScheme): PlayerTokens => ({
+    "0": buildPlayerToken(colorScheme, 0),
+    "1": buildPlayerToken(colorScheme, 1),
+    "2": buildPlayerToken(colorScheme, 2),
+    "3": buildPlayerToken(colorScheme, 3),
+    "4": buildPlayerToken(colorScheme, 4),
+    "5": buildPlayerToken(colorScheme, 5),
+    "6": buildPlayerToken(colorScheme, 6),
+    "7": buildPlayerToken(colorScheme, 7),
+})
@@ -0,0 +1,19 @@
+import { SingleColorToken, TokenTypes } from "@tokens-studio/types"
+
+export function colorToken(
+    name: string,
+    value: string,
+    description?: string
+): SingleColorToken {
+    const token: SingleColorToken = {
+        name,
+        type: TokenTypes.COLOR,
+        value,
+        description,
+    }
+
+    if (!token.value || token.value === "")
+        throw new Error("Color token must have a value")
+
+    return token
+}
@@ -1,4 +1,4 @@
-import { ThemeLicenseType, ThemeConfig, ThemeSyntax } from "../../common"
+import { ThemeLicenseType, ThemeSyntax, ThemeFamilyMeta } from "../../common"
export interface Variant {
colors: {
@@ -21,7 +21,7 @@ export interface Variant {
}
}
-export const meta: Partial<ThemeConfig> = {
+export const meta: ThemeFamilyMeta = {
name: "Atelier",
author: "Bram de Haan (http://atelierbramdehaan.nl)",
licenseType: ThemeLicenseType.MIT,
@@ -3,8 +3,8 @@ import {
chroma,
colorRamp,
ThemeLicenseType,
- ThemeConfig,
ThemeSyntax,
+ ThemeFamilyMeta,
} from "../../common"
export const ayu = {
@@ -77,7 +77,7 @@ export const buildSyntax = (t: typeof dark): ThemeSyntax => {
}
}
-export const meta: Partial<ThemeConfig> = {
+export const meta: ThemeFamilyMeta = {
name: "Ayu",
author: "dempfi",
licenseType: ThemeLicenseType.MIT,
@@ -5,9 +5,10 @@ import {
ThemeLicenseType,
ThemeConfig,
ThemeSyntax,
+ ThemeFamilyMeta,
} from "../../common"
-const meta: Partial<ThemeConfig> = {
+const meta: ThemeFamilyMeta = {
name: "Gruvbox",
author: "morhetz <morhetz@gmail.com>",
licenseType: ThemeLicenseType.MIT,
@@ -1,4 +1,4 @@
-import { ThemeConfig } from "./common"
+import { ThemeConfig } from "../theme"
import { darkDefault as gruvboxDark } from "./gruvbox/gruvbox-dark"
import { darkHard as gruvboxDarkHard } from "./gruvbox/gruvbox-dark-hard"
import { darkSoft as gruvboxDarkSoft } from "./gruvbox/gruvbox-dark-soft"
@@ -0,0 +1,10 @@
+export function slugify(t: string): string {
+    return t
+        .toString()
+        .toLowerCase()
+        .replace(/\s+/g, "-")
+        .replace(/[^\w\-]+/g, "")
+        .replace(/\-\-+/g, "-")
+        .replace(/^-+/, "")
+        .replace(/-+$/, "")
+}
@@ -6,7 +6,21 @@
"noImplicitAny": true,
"removeComments": true,
"preserveConstEnums": true,
- "sourceMap": true
+ "sourceMap": true,
+ "noEmit": true,
+ "forceConsistentCasingInFileNames": true,
+ "declaration": true,
+ "strict": true,
+ "strictNullChecks": true,
+ "noImplicitThis": true,
+ "alwaysStrict": true,
+ "noUnusedLocals": false,
+ "noUnusedParameters": false,
+ "noImplicitReturns": true,
+ "noFallthroughCasesInSwitch": false,
+ "experimentalDecorators": true,
+ "strictPropertyInitialization": false,
+ "skipLibCheck": true
},
"exclude": ["node_modules"]
}