Detailed changes
@@ -23,8 +23,8 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- token: ${{ steps.get-app-token.outputs.token }}
ref: ${{ inputs.branch }}
+ token: ${{ steps.get-app-token.outputs.token }}
- name: bump_patch_version::run_bump_patch_version::bump_patch_version
run: |
channel="$(cat crates/zed/RELEASE_CHANNEL)"
@@ -4,12 +4,57 @@ name: extension_workflow_rollout
env:
CARGO_TERM_COLOR: always
on:
- workflow_dispatch: {}
+ workflow_dispatch:
+ inputs:
+ filter-repos:
+ description: Comma-separated list of repository names to rollout to. Leave empty for all repos.
+ type: string
+ default: ''
+ change-description:
+ description: Description for the changes to be expected with this rollout
+ type: string
+ default: ''
jobs:
fetch_extension_repos:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') && github.ref == 'refs/heads/main'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
+ - name: checkout_zed_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ clean: false
+ fetch-depth: 0
+ - id: prev-tag
+ name: extension_workflow_rollout::fetch_extension_repos::get_previous_tag_commit
+ run: |
+ PREV_COMMIT=$(git rev-parse "extension-workflows^{commit}" 2>/dev/null || echo "")
+ if [ -z "$PREV_COMMIT" ]; then
+ echo "::error::No previous rollout tag 'extension-workflows' found. Cannot determine file changes."
+ exit 1
+ fi
+ echo "Found previous rollout at commit: $PREV_COMMIT"
+ echo "prev_commit=$PREV_COMMIT" >> "$GITHUB_OUTPUT"
+ - id: calc-changes
+ name: extension_workflow_rollout::fetch_extension_repos::get_removed_files
+ run: |
+ for workflow_type in "ci" "shared"; do
+ if [ "$workflow_type" = "ci" ]; then
+ WORKFLOW_DIR="extensions/workflows"
+ else
+ WORKFLOW_DIR="extensions/workflows/shared"
+ fi
+
+ REMOVED=$(git diff --name-status -M "$PREV_COMMIT" HEAD -- "$WORKFLOW_DIR" | \
+ awk '/^D/ { print $2 } /^R/ { print $2 }' | \
+ xargs -I{} basename {} 2>/dev/null | \
+ tr '\n' ' ' || echo "")
+ REMOVED=$(echo "$REMOVED" | xargs)
+
+ echo "Removed files for $workflow_type: $REMOVED"
+ echo "removed_${workflow_type}=$REMOVED" >> "$GITHUB_OUTPUT"
+ done
+ env:
+ PREV_COMMIT: ${{ steps.prev-tag.outputs.prev_commit }}
- id: list-repos
name: extension_workflow_rollout::fetch_extension_repos::get_repositories
uses: actions/github-script@v7
@@ -21,16 +66,42 @@ jobs:
per_page: 100,
});
- const filteredRepos = repos
+ let filteredRepos = repos
.filter(repo => !repo.archived)
.map(repo => repo.name);
+ const filterInput = ${{ toJson(inputs.filter-repos) }}.trim();
+ if (filterInput.length > 0) {
+ const allowedNames = filterInput.split(',').map(s => s.trim()).filter(s => s.length > 0);
+ filteredRepos = filteredRepos.filter(name => allowedNames.includes(name));
+ console.log(`Filter applied. Matched ${filteredRepos.length} repos from ${allowedNames.length} requested.`);
+ }
+
console.log(`Found ${filteredRepos.length} extension repos`);
return filteredRepos;
result-encoding: json
+ - name: steps::cache_rust_dependencies_namespace
+ uses: namespacelabs/nscloud-cache-action@v1
+ with:
+ cache: rust
+ path: ~/.rustup
+ - name: extension_workflow_rollout::fetch_extension_repos::generate_workflow_files
+ run: |
+ cargo xtask workflows "$COMMIT_SHA"
+ env:
+ COMMIT_SHA: ${{ github.sha }}
+ - name: extension_workflow_rollout::fetch_extension_repos::upload_workflow_files
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
+ with:
+ name: extension-workflow-files
+ path: extensions/workflows/**/*.yml
+ if-no-files-found: error
outputs:
repos: ${{ steps.list-repos.outputs.result }}
- timeout-minutes: 5
+ prev_commit: ${{ steps.prev-tag.outputs.prev_commit }}
+ removed_ci: ${{ steps.calc-changes.outputs.removed_ci }}
+ removed_shared: ${{ steps.calc-changes.outputs.removed_shared }}
+ timeout-minutes: 10
rollout_workflows_to_extension:
needs:
- fetch_extension_repos
@@ -53,59 +124,28 @@ jobs:
permission-pull-requests: write
permission-contents: write
permission-workflows: write
- - name: checkout_zed_repo
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- with:
- clean: false
- fetch-depth: 0
- path: zed
- name: checkout_extension_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- token: ${{ steps.generate-token.outputs.token }}
path: extension
repository: zed-extensions/${{ matrix.repo }}
- - id: prev-tag
- name: extension_workflow_rollout::rollout_workflows_to_extension::get_previous_tag_commit
- run: |
- PREV_COMMIT=$(git rev-parse "extension-workflows^{commit}" 2>/dev/null || echo "")
- if [ -z "$PREV_COMMIT" ]; then
- echo "::error::No previous rollout tag 'extension-workflows' found. Cannot determine file changes."
- exit 1
- fi
- echo "Found previous rollout at commit: $PREV_COMMIT"
- echo "prev_commit=$PREV_COMMIT" >> "$GITHUB_OUTPUT"
- working-directory: zed
- - id: calc-changes
- name: extension_workflow_rollout::rollout_workflows_to_extension::get_removed_files
+ token: ${{ steps.generate-token.outputs.token }}
+ - name: extension_workflow_rollout::rollout_workflows_to_extension::download_workflow_files
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
+ with:
+ name: extension-workflow-files
+ path: workflow-files
+ - name: extension_workflow_rollout::rollout_workflows_to_extension::sync_workflow_files
run: |
+ mkdir -p extension/.github/workflows
+
if [ "$MATRIX_REPO" = "workflows" ]; then
- WORKFLOW_DIR="extensions/workflows"
+ REMOVED_FILES="$REMOVED_CI"
else
- WORKFLOW_DIR="extensions/workflows/shared"
+ REMOVED_FILES="$REMOVED_SHARED"
fi
- echo "Calculating changes from $PREV_COMMIT to HEAD for $WORKFLOW_DIR"
-
- # Get deleted files (status D) and renamed files (status R - old name needs removal)
- # Using -M to detect renames, then extracting files that are gone from their original location
- REMOVED_FILES=$(git diff --name-status -M "$PREV_COMMIT" HEAD -- "$WORKFLOW_DIR" | \
- awk '/^D/ { print $2 } /^R/ { print $2 }' | \
- xargs -I{} basename {} 2>/dev/null | \
- tr '\n' ' ' || echo "")
-
- REMOVED_FILES=$(echo "$REMOVED_FILES" | xargs)
-
- echo "Files to remove: $REMOVED_FILES"
- echo "removed_files=$REMOVED_FILES" >> "$GITHUB_OUTPUT"
- env:
- PREV_COMMIT: ${{ steps.prev-tag.outputs.prev_commit }}
- MATRIX_REPO: ${{ matrix.repo }}
- working-directory: zed
- - name: extension_workflow_rollout::rollout_workflows_to_extension::sync_workflow_files
- run: |
- mkdir -p extension/.github/workflows
cd extension/.github/workflows
if [ -n "$REMOVED_FILES" ]; then
@@ -119,18 +159,18 @@ jobs:
cd - > /dev/null
if [ "$MATRIX_REPO" = "workflows" ]; then
- cp zed/extensions/workflows/*.yml extension/.github/workflows/
+ cp workflow-files/*.yml extension/.github/workflows/
else
- cp zed/extensions/workflows/shared/*.yml extension/.github/workflows/
+ cp workflow-files/shared/*.yml extension/.github/workflows/
fi
env:
- REMOVED_FILES: ${{ steps.calc-changes.outputs.removed_files }}
+ REMOVED_CI: ${{ needs.fetch_extension_repos.outputs.removed_ci }}
+ REMOVED_SHARED: ${{ needs.fetch_extension_repos.outputs.removed_shared }}
MATRIX_REPO: ${{ matrix.repo }}
- id: short-sha
name: extension_workflow_rollout::rollout_workflows_to_extension::get_short_sha
run: |
- echo "sha_short=$(git rev-parse --short=7 HEAD)" >> "$GITHUB_OUTPUT"
- working-directory: zed
+ echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT"
- id: create-pr
name: extension_workflow_rollout::rollout_workflows_to_extension::create_pull_request
uses: peter-evans/create-pull-request@v7
@@ -140,6 +180,8 @@ jobs:
body: |
This PR updates the CI workflow files from the main Zed repository
based on the commit zed-industries/zed@${{ github.sha }}
+
+ ${{ inputs.change-description }}
commit-message: Update CI workflows to `${{ steps.short-sha.outputs.sha_short }}`
branch: update-workflows
committer: zed-zippy[bot] <234243425+zed-zippy[bot]@users.noreply.github.com>
@@ -151,16 +193,17 @@ jobs:
- name: extension_workflow_rollout::rollout_workflows_to_extension::enable_auto_merge
run: |
if [ -n "$PR_NUMBER" ]; then
- cd extension
gh pr merge "$PR_NUMBER" --auto --squash
fi
env:
GH_TOKEN: ${{ steps.generate-token.outputs.token }}
PR_NUMBER: ${{ steps.create-pr.outputs.pull-request-number }}
+ working-directory: extension
timeout-minutes: 10
create_rollout_tag:
needs:
- rollout_workflows_to_extension
+ if: inputs.filter-repos == ''
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- id: generate-token
@@ -227,9 +227,9 @@ dependencies = [
[[package]]
name = "agent-client-protocol"
-version = "0.9.4"
+version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2659b1089101b15db31137710159421cb44785ecdb5ba784be3b4a6f8cb8a475"
+checksum = "9c56a59cf6315e99f874d2c1f96c69d2da5ffe0087d211297fc4a41f849770a2"
dependencies = [
"agent-client-protocol-schema",
"anyhow",
@@ -244,16 +244,16 @@ dependencies = [
[[package]]
name = "agent-client-protocol-schema"
-version = "0.10.8"
+version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44bc1fef9c32f03bce2ab44af35b6f483bfd169bf55cc59beeb2e3b1a00ae4d1"
+checksum = "e0497b9a95a404e35799904835c57c6f8c69b9d08ccfd3cb5b7d746425cd6789"
dependencies = [
"anyhow",
"derive_more",
"schemars",
"serde",
"serde_json",
- "strum 0.27.2",
+ "strum 0.28.0",
]
[[package]]
@@ -7151,7 +7151,7 @@ dependencies = [
"serde",
"serde_json",
"serde_yaml",
- "strum_macros",
+ "strum_macros 0.27.2",
]
[[package]]
@@ -16551,7 +16551,16 @@ version = "0.27.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf"
dependencies = [
- "strum_macros",
+ "strum_macros 0.27.2",
+]
+
+[[package]]
+name = "strum"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9628de9b8791db39ceda2b119bbe13134770b56c138ec1d3af810d045c04f9bd"
+dependencies = [
+ "strum_macros 0.28.0",
]
[[package]]
@@ -16566,6 +16575,18 @@ dependencies = [
"syn 2.0.117",
]
+[[package]]
+name = "strum_macros"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ab85eea0270ee17587ed4156089e10b9e6880ee688791d45a905f5b1ca36f664"
+dependencies = [
+ "heck 0.5.0",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.117",
+]
+
[[package]]
name = "subtle"
version = "2.6.1"
@@ -21742,7 +21763,7 @@ dependencies = [
[[package]]
name = "zed"
-version = "0.228.0"
+version = "0.229.0"
dependencies = [
"acp_thread",
"acp_tools",
@@ -473,7 +473,7 @@ ztracing_macro = { path = "crates/ztracing_macro" }
# External crates
#
-agent-client-protocol = { version = "=0.9.4", features = ["unstable"] }
+agent-client-protocol = { version = "=0.10.2", features = ["unstable"] }
aho-corasick = "1.1"
alacritty_terminal = { git = "https://github.com/zed-industries/alacritty", rev = "9d9640d4" }
any_vec = "0.14"
@@ -4027,7 +4027,7 @@ mod tests {
}
fn authenticate(&self, method: acp::AuthMethodId, _cx: &mut App) -> Task<gpui::Result<()>> {
- if self.auth_methods().iter().any(|m| m.id == method) {
+ if self.auth_methods().iter().any(|m| m.id() == &method) {
Task::ready(Ok(()))
} else {
Task::ready(Err(anyhow!("Invalid Auth Method")))
@@ -60,7 +60,11 @@ pub trait AgentConnection {
}
/// Close an existing session. Allows the agent to free the session from memory.
- fn close_session(&self, _session_id: &acp::SessionId, _cx: &mut App) -> Task<Result<()>> {
+ fn close_session(
+ self: Rc<Self>,
+ _session_id: &acp::SessionId,
+ _cx: &mut App,
+ ) -> Task<Result<()>> {
Task::ready(Err(anyhow::Error::msg("Closing sessions is not supported")))
}
@@ -1028,6 +1028,11 @@ impl ActionLog {
.collect()
}
+ /// Returns the total number of lines added and removed across all unreviewed buffers.
+ pub fn diff_stats(&self, cx: &App) -> DiffStats {
+ DiffStats::all_files(&self.changed_buffers(cx), cx)
+ }
+
/// Iterate over buffers changed since last read or edited by the model
pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
self.tracked_buffers
@@ -1044,6 +1049,46 @@ impl ActionLog {
}
}
+#[derive(Default, Debug, Clone, Copy)]
+pub struct DiffStats {
+ pub lines_added: u32,
+ pub lines_removed: u32,
+}
+
+impl DiffStats {
+ pub fn single_file(buffer: &Buffer, diff: &BufferDiff, cx: &App) -> Self {
+ let mut stats = DiffStats::default();
+ let diff_snapshot = diff.snapshot(cx);
+ let buffer_snapshot = buffer.snapshot();
+ let base_text = diff_snapshot.base_text();
+
+ for hunk in diff_snapshot.hunks(&buffer_snapshot) {
+ let added_rows = hunk.range.end.row.saturating_sub(hunk.range.start.row);
+ stats.lines_added += added_rows;
+
+ let base_start = hunk.diff_base_byte_range.start.to_point(base_text).row;
+ let base_end = hunk.diff_base_byte_range.end.to_point(base_text).row;
+ let removed_rows = base_end.saturating_sub(base_start);
+ stats.lines_removed += removed_rows;
+ }
+
+ stats
+ }
+
+ pub fn all_files(
+ changed_buffers: &BTreeMap<Entity<Buffer>, Entity<BufferDiff>>,
+ cx: &App,
+ ) -> Self {
+ let mut total = DiffStats::default();
+ for (buffer, diff) in changed_buffers {
+ let stats = DiffStats::single_file(buffer.read(cx), diff.read(cx), cx);
+ total.lines_added += stats.lines_added;
+ total.lines_removed += stats.lines_removed;
+ }
+ total
+ }
+}
+
#[derive(Clone)]
pub struct ActionLogTelemetry {
pub agent_telemetry_id: SharedString,
@@ -1418,7 +1418,11 @@ impl acp_thread::AgentConnection for NativeAgentConnection {
true
}
- fn close_session(&self, session_id: &acp::SessionId, cx: &mut App) -> Task<Result<()>> {
+ fn close_session(
+ self: Rc<Self>,
+ session_id: &acp::SessionId,
+ cx: &mut App,
+ ) -> Task<Result<()>> {
self.0.update(cx, |agent, _cx| {
let project_id = agent.sessions.get(session_id).map(|s| s.project_id);
agent.sessions.remove(session_id);
@@ -118,7 +118,7 @@ pub struct Edit {
pub new_text: String,
}
-#[derive(Default, Debug, Deserialize)]
+#[derive(Clone, Default, Debug, Deserialize)]
struct StreamingEditFileToolPartialInput {
#[serde(default)]
display_description: Option<String>,
@@ -132,7 +132,7 @@ struct StreamingEditFileToolPartialInput {
edits: Option<Vec<PartialEdit>>,
}
-#[derive(Default, Debug, Deserialize)]
+#[derive(Clone, Default, Debug, Deserialize)]
pub struct PartialEdit {
#[serde(default)]
pub old_text: Option<String>,
@@ -314,12 +314,19 @@ impl AgentTool for StreamingEditFileTool {
) -> Task<Result<Self::Output, Self::Output>> {
cx.spawn(async move |cx: &mut AsyncApp| {
let mut state: Option<EditSession> = None;
+ let mut last_partial: Option<StreamingEditFileToolPartialInput> = None;
loop {
futures::select! {
partial = input.recv_partial().fuse() => {
let Some(partial_value) = partial else { break };
if let Ok(parsed) = serde_json::from_value::<StreamingEditFileToolPartialInput>(partial_value) {
+ let path_complete = parsed.path.is_some()
+ && parsed.path.as_ref() == last_partial.as_ref().and_then(|p| p.path.as_ref());
+
+ last_partial = Some(parsed.clone());
+
if state.is_none()
+ && path_complete
&& let StreamingEditFileToolPartialInput {
path: Some(path),
display_description: Some(display_description),
@@ -1907,6 +1914,13 @@ mod tests {
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Setup + single edit that stays in-progress (no second edit to prove completion)
+ sender.send_partial(json!({
+ "display_description": "Single edit",
+ "path": "root/file.txt",
+ "mode": "edit",
+ }));
+ cx.run_until_parked();
+
sender.send_partial(json!({
"display_description": "Single edit",
"path": "root/file.txt",
@@ -3475,6 +3489,12 @@ mod tests {
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Transition to BufferResolved
+ sender.send_partial(json!({
+ "display_description": "Overwrite file",
+ "path": "root/file.txt",
+ }));
+ cx.run_until_parked();
+
sender.send_partial(json!({
"display_description": "Overwrite file",
"path": "root/file.txt",
@@ -3550,8 +3570,9 @@ mod tests {
// Verify buffer still has old content (no content partial yet)
let buffer = project.update(cx, |project, cx| {
let path = project.find_project_path("root/file.txt", cx).unwrap();
- project.get_open_buffer(&path, cx).unwrap()
+ project.open_buffer(path, cx)
});
+ let buffer = buffer.await.unwrap();
assert_eq!(
buffer.read_with(cx, |b, _| b.text()),
"old line 1\nold line 2\nold line 3\n"
@@ -3735,6 +3756,106 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_streaming_edit_file_tool_fields_out_of_order_in_write_mode(
+ cx: &mut TestAppContext,
+ ) {
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "old_content"})).await;
+ let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
+ let (event_stream, _receiver) = ToolCallEventStream::test();
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
+
+ sender.send_partial(json!({
+ "display_description": "Overwrite file",
+ "mode": "write"
+ }));
+ cx.run_until_parked();
+
+ sender.send_partial(json!({
+ "display_description": "Overwrite file",
+ "mode": "write",
+ "content": "new_content"
+ }));
+ cx.run_until_parked();
+
+ sender.send_partial(json!({
+ "display_description": "Overwrite file",
+ "mode": "write",
+ "content": "new_content",
+ "path": "root"
+ }));
+ cx.run_until_parked();
+
+ // Send final.
+ sender.send_final(json!({
+ "display_description": "Overwrite file",
+ "mode": "write",
+ "content": "new_content",
+ "path": "root/file.txt"
+ }));
+
+ let result = task.await;
+ let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else {
+ panic!("expected success");
+ };
+ assert_eq!(new_text, "new_content");
+ }
+
+ #[gpui::test]
+ async fn test_streaming_edit_file_tool_fields_out_of_order_in_edit_mode(
+ cx: &mut TestAppContext,
+ ) {
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "old_content"})).await;
+ let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
+ let (event_stream, _receiver) = ToolCallEventStream::test();
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
+
+ sender.send_partial(json!({
+ "display_description": "Overwrite file",
+ "mode": "edit"
+ }));
+ cx.run_until_parked();
+
+ sender.send_partial(json!({
+ "display_description": "Overwrite file",
+ "mode": "edit",
+ "edits": [{"old_text": "old_content"}]
+ }));
+ cx.run_until_parked();
+
+ sender.send_partial(json!({
+ "display_description": "Overwrite file",
+ "mode": "edit",
+ "edits": [{"old_text": "old_content", "new_text": "new_content"}]
+ }));
+ cx.run_until_parked();
+
+ sender.send_partial(json!({
+ "display_description": "Overwrite file",
+ "mode": "edit",
+ "edits": [{"old_text": "old_content", "new_text": "new_content"}],
+ "path": "root"
+ }));
+ cx.run_until_parked();
+
+ // Send final.
+ sender.send_final(json!({
+ "display_description": "Overwrite file",
+ "mode": "edit",
+ "edits": [{"old_text": "old_content", "new_text": "new_content"}],
+ "path": "root/file.txt"
+ }));
+ cx.run_until_parked();
+
+ let result = task.await;
+ let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else {
+ panic!("expected success");
+ };
+ assert_eq!(new_text, "new_content");
+ }
+
async fn setup_test_with_fs(
cx: &mut TestAppContext,
fs: Arc<project::FakeFs>,
@@ -279,7 +279,7 @@ impl AcpConnection {
acp::InitializeRequest::new(acp::ProtocolVersion::V1)
.client_capabilities(
acp::ClientCapabilities::new()
- .fs(acp::FileSystemCapability::new()
+ .fs(acp::FileSystemCapabilities::new()
.read_text_file(true)
.write_text_file(true))
.terminal(true)
@@ -331,11 +331,11 @@ impl AcpConnection {
"env": command.env.clone().unwrap_or_default(),
});
let meta = acp::Meta::from_iter([("terminal-auth".to_string(), value)]);
- vec![
- acp::AuthMethod::new("spawn-gemini-cli", "Login")
+ vec![acp::AuthMethod::Agent(
+ acp::AuthMethodAgent::new("spawn-gemini-cli", "Login")
.description("Login with your Google or Vertex AI account")
.meta(meta),
- ]
+ )]
} else {
response.auth_methods
};
@@ -744,6 +744,31 @@ impl AgentConnection for AcpConnection {
})
}
+ fn supports_close_session(&self) -> bool {
+ self.agent_capabilities.session_capabilities.close.is_some()
+ }
+
+ fn close_session(
+ self: Rc<Self>,
+ session_id: &acp::SessionId,
+ cx: &mut App,
+ ) -> Task<Result<()>> {
+ if self.agent_capabilities.session_capabilities.close.is_none() {
+ return Task::ready(Err(anyhow!(LoadError::Other(
+ "Closing sessions is not supported by this agent.".into()
+ ))));
+ }
+
+ let conn = self.connection.clone();
+ let session_id = session_id.clone();
+ cx.foreground_executor().spawn(async move {
+ conn.close_session(acp::CloseSessionRequest::new(session_id.clone()))
+ .await?;
+ self.sessions.borrow_mut().remove(&session_id);
+ Ok(())
+ })
+ }
+
fn auth_methods(&self) -> &[acp::AuthMethod] {
&self.auth_methods
}
@@ -1373,10 +1398,10 @@ impl acp::Client for ClientDelegate {
Ok(acp::CreateTerminalResponse::new(terminal_id))
}
- async fn kill_terminal_command(
+ async fn kill_terminal(
&self,
- args: acp::KillTerminalCommandRequest,
- ) -> Result<acp::KillTerminalCommandResponse, acp::Error> {
+ args: acp::KillTerminalRequest,
+ ) -> Result<acp::KillTerminalResponse, acp::Error> {
self.session_thread(&args.session_id)?
.update(&mut self.cx.clone(), |thread, cx| {
thread.kill_terminal(args.terminal_id, cx)
@@ -48,7 +48,7 @@ use crate::{
NewNativeAgentThreadFromSummary,
};
use crate::{
- ExpandMessageEditor, ThreadHistory, ThreadHistoryEvent,
+ ExpandMessageEditor, ThreadHistory, ThreadHistoryView, ThreadHistoryViewEvent,
text_thread_history::{TextThreadHistory, TextThreadHistoryEvent},
};
use agent_settings::AgentSettings;
@@ -863,6 +863,7 @@ pub struct AgentPanel {
fs: Arc<dyn Fs>,
language_registry: Arc<LanguageRegistry>,
acp_history: Entity<ThreadHistory>,
+ acp_history_view: Entity<ThreadHistoryView>,
text_thread_history: Entity<TextThreadHistory>,
thread_store: Entity<ThreadStore>,
text_thread_store: Entity<assistant_text_thread::TextThreadStore>,
@@ -1072,14 +1073,15 @@ impl AgentPanel {
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
let thread_store = ThreadStore::global(cx);
- let acp_history = cx.new(|cx| ThreadHistory::new(None, window, cx));
+ let acp_history = cx.new(|cx| ThreadHistory::new(None, cx));
+ let acp_history_view = cx.new(|cx| ThreadHistoryView::new(acp_history.clone(), window, cx));
let text_thread_history =
cx.new(|cx| TextThreadHistory::new(text_thread_store.clone(), window, cx));
cx.subscribe_in(
- &acp_history,
+ &acp_history_view,
window,
|this, _, event, window, cx| match event {
- ThreadHistoryEvent::Open(thread) => {
+ ThreadHistoryViewEvent::Open(thread) => {
this.load_agent_thread(
thread.session_id.clone(),
thread.cwd.clone(),
@@ -1213,6 +1215,7 @@ impl AgentPanel {
pending_serialization: None,
onboarding,
acp_history,
+ acp_history_view,
text_thread_history,
thread_store,
selected_agent: AgentType::default(),
@@ -3046,7 +3049,7 @@ impl Focusable for AgentPanel {
ActiveView::Uninitialized => self.focus_handle.clone(),
ActiveView::AgentThread { server_view, .. } => server_view.focus_handle(cx),
ActiveView::History { kind } => match kind {
- HistoryKind::AgentThreads => self.acp_history.focus_handle(cx),
+ HistoryKind::AgentThreads => self.acp_history_view.focus_handle(cx),
HistoryKind::TextThreads => self.text_thread_history.focus_handle(cx),
},
ActiveView::TextThread {
@@ -4763,7 +4766,7 @@ impl Render for AgentPanel {
.child(server_view.clone())
.child(self.render_drag_target(cx)),
ActiveView::History { kind } => match kind {
- HistoryKind::AgentThreads => parent.child(self.acp_history.clone()),
+ HistoryKind::AgentThreads => parent.child(self.acp_history_view.clone()),
HistoryKind::TextThreads => parent.child(self.text_thread_history.clone()),
},
ActiveView::TextThread {
@@ -33,6 +33,7 @@ pub mod test_support;
mod text_thread_editor;
mod text_thread_history;
mod thread_history;
+mod thread_history_view;
mod ui;
use std::rc::Rc;
@@ -74,7 +75,8 @@ pub(crate) use mode_selector::ModeSelector;
pub(crate) use model_selector::ModelSelector;
pub(crate) use model_selector_popover::ModelSelectorPopover;
pub use text_thread_editor::{AgentPanelDelegate, TextThreadEditor};
-pub(crate) use thread_history::*;
+pub(crate) use thread_history::ThreadHistory;
+pub(crate) use thread_history_view::*;
use zed_actions;
actions!(
@@ -5,7 +5,7 @@ use acp_thread::{
UserMessageId,
};
use acp_thread::{AgentConnection, Plan};
-use action_log::{ActionLog, ActionLogTelemetry};
+use action_log::{ActionLog, ActionLogTelemetry, DiffStats};
use agent::{NativeAgentServer, NativeAgentSessionList, SharedThread, ThreadStore};
use agent_client_protocol::{self as acp, PromptCapabilities};
use agent_servers::AgentServer;
@@ -46,7 +46,7 @@ use std::sync::Arc;
use std::time::Instant;
use std::{collections::BTreeMap, rc::Rc, time::Duration};
use terminal_view::terminal_panel::TerminalPanel;
-use text::{Anchor, ToPoint as _};
+use text::Anchor;
use theme::AgentFontSize;
use ui::{
Callout, CircularProgress, CommonAnimationExt, ContextMenu, ContextMenuEntry, CopyButton,
@@ -463,7 +463,7 @@ impl ConnectedServerState {
let tasks = self
.threads
.keys()
- .map(|id| self.connection.close_session(id, cx));
+ .map(|id| self.connection.clone().close_session(id, cx));
let task = futures::future::join_all(tasks);
cx.background_spawn(async move {
task.await;
@@ -1431,7 +1431,7 @@ impl ConnectionView {
.connection()
.auth_methods()
.iter()
- .any(|method| method.id.0.as_ref() == "claude-login")
+ .any(|method| method.id().0.as_ref() == "claude-login")
{
available_commands.push(acp::AvailableCommand::new("login", "Authenticate"));
available_commands.push(acp::AvailableCommand::new("logout", "Authenticate"));
@@ -1495,10 +1495,15 @@ impl ConnectionView {
let agent_telemetry_id = connection.telemetry_id();
// Check for the experimental "terminal-auth" _meta field
- let auth_method = connection.auth_methods().iter().find(|m| m.id == method);
+ let auth_method = connection.auth_methods().iter().find(|m| m.id() == &method);
if let Some(terminal_auth) = auth_method
- .and_then(|a| a.meta.as_ref())
+ .and_then(|a| match a {
+ acp::AuthMethod::EnvVar(env_var) => env_var.meta.as_ref(),
+ acp::AuthMethod::Terminal(terminal) => terminal.meta.as_ref(),
+ acp::AuthMethod::Agent(agent) => agent.meta.as_ref(),
+ _ => None,
+ })
.and_then(|m| m.get("terminal-auth"))
{
// Extract terminal auth details from meta
@@ -1882,7 +1887,7 @@ impl ConnectionView {
.enumerate()
.rev()
.map(|(ix, method)| {
- let (method_id, name) = (method.id.0.clone(), method.name.clone());
+ let (method_id, name) = (method.id().0.clone(), method.name().to_string());
let agent_telemetry_id = connection.telemetry_id();
Button::new(method_id.clone(), name)
@@ -1894,8 +1899,8 @@ impl ConnectionView {
this.style(ButtonStyle::Outlined)
}
})
- .when_some(method.description.clone(), |this, description| {
- this.tooltip(Tooltip::text(description))
+ .when_some(method.description(), |this, description| {
+ this.tooltip(Tooltip::text(description.to_string()))
})
.on_click({
cx.listener(move |this, _, window, cx| {
@@ -2896,7 +2901,7 @@ pub(crate) mod tests {
let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
// Create history without an initial session list - it will be set after connection
- let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx)));
let connection_store =
cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx)));
@@ -3002,7 +3007,7 @@ pub(crate) mod tests {
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
- let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx)));
let connection_store =
cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx)));
@@ -3061,7 +3066,7 @@ pub(crate) mod tests {
let captured_cwd = connection.captured_cwd.clone();
let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
- let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx)));
let connection_store =
cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx)));
@@ -3118,7 +3123,7 @@ pub(crate) mod tests {
let captured_cwd = connection.captured_cwd.clone();
let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
- let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx)));
let connection_store =
cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx)));
@@ -3175,7 +3180,7 @@ pub(crate) mod tests {
let captured_cwd = connection.captured_cwd.clone();
let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
- let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx)));
let connection_store =
cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx)));
@@ -3493,7 +3498,7 @@ pub(crate) mod tests {
// Set up thread view in workspace 1
let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
- let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx)));
let connection_store =
cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project1.clone(), cx)));
@@ -3713,7 +3718,7 @@ pub(crate) mod tests {
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
- let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx)));
let connection_store =
cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx)));
@@ -4074,7 +4079,10 @@ pub(crate) mod tests {
fn new() -> Self {
Self {
authenticated: Arc::new(Mutex::new(false)),
- auth_method: acp::AuthMethod::new(Self::AUTH_METHOD_ID, "Test Login"),
+ auth_method: acp::AuthMethod::Agent(acp::AuthMethodAgent::new(
+ Self::AUTH_METHOD_ID,
+ "Test Login",
+ )),
}
}
}
@@ -4127,7 +4135,7 @@ pub(crate) mod tests {
method_id: acp::AuthMethodId,
_cx: &mut App,
) -> Task<gpui::Result<()>> {
- if method_id == self.auth_method.id {
+ if &method_id == self.auth_method.id() {
*self.authenticated.lock() = true;
Task::ready(Ok(()))
} else {
@@ -4446,7 +4454,7 @@ pub(crate) mod tests {
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
- let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx)));
let connection_store =
cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx)));
@@ -156,43 +156,6 @@ impl ThreadFeedbackState {
}
}
-#[derive(Default, Clone, Copy)]
-struct DiffStats {
- lines_added: u32,
- lines_removed: u32,
-}
-
-impl DiffStats {
- fn single_file(buffer: &Buffer, diff: &BufferDiff, cx: &App) -> Self {
- let mut stats = DiffStats::default();
- let diff_snapshot = diff.snapshot(cx);
- let buffer_snapshot = buffer.snapshot();
- let base_text = diff_snapshot.base_text();
-
- for hunk in diff_snapshot.hunks(&buffer_snapshot) {
- let added_rows = hunk.range.end.row.saturating_sub(hunk.range.start.row);
- stats.lines_added += added_rows;
-
- let base_start = hunk.diff_base_byte_range.start.to_point(base_text).row;
- let base_end = hunk.diff_base_byte_range.end.to_point(base_text).row;
- let removed_rows = base_end.saturating_sub(base_start);
- stats.lines_removed += removed_rows;
- }
-
- stats
- }
-
- fn all_files(changed_buffers: &BTreeMap<Entity<Buffer>, Entity<BufferDiff>>, cx: &App) -> Self {
- let mut total = DiffStats::default();
- for (buffer, diff) in changed_buffers {
- let stats = DiffStats::single_file(buffer.read(cx), diff.read(cx), cx);
- total.lines_added += stats.lines_added;
- total.lines_removed += stats.lines_removed;
- }
- total
- }
-}
-
pub enum AcpThreadViewEvent {
FirstSendRequested { content: Vec<acp::ContentBlock> },
}
@@ -7446,7 +7409,7 @@ impl ThreadView {
// TODO: Add keyboard navigation.
let is_hovered =
self.hovered_recent_history_item == Some(index);
- crate::thread_history::HistoryEntryElement::new(
+ crate::thread_history_view::HistoryEntryElement::new(
entry,
self.server_view.clone(),
)
@@ -508,8 +508,7 @@ mod tests {
});
let thread_store = None;
- let history =
- cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx)));
let view_state = cx.new(|_cx| {
EntryViewState::new(
@@ -2155,7 +2155,7 @@ pub mod test {
});
let thread_store = cx.new(|cx| ThreadStore::new(cx));
- let history = cx.new(|cx| crate::ThreadHistory::new(None, window, cx));
+ let history = cx.new(|cx| crate::ThreadHistory::new(None, cx));
// Add editor to workspace
workspace.update(cx, |workspace, cx| {
@@ -1708,8 +1708,7 @@ mod tests {
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
let thread_store = None;
- let history =
- cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx)));
let message_editor = cx.update(|window, cx| {
cx.new(|cx| {
@@ -1822,8 +1821,7 @@ mod tests {
let (multi_workspace, cx) =
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
- let history =
- cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx)));
let workspace_handle = workspace.downgrade();
let message_editor = workspace.update_in(cx, |_, window, cx| {
cx.new(|cx| {
@@ -1978,8 +1976,7 @@ mod tests {
let mut cx = VisualTestContext::from_window(window.into(), cx);
let thread_store = None;
- let history =
- cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx)));
let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default()));
let available_commands = Rc::new(RefCell::new(vec![
acp::AvailableCommand::new("quick-math", "2 + 2 = 4 - 1 = 3"),
@@ -2213,8 +2210,7 @@ mod tests {
}
let thread_store = cx.new(|cx| ThreadStore::new(cx));
- let history =
- cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx)));
let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default()));
let (message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| {
@@ -2709,8 +2705,7 @@ mod tests {
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
let thread_store = Some(cx.new(|cx| ThreadStore::new(cx)));
- let history =
- cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx)));
let message_editor = cx.update(|window, cx| {
cx.new(|cx| {
@@ -2810,8 +2805,7 @@ mod tests {
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
let thread_store = Some(cx.new(|cx| ThreadStore::new(cx)));
- let history =
- cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx)));
let session_id = acp::SessionId::new("thread-123");
let title = Some("Previous Conversation".into());
@@ -2886,8 +2880,7 @@ mod tests {
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
let thread_store = None;
- let history =
- cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx)));
let message_editor = cx.update(|window, cx| {
cx.new(|cx| {
@@ -2943,8 +2936,7 @@ mod tests {
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
let thread_store = None;
- let history =
- cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx)));
let message_editor = cx.update(|window, cx| {
cx.new(|cx| {
@@ -2998,8 +2990,7 @@ mod tests {
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
let thread_store = Some(cx.new(|cx| ThreadStore::new(cx)));
- let history =
- cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx)));
let message_editor = cx.update(|window, cx| {
cx.new(|cx| {
@@ -3054,8 +3045,7 @@ mod tests {
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
let thread_store = Some(cx.new(|cx| ThreadStore::new(cx)));
- let history =
- cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx)));
let message_editor = cx.update(|window, cx| {
cx.new(|cx| {
@@ -3119,8 +3109,7 @@ mod tests {
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
let thread_store = Some(cx.new(|cx| ThreadStore::new(cx)));
- let history =
- cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx)));
let (message_editor, editor) = workspace.update_in(cx, |workspace, window, cx| {
let workspace_handle = cx.weak_entity();
@@ -3279,8 +3268,7 @@ mod tests {
});
let thread_store = Some(cx.new(|cx| ThreadStore::new(cx)));
- let history =
- cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx)));
// Create a new `MessageEditor`. The `EditorMode::full()` has to be used
// to ensure we have a fixed viewport, so we can eventually actually
@@ -3400,8 +3388,7 @@ mod tests {
let mut cx = VisualTestContext::from_window(window.into(), cx);
let thread_store = cx.new(|cx| ThreadStore::new(cx));
- let history =
- cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx)));
let (message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| {
let workspace_handle = cx.weak_entity();
@@ -3483,8 +3470,7 @@ mod tests {
let mut cx = VisualTestContext::from_window(window.into(), cx);
let thread_store = cx.new(|cx| ThreadStore::new(cx));
- let history =
- cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx)));
let (message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| {
let workspace_handle = cx.weak_entity();
@@ -3568,8 +3554,7 @@ mod tests {
let (multi_workspace, cx) =
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
- let history =
- cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx)));
let message_editor = cx.update(|window, cx| {
cx.new(|cx| {
@@ -3721,8 +3706,7 @@ mod tests {
let (multi_workspace, cx) =
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
- let history =
- cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx)));
+ let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx)));
let message_editor = cx.update(|window, cx| {
cx.new(|cx| {
@@ -1,5 +1,6 @@
use crate::{AgentPanel, AgentPanelEvent, NewThread};
use acp_thread::ThreadStatus;
+use action_log::DiffStats;
use agent::ThreadStore;
use agent_client_protocol as acp;
use agent_settings::AgentSettings;
@@ -73,6 +74,7 @@ struct ActiveThreadInfo {
icon: IconName,
icon_from_external_svg: Option<SharedString>,
is_background: bool,
+ diff_stats: DiffStats,
}
impl From<&ActiveThreadInfo> for acp_thread::AgentSessionInfo {
@@ -98,6 +100,7 @@ struct ThreadEntry {
is_live: bool,
is_background: bool,
highlight_positions: Vec<usize>,
+ diff_stats: DiffStats,
}
#[derive(Clone)]
@@ -402,6 +405,8 @@ impl Sidebar {
}
};
+ let diff_stats = thread.action_log().read(cx).diff_stats(cx);
+
ActiveThreadInfo {
session_id,
title,
@@ -409,6 +414,7 @@ impl Sidebar {
icon,
icon_from_external_svg,
is_background,
+ diff_stats,
}
})
.collect()
@@ -472,6 +478,7 @@ impl Sidebar {
is_live: false,
is_background: false,
highlight_positions: Vec::new(),
+ diff_stats: DiffStats::default(),
});
}
}
@@ -497,6 +504,7 @@ impl Sidebar {
thread.icon_from_external_svg = info.icon_from_external_svg.clone();
thread.is_live = true;
thread.is_background = info.is_background;
+ thread.diff_stats = info.diff_stats;
}
}
@@ -1171,6 +1179,12 @@ impl Sidebar {
.highlight_positions(thread.highlight_positions.to_vec())
.status(thread.status)
.notified(has_notification)
+ .when(thread.diff_stats.lines_added > 0, |this| {
+ this.added(thread.diff_stats.lines_added as usize)
+ })
+ .when(thread.diff_stats.lines_removed > 0, |this| {
+ this.removed(thread.diff_stats.lines_removed as usize)
+ })
.selected(self.focused_thread.as_ref() == Some(&session_info.session_id))
.focused(is_selected)
.on_click(cx.listener(move |this, _, window, cx| {
@@ -1987,6 +2001,7 @@ mod tests {
is_live: false,
is_background: false,
highlight_positions: Vec::new(),
+ diff_stats: DiffStats::default(),
}),
// Active thread with Running status
ListEntry::Thread(ThreadEntry {
@@ -2005,6 +2020,7 @@ mod tests {
is_live: true,
is_background: false,
highlight_positions: Vec::new(),
+ diff_stats: DiffStats::default(),
}),
// Active thread with Error status
ListEntry::Thread(ThreadEntry {
@@ -2023,6 +2039,7 @@ mod tests {
is_live: true,
is_background: false,
highlight_positions: Vec::new(),
+ diff_stats: DiffStats::default(),
}),
// Thread with WaitingForConfirmation status, not active
ListEntry::Thread(ThreadEntry {
@@ -2041,6 +2058,7 @@ mod tests {
is_live: false,
is_background: false,
highlight_positions: Vec::new(),
+ diff_stats: DiffStats::default(),
}),
// Background thread that completed (should show notification)
ListEntry::Thread(ThreadEntry {
@@ -2059,6 +2077,7 @@ mod tests {
is_live: true,
is_background: true,
highlight_positions: Vec::new(),
+ diff_stats: DiffStats::default(),
}),
// View More entry
ListEntry::ViewMore {
@@ -1,118 +1,21 @@
-use crate::ConnectionView;
-use crate::{AgentPanel, RemoveHistory, RemoveSelectedThread};
use acp_thread::{AgentSessionInfo, AgentSessionList, AgentSessionListRequest, SessionListUpdate};
use agent_client_protocol as acp;
-use chrono::{Datelike as _, Local, NaiveDate, TimeDelta, Utc};
-use editor::{Editor, EditorEvent};
-use fuzzy::StringMatchCandidate;
-use gpui::{
- App, Entity, EventEmitter, FocusHandle, Focusable, ScrollStrategy, Task,
- UniformListScrollHandle, WeakEntity, Window, uniform_list,
-};
-use std::{fmt::Display, ops::Range, rc::Rc};
-use text::Bias;
-use time::{OffsetDateTime, UtcOffset};
-use ui::{
- ElementId, HighlightedLabel, IconButtonShape, ListItem, ListItemSpacing, Tab, Tooltip,
- WithScrollbar, prelude::*,
-};
-
-const DEFAULT_TITLE: &SharedString = &SharedString::new_static("New Thread");
-
-fn thread_title(entry: &AgentSessionInfo) -> &SharedString {
- entry
- .title
- .as_ref()
- .filter(|title| !title.is_empty())
- .unwrap_or(DEFAULT_TITLE)
-}
+use gpui::{App, Task};
+use std::rc::Rc;
+use ui::prelude::*;
pub struct ThreadHistory {
session_list: Option<Rc<dyn AgentSessionList>>,
sessions: Vec<AgentSessionInfo>,
- scroll_handle: UniformListScrollHandle,
- selected_index: usize,
- hovered_index: Option<usize>,
- search_editor: Entity<Editor>,
- search_query: SharedString,
- visible_items: Vec<ListItemType>,
- local_timezone: UtcOffset,
- confirming_delete_history: bool,
- _visible_items_task: Task<()>,
_refresh_task: Task<()>,
_watch_task: Option<Task<()>>,
- _subscriptions: Vec<gpui::Subscription>,
-}
-
-enum ListItemType {
- BucketSeparator(TimeBucket),
- Entry {
- entry: AgentSessionInfo,
- format: EntryTimeFormat,
- },
- SearchResult {
- entry: AgentSessionInfo,
- positions: Vec<usize>,
- },
-}
-
-impl ListItemType {
- fn history_entry(&self) -> Option<&AgentSessionInfo> {
- match self {
- ListItemType::Entry { entry, .. } => Some(entry),
- ListItemType::SearchResult { entry, .. } => Some(entry),
- _ => None,
- }
- }
}
-pub enum ThreadHistoryEvent {
- Open(AgentSessionInfo),
-}
-
-impl EventEmitter<ThreadHistoryEvent> for ThreadHistory {}
-
impl ThreadHistory {
- pub fn new(
- session_list: Option<Rc<dyn AgentSessionList>>,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) -> Self {
- let search_editor = cx.new(|cx| {
- let mut editor = Editor::single_line(window, cx);
- editor.set_placeholder_text("Search threads...", window, cx);
- editor
- });
-
- let search_editor_subscription =
- cx.subscribe(&search_editor, |this, search_editor, event, cx| {
- if let EditorEvent::BufferEdited = event {
- let query = search_editor.read(cx).text(cx);
- if this.search_query != query {
- this.search_query = query.into();
- this.update_visible_items(false, cx);
- }
- }
- });
-
- let scroll_handle = UniformListScrollHandle::default();
-
+ pub fn new(session_list: Option<Rc<dyn AgentSessionList>>, cx: &mut Context<Self>) -> Self {
let mut this = Self {
session_list: None,
sessions: Vec::new(),
- scroll_handle,
- selected_index: 0,
- hovered_index: None,
- visible_items: Default::default(),
- search_editor,
- local_timezone: UtcOffset::from_whole_seconds(
- chrono::Local::now().offset().local_minus_utc(),
- )
- .unwrap(),
- search_query: SharedString::default(),
- confirming_delete_history: false,
- _subscriptions: vec![search_editor_subscription],
- _visible_items_task: Task::ready(()),
_refresh_task: Task::ready(()),
_watch_task: None,
};
@@ -120,43 +23,6 @@ impl ThreadHistory {
this
}
- fn update_visible_items(&mut self, preserve_selected_item: bool, cx: &mut Context<Self>) {
- let entries = self.sessions.clone();
- let new_list_items = if self.search_query.is_empty() {
- self.add_list_separators(entries, cx)
- } else {
- self.filter_search_results(entries, cx)
- };
- let selected_history_entry = if preserve_selected_item {
- self.selected_history_entry().cloned()
- } else {
- None
- };
-
- self._visible_items_task = cx.spawn(async move |this, cx| {
- let new_visible_items = new_list_items.await;
- this.update(cx, |this, cx| {
- let new_selected_index = if let Some(history_entry) = selected_history_entry {
- new_visible_items
- .iter()
- .position(|visible_entry| {
- visible_entry
- .history_entry()
- .is_some_and(|entry| entry.session_id == history_entry.session_id)
- })
- .unwrap_or(0)
- } else {
- 0
- };
-
- this.visible_items = new_visible_items;
- this.set_selected_index(new_selected_index, Bias::Right, cx);
- cx.notify();
- })
- .ok();
- });
- }
-
pub fn set_session_list(
&mut self,
session_list: Option<Rc<dyn AgentSessionList>>,
@@ -170,9 +36,6 @@ impl ThreadHistory {
self.session_list = session_list;
self.sessions.clear();
- self.visible_items.clear();
- self.selected_index = 0;
- self._visible_items_task = Task::ready(());
self._refresh_task = Task::ready(());
let Some(session_list) = self.session_list.as_ref() else {
@@ -181,9 +44,8 @@ impl ThreadHistory {
return;
};
let Some(rx) = session_list.watch(cx) else {
- // No watch support - do a one-time refresh
self._watch_task = None;
- self.refresh_sessions(false, false, cx);
+ self.refresh_sessions(false, cx);
return;
};
session_list.notify_refresh();
@@ -191,7 +53,6 @@ impl ThreadHistory {
self._watch_task = Some(cx.spawn(async move |this, cx| {
while let Ok(first_update) = rx.recv().await {
let mut updates = vec![first_update];
- // Collect any additional updates that are already in the channel
while let Ok(update) = rx.try_recv() {
updates.push(update);
}
@@ -202,7 +63,7 @@ impl ThreadHistory {
.any(|u| matches!(u, SessionListUpdate::Refresh));
if needs_refresh {
- this.refresh_sessions(true, false, cx);
+ this.refresh_sessions(false, cx);
} else {
for update in updates {
if let SessionListUpdate::SessionInfo { session_id, update } = update {
@@ -217,7 +78,7 @@ impl ThreadHistory {
}
pub(crate) fn refresh_full_history(&mut self, cx: &mut Context<Self>) {
- self.refresh_sessions(true, true, cx);
+ self.refresh_sessions(true, cx);
}
fn apply_info_update(
@@ -258,23 +119,15 @@ impl ThreadHistory {
session.meta = Some(meta);
}
- self.update_visible_items(true, cx);
+ cx.notify();
}
- fn refresh_sessions(
- &mut self,
- preserve_selected_item: bool,
- load_all_pages: bool,
- cx: &mut Context<Self>,
- ) {
+ fn refresh_sessions(&mut self, load_all_pages: bool, cx: &mut Context<Self>) {
let Some(session_list) = self.session_list.clone() else {
- self.update_visible_items(preserve_selected_item, cx);
+ cx.notify();
return;
};
- // If a new refresh arrives while pagination is in progress, the previous
- // `_refresh_task` is cancelled. This is intentional (latest refresh wins),
- // but means sessions may be in a partial state until the new refresh completes.
self._refresh_task = cx.spawn(async move |this, cx| {
let mut cursor: Option<String> = None;
let mut is_first_page = true;
@@ -305,7 +158,7 @@ impl ThreadHistory {
} else {
this.sessions.extend(page_sessions);
}
- this.update_visible_items(preserve_selected_item, cx);
+ cx.notify();
})
.ok();
@@ -378,693 +231,11 @@ impl ThreadHistory {
}
}
- fn add_list_separators(
- &self,
- entries: Vec<AgentSessionInfo>,
- cx: &App,
- ) -> Task<Vec<ListItemType>> {
- cx.background_spawn(async move {
- let mut items = Vec::with_capacity(entries.len() + 1);
- let mut bucket = None;
- let today = Local::now().naive_local().date();
-
- for entry in entries.into_iter() {
- let entry_bucket = entry
- .updated_at
- .map(|timestamp| {
- let entry_date = timestamp.with_timezone(&Local).naive_local().date();
- TimeBucket::from_dates(today, entry_date)
- })
- .unwrap_or(TimeBucket::All);
-
- if Some(entry_bucket) != bucket {
- bucket = Some(entry_bucket);
- items.push(ListItemType::BucketSeparator(entry_bucket));
- }
-
- items.push(ListItemType::Entry {
- entry,
- format: entry_bucket.into(),
- });
- }
- items
- })
- }
-
- fn filter_search_results(
- &self,
- entries: Vec<AgentSessionInfo>,
- cx: &App,
- ) -> Task<Vec<ListItemType>> {
- let query = self.search_query.clone();
- cx.background_spawn({
- let executor = cx.background_executor().clone();
- async move {
- let mut candidates = Vec::with_capacity(entries.len());
-
- for (idx, entry) in entries.iter().enumerate() {
- candidates.push(StringMatchCandidate::new(idx, thread_title(entry)));
- }
-
- const MAX_MATCHES: usize = 100;
-
- let matches = fuzzy::match_strings(
- &candidates,
- &query,
- false,
- true,
- MAX_MATCHES,
- &Default::default(),
- executor,
- )
- .await;
-
- matches
- .into_iter()
- .map(|search_match| ListItemType::SearchResult {
- entry: entries[search_match.candidate_id].clone(),
- positions: search_match.positions,
- })
- .collect()
- }
- })
- }
-
- fn search_produced_no_matches(&self) -> bool {
- self.visible_items.is_empty() && !self.search_query.is_empty()
- }
-
- fn selected_history_entry(&self) -> Option<&AgentSessionInfo> {
- self.get_history_entry(self.selected_index)
- }
-
- fn get_history_entry(&self, visible_items_ix: usize) -> Option<&AgentSessionInfo> {
- self.visible_items.get(visible_items_ix)?.history_entry()
- }
-
- fn set_selected_index(&mut self, mut index: usize, bias: Bias, cx: &mut Context<Self>) {
- if self.visible_items.len() == 0 {
- self.selected_index = 0;
- return;
- }
- while matches!(
- self.visible_items.get(index),
- None | Some(ListItemType::BucketSeparator(..))
- ) {
- index = match bias {
- Bias::Left => {
- if index == 0 {
- self.visible_items.len() - 1
- } else {
- index - 1
- }
- }
- Bias::Right => {
- if index >= self.visible_items.len() - 1 {
- 0
- } else {
- index + 1
- }
- }
- };
- }
- self.selected_index = index;
- self.scroll_handle
- .scroll_to_item(index, ScrollStrategy::Top);
- cx.notify()
- }
-
- pub fn select_previous(
- &mut self,
- _: &menu::SelectPrevious,
- _window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- if self.selected_index == 0 {
- self.set_selected_index(self.visible_items.len() - 1, Bias::Left, cx);
- } else {
- self.set_selected_index(self.selected_index - 1, Bias::Left, cx);
- }
- }
-
- pub fn select_next(
- &mut self,
- _: &menu::SelectNext,
- _window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- if self.selected_index == self.visible_items.len() - 1 {
- self.set_selected_index(0, Bias::Right, cx);
+ pub(crate) fn delete_sessions(&self, cx: &mut App) -> Task<anyhow::Result<()>> {
+ if let Some(session_list) = self.session_list.as_ref() {
+ session_list.delete_sessions(cx)
} else {
- self.set_selected_index(self.selected_index + 1, Bias::Right, cx);
- }
- }
-
- fn select_first(
- &mut self,
- _: &menu::SelectFirst,
- _window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- self.set_selected_index(0, Bias::Right, cx);
- }
-
- fn select_last(&mut self, _: &menu::SelectLast, _window: &mut Window, cx: &mut Context<Self>) {
- self.set_selected_index(self.visible_items.len() - 1, Bias::Left, cx);
- }
-
- fn confirm(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context<Self>) {
- self.confirm_entry(self.selected_index, cx);
- }
-
- fn confirm_entry(&mut self, ix: usize, cx: &mut Context<Self>) {
- let Some(entry) = self.get_history_entry(ix) else {
- return;
- };
- cx.emit(ThreadHistoryEvent::Open(entry.clone()));
- }
-
- fn remove_selected_thread(
- &mut self,
- _: &RemoveSelectedThread,
- _window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- self.remove_thread(self.selected_index, cx)
- }
-
- fn remove_thread(&mut self, visible_item_ix: usize, cx: &mut Context<Self>) {
- let Some(entry) = self.get_history_entry(visible_item_ix) else {
- return;
- };
- let Some(session_list) = self.session_list.as_ref() else {
- return;
- };
- if !session_list.supports_delete() {
- return;
- }
- let task = session_list.delete_session(&entry.session_id, cx);
- task.detach_and_log_err(cx);
- }
-
- fn remove_history(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
- let Some(session_list) = self.session_list.as_ref() else {
- return;
- };
- if !session_list.supports_delete() {
- return;
- }
- session_list.delete_sessions(cx).detach_and_log_err(cx);
- self.confirming_delete_history = false;
- cx.notify();
- }
-
- fn prompt_delete_history(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
- self.confirming_delete_history = true;
- cx.notify();
- }
-
- fn cancel_delete_history(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
- self.confirming_delete_history = false;
- cx.notify();
- }
-
- fn render_list_items(
- &mut self,
- range: Range<usize>,
- _window: &mut Window,
- cx: &mut Context<Self>,
- ) -> Vec<AnyElement> {
- self.visible_items
- .get(range.clone())
- .into_iter()
- .flatten()
- .enumerate()
- .map(|(ix, item)| self.render_list_item(item, range.start + ix, cx))
- .collect()
- }
-
- fn render_list_item(&self, item: &ListItemType, ix: usize, cx: &Context<Self>) -> AnyElement {
- match item {
- ListItemType::Entry { entry, format } => self
- .render_history_entry(entry, *format, ix, Vec::default(), cx)
- .into_any(),
- ListItemType::SearchResult { entry, positions } => self.render_history_entry(
- entry,
- EntryTimeFormat::DateAndTime,
- ix,
- positions.clone(),
- cx,
- ),
- ListItemType::BucketSeparator(bucket) => div()
- .px(DynamicSpacing::Base06.rems(cx))
- .pt_2()
- .pb_1()
- .child(
- Label::new(bucket.to_string())
- .size(LabelSize::XSmall)
- .color(Color::Muted),
- )
- .into_any_element(),
- }
- }
-
- fn render_history_entry(
- &self,
- entry: &AgentSessionInfo,
- format: EntryTimeFormat,
- ix: usize,
- highlight_positions: Vec<usize>,
- cx: &Context<Self>,
- ) -> AnyElement {
- let selected = ix == self.selected_index;
- let hovered = Some(ix) == self.hovered_index;
- let entry_time = entry.updated_at;
- let display_text = match (format, entry_time) {
- (EntryTimeFormat::DateAndTime, Some(entry_time)) => {
- let now = Utc::now();
- let duration = now.signed_duration_since(entry_time);
- let days = duration.num_days();
-
- format!("{}d", days)
- }
- (EntryTimeFormat::TimeOnly, Some(entry_time)) => {
- format.format_timestamp(entry_time.timestamp(), self.local_timezone)
- }
- (_, None) => "—".to_string(),
- };
-
- let title = thread_title(entry).clone();
- let full_date = entry_time
- .map(|time| {
- EntryTimeFormat::DateAndTime.format_timestamp(time.timestamp(), self.local_timezone)
- })
- .unwrap_or_else(|| "Unknown".to_string());
-
- h_flex()
- .w_full()
- .pb_1()
- .child(
- ListItem::new(ix)
- .rounded()
- .toggle_state(selected)
- .spacing(ListItemSpacing::Sparse)
- .start_slot(
- h_flex()
- .w_full()
- .gap_2()
- .justify_between()
- .child(
- HighlightedLabel::new(thread_title(entry), highlight_positions)
- .size(LabelSize::Small)
- .truncate(),
- )
- .child(
- Label::new(display_text)
- .color(Color::Muted)
- .size(LabelSize::XSmall),
- ),
- )
- .tooltip(move |_, cx| {
- Tooltip::with_meta(title.clone(), None, full_date.clone(), cx)
- })
- .on_hover(cx.listener(move |this, is_hovered, _window, cx| {
- if *is_hovered {
- this.hovered_index = Some(ix);
- } else if this.hovered_index == Some(ix) {
- this.hovered_index = None;
- }
-
- cx.notify();
- }))
- .end_slot::<IconButton>(if hovered && self.supports_delete() {
- Some(
- IconButton::new("delete", IconName::Trash)
- .shape(IconButtonShape::Square)
- .icon_size(IconSize::XSmall)
- .icon_color(Color::Muted)
- .tooltip(move |_window, cx| {
- Tooltip::for_action("Delete", &RemoveSelectedThread, cx)
- })
- .on_click(cx.listener(move |this, _, _, cx| {
- this.remove_thread(ix, cx);
- cx.stop_propagation()
- })),
- )
- } else {
- None
- })
- .on_click(cx.listener(move |this, _, _, cx| this.confirm_entry(ix, cx))),
- )
- .into_any_element()
- }
-}
-
-impl Focusable for ThreadHistory {
- fn focus_handle(&self, cx: &App) -> FocusHandle {
- self.search_editor.focus_handle(cx)
- }
-}
-
-impl Render for ThreadHistory {
- fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
- let has_no_history = self.is_empty();
-
- v_flex()
- .key_context("ThreadHistory")
- .size_full()
- .bg(cx.theme().colors().panel_background)
- .on_action(cx.listener(Self::select_previous))
- .on_action(cx.listener(Self::select_next))
- .on_action(cx.listener(Self::select_first))
- .on_action(cx.listener(Self::select_last))
- .on_action(cx.listener(Self::confirm))
- .on_action(cx.listener(Self::remove_selected_thread))
- .on_action(cx.listener(|this, _: &RemoveHistory, window, cx| {
- this.remove_history(window, cx);
- }))
- .child(
- h_flex()
- .h(Tab::container_height(cx))
- .w_full()
- .py_1()
- .px_2()
- .gap_2()
- .justify_between()
- .border_b_1()
- .border_color(cx.theme().colors().border)
- .child(
- Icon::new(IconName::MagnifyingGlass)
- .color(Color::Muted)
- .size(IconSize::Small),
- )
- .child(self.search_editor.clone()),
- )
- .child({
- let view = v_flex()
- .id("list-container")
- .relative()
- .overflow_hidden()
- .flex_grow();
-
- if has_no_history {
- view.justify_center().items_center().child(
- Label::new("You don't have any past threads yet.")
- .size(LabelSize::Small)
- .color(Color::Muted),
- )
- } else if self.search_produced_no_matches() {
- view.justify_center()
- .items_center()
- .child(Label::new("No threads match your search.").size(LabelSize::Small))
- } else {
- view.child(
- uniform_list(
- "thread-history",
- self.visible_items.len(),
- cx.processor(|this, range: Range<usize>, window, cx| {
- this.render_list_items(range, window, cx)
- }),
- )
- .p_1()
- .pr_4()
- .track_scroll(&self.scroll_handle)
- .flex_grow(),
- )
- .vertical_scrollbar_for(&self.scroll_handle, window, cx)
- }
- })
- .when(!has_no_history && self.supports_delete(), |this| {
- this.child(
- h_flex()
- .p_2()
- .border_t_1()
- .border_color(cx.theme().colors().border_variant)
- .when(!self.confirming_delete_history, |this| {
- this.child(
- Button::new("delete_history", "Delete All History")
- .full_width()
- .style(ButtonStyle::Outlined)
- .label_size(LabelSize::Small)
- .on_click(cx.listener(|this, _, window, cx| {
- this.prompt_delete_history(window, cx);
- })),
- )
- })
- .when(self.confirming_delete_history, |this| {
- this.w_full()
- .gap_2()
- .flex_wrap()
- .justify_between()
- .child(
- h_flex()
- .flex_wrap()
- .gap_1()
- .child(
- Label::new("Delete all threads?")
- .size(LabelSize::Small),
- )
- .child(
- Label::new("You won't be able to recover them later.")
- .size(LabelSize::Small)
- .color(Color::Muted),
- ),
- )
- .child(
- h_flex()
- .gap_1()
- .child(
- Button::new("cancel_delete", "Cancel")
- .label_size(LabelSize::Small)
- .on_click(cx.listener(|this, _, window, cx| {
- this.cancel_delete_history(window, cx);
- })),
- )
- .child(
- Button::new("confirm_delete", "Delete")
- .style(ButtonStyle::Tinted(ui::TintColor::Error))
- .color(Color::Error)
- .label_size(LabelSize::Small)
- .on_click(cx.listener(|_, _, window, cx| {
- window.dispatch_action(
- Box::new(RemoveHistory),
- cx,
- );
- })),
- ),
- )
- }),
- )
- })
- }
-}
-
-#[derive(IntoElement)]
-pub struct HistoryEntryElement {
- entry: AgentSessionInfo,
- thread_view: WeakEntity<ConnectionView>,
- selected: bool,
- hovered: bool,
- supports_delete: bool,
- on_hover: Box<dyn Fn(&bool, &mut Window, &mut App) + 'static>,
-}
-
-impl HistoryEntryElement {
- pub fn new(entry: AgentSessionInfo, thread_view: WeakEntity<ConnectionView>) -> Self {
- Self {
- entry,
- thread_view,
- selected: false,
- hovered: false,
- supports_delete: false,
- on_hover: Box::new(|_, _, _| {}),
- }
- }
-
- pub fn supports_delete(mut self, supports_delete: bool) -> Self {
- self.supports_delete = supports_delete;
- self
- }
-
- pub fn hovered(mut self, hovered: bool) -> Self {
- self.hovered = hovered;
- self
- }
-
- pub fn on_hover(mut self, on_hover: impl Fn(&bool, &mut Window, &mut App) + 'static) -> Self {
- self.on_hover = Box::new(on_hover);
- self
- }
-}
-
-impl RenderOnce for HistoryEntryElement {
- fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement {
- let id = ElementId::Name(self.entry.session_id.0.clone().into());
- let title = thread_title(&self.entry).clone();
- let formatted_time = self
- .entry
- .updated_at
- .map(|timestamp| {
- let now = chrono::Utc::now();
- let duration = now.signed_duration_since(timestamp);
-
- if duration.num_days() > 0 {
- format!("{}d", duration.num_days())
- } else if duration.num_hours() > 0 {
- format!("{}h ago", duration.num_hours())
- } else if duration.num_minutes() > 0 {
- format!("{}m ago", duration.num_minutes())
- } else {
- "Just now".to_string()
- }
- })
- .unwrap_or_else(|| "Unknown".to_string());
-
- ListItem::new(id)
- .rounded()
- .toggle_state(self.selected)
- .spacing(ListItemSpacing::Sparse)
- .start_slot(
- h_flex()
- .w_full()
- .gap_2()
- .justify_between()
- .child(Label::new(title).size(LabelSize::Small).truncate())
- .child(
- Label::new(formatted_time)
- .color(Color::Muted)
- .size(LabelSize::XSmall),
- ),
- )
- .on_hover(self.on_hover)
- .end_slot::<IconButton>(if (self.hovered || self.selected) && self.supports_delete {
- Some(
- IconButton::new("delete", IconName::Trash)
- .shape(IconButtonShape::Square)
- .icon_size(IconSize::XSmall)
- .icon_color(Color::Muted)
- .tooltip(move |_window, cx| {
- Tooltip::for_action("Delete", &RemoveSelectedThread, cx)
- })
- .on_click({
- let thread_view = self.thread_view.clone();
- let session_id = self.entry.session_id.clone();
-
- move |_event, _window, cx| {
- if let Some(thread_view) = thread_view.upgrade() {
- thread_view.update(cx, |thread_view, cx| {
- thread_view.delete_history_entry(&session_id, cx);
- });
- }
- }
- }),
- )
- } else {
- None
- })
- .on_click({
- let thread_view = self.thread_view.clone();
- let entry = self.entry;
-
- move |_event, window, cx| {
- if let Some(workspace) = thread_view
- .upgrade()
- .and_then(|view| view.read(cx).workspace().upgrade())
- {
- if let Some(panel) = workspace.read(cx).panel::<AgentPanel>(cx) {
- panel.update(cx, |panel, cx| {
- panel.load_agent_thread(
- entry.session_id.clone(),
- entry.cwd.clone(),
- entry.title.clone(),
- window,
- cx,
- );
- });
- }
- }
- }
- })
- }
-}
-
-#[derive(Clone, Copy)]
-pub enum EntryTimeFormat {
- DateAndTime,
- TimeOnly,
-}
-
-impl EntryTimeFormat {
- fn format_timestamp(&self, timestamp: i64, timezone: UtcOffset) -> String {
- let timestamp = OffsetDateTime::from_unix_timestamp(timestamp).unwrap();
-
- match self {
- EntryTimeFormat::DateAndTime => time_format::format_localized_timestamp(
- timestamp,
- OffsetDateTime::now_utc(),
- timezone,
- time_format::TimestampFormat::EnhancedAbsolute,
- ),
- EntryTimeFormat::TimeOnly => time_format::format_time(timestamp.to_offset(timezone)),
- }
- }
-}
-
-impl From<TimeBucket> for EntryTimeFormat {
- fn from(bucket: TimeBucket) -> Self {
- match bucket {
- TimeBucket::Today => EntryTimeFormat::TimeOnly,
- TimeBucket::Yesterday => EntryTimeFormat::TimeOnly,
- TimeBucket::ThisWeek => EntryTimeFormat::DateAndTime,
- TimeBucket::PastWeek => EntryTimeFormat::DateAndTime,
- TimeBucket::All => EntryTimeFormat::DateAndTime,
- }
- }
-}
-
-#[derive(PartialEq, Eq, Clone, Copy, Debug)]
-enum TimeBucket {
- Today,
- Yesterday,
- ThisWeek,
- PastWeek,
- All,
-}
-
-impl TimeBucket {
- fn from_dates(reference: NaiveDate, date: NaiveDate) -> Self {
- if date == reference {
- return TimeBucket::Today;
- }
-
- if date == reference - TimeDelta::days(1) {
- return TimeBucket::Yesterday;
- }
-
- let week = date.iso_week();
-
- if reference.iso_week() == week {
- return TimeBucket::ThisWeek;
- }
-
- let last_week = (reference - TimeDelta::days(7)).iso_week();
-
- if week == last_week {
- return TimeBucket::PastWeek;
- }
-
- TimeBucket::All
- }
-}
-
-impl Display for TimeBucket {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- match self {
- TimeBucket::Today => write!(f, "Today"),
- TimeBucket::Yesterday => write!(f, "Yesterday"),
- TimeBucket::ThisWeek => write!(f, "This Week"),
- TimeBucket::PastWeek => write!(f, "Past Week"),
- TimeBucket::All => write!(f, "All"),
+ Task::ready(Ok(()))
}
}
}
@@ -1073,7 +244,6 @@ impl Display for TimeBucket {
mod tests {
use super::*;
use acp_thread::AgentSessionListResponse;
- use chrono::NaiveDate;
use gpui::TestAppContext;
use std::{
any::Any,
@@ -1246,9 +416,7 @@ mod tests {
vec![test_session("session-2", "Second")],
));
- let (history, cx) = cx.add_window_view(|window, cx| {
- ThreadHistory::new(Some(session_list.clone()), window, cx)
- });
+ let history = cx.new(|cx| ThreadHistory::new(Some(session_list.clone()), cx));
cx.run_until_parked();
history.update(cx, |history, _cx| {
@@ -1270,9 +438,7 @@ mod tests {
vec![test_session("session-2", "Second")],
));
- let (history, cx) = cx.add_window_view(|window, cx| {
- ThreadHistory::new(Some(session_list.clone()), window, cx)
- });
+ let history = cx.new(|cx| ThreadHistory::new(Some(session_list.clone()), cx));
cx.run_until_parked();
session_list.clear_requested_cursors();
@@ -1307,9 +473,7 @@ mod tests {
vec![test_session("session-2", "Second")],
));
- let (history, cx) = cx.add_window_view(|window, cx| {
- ThreadHistory::new(Some(session_list.clone()), window, cx)
- });
+ let history = cx.new(|cx| ThreadHistory::new(Some(session_list.clone()), cx));
cx.run_until_parked();
history.update(cx, |history, cx| history.refresh_full_history(cx));
@@ -1340,9 +504,7 @@ mod tests {
vec![test_session("session-2", "Second")],
));
- let (history, cx) = cx.add_window_view(|window, cx| {
- ThreadHistory::new(Some(session_list.clone()), window, cx)
- });
+ let history = cx.new(|cx| ThreadHistory::new(Some(session_list.clone()), cx));
cx.run_until_parked();
history.update(cx, |history, cx| history.refresh_full_history(cx));
@@ -1371,9 +533,7 @@ mod tests {
vec![test_session("session-2", "Second")],
));
- let (history, cx) = cx.add_window_view(|window, cx| {
- ThreadHistory::new(Some(session_list.clone()), window, cx)
- });
+ let history = cx.new(|cx| ThreadHistory::new(Some(session_list.clone()), cx));
cx.run_until_parked();
history.update(cx, |history, cx| history.refresh_full_history(cx));
@@ -0,0 +1,878 @@
+use crate::thread_history::ThreadHistory;
+use crate::{AgentPanel, ConnectionView, RemoveHistory, RemoveSelectedThread};
+use acp_thread::AgentSessionInfo;
+use chrono::{Datelike as _, Local, NaiveDate, TimeDelta, Utc};
+use editor::{Editor, EditorEvent};
+use fuzzy::StringMatchCandidate;
+use gpui::{
+ AnyElement, App, Entity, EventEmitter, FocusHandle, Focusable, ScrollStrategy, Task,
+ UniformListScrollHandle, WeakEntity, Window, uniform_list,
+};
+use std::{fmt::Display, ops::Range};
+use text::Bias;
+use time::{OffsetDateTime, UtcOffset};
+use ui::{
+ ElementId, HighlightedLabel, IconButtonShape, ListItem, ListItemSpacing, Tab, Tooltip,
+ WithScrollbar, prelude::*,
+};
+
+const DEFAULT_TITLE: &SharedString = &SharedString::new_static("New Thread");
+
+pub(crate) fn thread_title(entry: &AgentSessionInfo) -> &SharedString {
+ entry
+ .title
+ .as_ref()
+ .filter(|title| !title.is_empty())
+ .unwrap_or(DEFAULT_TITLE)
+}
+
+pub struct ThreadHistoryView {
+ history: Entity<ThreadHistory>,
+ scroll_handle: UniformListScrollHandle,
+ selected_index: usize,
+ hovered_index: Option<usize>,
+ search_editor: Entity<Editor>,
+ search_query: SharedString,
+ visible_items: Vec<ListItemType>,
+ local_timezone: UtcOffset,
+ confirming_delete_history: bool,
+ _visible_items_task: Task<()>,
+ _subscriptions: Vec<gpui::Subscription>,
+}
+
+enum ListItemType {
+ BucketSeparator(TimeBucket),
+ Entry {
+ entry: AgentSessionInfo,
+ format: EntryTimeFormat,
+ },
+ SearchResult {
+ entry: AgentSessionInfo,
+ positions: Vec<usize>,
+ },
+}
+
+impl ListItemType {
+ fn history_entry(&self) -> Option<&AgentSessionInfo> {
+ match self {
+ ListItemType::Entry { entry, .. } => Some(entry),
+ ListItemType::SearchResult { entry, .. } => Some(entry),
+ _ => None,
+ }
+ }
+}
+
+pub enum ThreadHistoryViewEvent {
+ Open(AgentSessionInfo),
+}
+
+impl EventEmitter<ThreadHistoryViewEvent> for ThreadHistoryView {}
+
+impl ThreadHistoryView {
+ pub fn new(
+ history: Entity<ThreadHistory>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Self {
+ let search_editor = cx.new(|cx| {
+ let mut editor = Editor::single_line(window, cx);
+ editor.set_placeholder_text("Search threads...", window, cx);
+ editor
+ });
+
+ let search_editor_subscription =
+ cx.subscribe(&search_editor, |this, search_editor, event, cx| {
+ if let EditorEvent::BufferEdited = event {
+ let query = search_editor.read(cx).text(cx);
+ if this.search_query != query {
+ this.search_query = query.into();
+ this.update_visible_items(false, cx);
+ }
+ }
+ });
+
+ let history_subscription = cx.observe(&history, |this, _, cx| {
+ this.update_visible_items(true, cx);
+ });
+
+ let scroll_handle = UniformListScrollHandle::default();
+
+ let mut this = Self {
+ history,
+ scroll_handle,
+ selected_index: 0,
+ hovered_index: None,
+ visible_items: Default::default(),
+ search_editor,
+ local_timezone: UtcOffset::from_whole_seconds(
+ chrono::Local::now().offset().local_minus_utc(),
+ )
+ .unwrap(),
+ search_query: SharedString::default(),
+ confirming_delete_history: false,
+ _subscriptions: vec![search_editor_subscription, history_subscription],
+ _visible_items_task: Task::ready(()),
+ };
+ this.update_visible_items(false, cx);
+ this
+ }
+
+ fn update_visible_items(&mut self, preserve_selected_item: bool, cx: &mut Context<Self>) {
+ let entries = self.history.read(cx).sessions().to_vec();
+ let new_list_items = if self.search_query.is_empty() {
+ self.add_list_separators(entries, cx)
+ } else {
+ self.filter_search_results(entries, cx)
+ };
+ let selected_history_entry = if preserve_selected_item {
+ self.selected_history_entry().cloned()
+ } else {
+ None
+ };
+
+ self._visible_items_task = cx.spawn(async move |this, cx| {
+ let new_visible_items = new_list_items.await;
+ this.update(cx, |this, cx| {
+ let new_selected_index = if let Some(history_entry) = selected_history_entry {
+ new_visible_items
+ .iter()
+ .position(|visible_entry| {
+ visible_entry
+ .history_entry()
+ .is_some_and(|entry| entry.session_id == history_entry.session_id)
+ })
+ .unwrap_or(0)
+ } else {
+ 0
+ };
+
+ this.visible_items = new_visible_items;
+ this.set_selected_index(new_selected_index, Bias::Right, cx);
+ cx.notify();
+ })
+ .ok();
+ });
+ }
+
+ fn add_list_separators(
+ &self,
+ entries: Vec<AgentSessionInfo>,
+ cx: &App,
+ ) -> Task<Vec<ListItemType>> {
+ cx.background_spawn(async move {
+ let mut items = Vec::with_capacity(entries.len() + 1);
+ let mut bucket = None;
+ let today = Local::now().naive_local().date();
+
+ for entry in entries.into_iter() {
+ let entry_bucket = entry
+ .updated_at
+ .map(|timestamp| {
+ let entry_date = timestamp.with_timezone(&Local).naive_local().date();
+ TimeBucket::from_dates(today, entry_date)
+ })
+ .unwrap_or(TimeBucket::All);
+
+ if Some(entry_bucket) != bucket {
+ bucket = Some(entry_bucket);
+ items.push(ListItemType::BucketSeparator(entry_bucket));
+ }
+
+ items.push(ListItemType::Entry {
+ entry,
+ format: entry_bucket.into(),
+ });
+ }
+ items
+ })
+ }
+
+ fn filter_search_results(
+ &self,
+ entries: Vec<AgentSessionInfo>,
+ cx: &App,
+ ) -> Task<Vec<ListItemType>> {
+ let query = self.search_query.clone();
+ cx.background_spawn({
+ let executor = cx.background_executor().clone();
+ async move {
+ let mut candidates = Vec::with_capacity(entries.len());
+
+ for (idx, entry) in entries.iter().enumerate() {
+ candidates.push(StringMatchCandidate::new(idx, thread_title(entry)));
+ }
+
+ const MAX_MATCHES: usize = 100;
+
+ let matches = fuzzy::match_strings(
+ &candidates,
+ &query,
+ false,
+ true,
+ MAX_MATCHES,
+ &Default::default(),
+ executor,
+ )
+ .await;
+
+ matches
+ .into_iter()
+ .map(|search_match| ListItemType::SearchResult {
+ entry: entries[search_match.candidate_id].clone(),
+ positions: search_match.positions,
+ })
+ .collect()
+ }
+ })
+ }
+
+ fn search_produced_no_matches(&self) -> bool {
+ self.visible_items.is_empty() && !self.search_query.is_empty()
+ }
+
+ fn selected_history_entry(&self) -> Option<&AgentSessionInfo> {
+ self.get_history_entry(self.selected_index)
+ }
+
+ fn get_history_entry(&self, visible_items_ix: usize) -> Option<&AgentSessionInfo> {
+ self.visible_items.get(visible_items_ix)?.history_entry()
+ }
+
+ fn set_selected_index(&mut self, mut index: usize, bias: Bias, cx: &mut Context<Self>) {
+ if self.visible_items.len() == 0 {
+ self.selected_index = 0;
+ return;
+ }
+ while matches!(
+ self.visible_items.get(index),
+ None | Some(ListItemType::BucketSeparator(..))
+ ) {
+ index = match bias {
+ Bias::Left => {
+ if index == 0 {
+ self.visible_items.len() - 1
+ } else {
+ index - 1
+ }
+ }
+ Bias::Right => {
+ if index >= self.visible_items.len() - 1 {
+ 0
+ } else {
+ index + 1
+ }
+ }
+ };
+ }
+ self.selected_index = index;
+ self.scroll_handle
+ .scroll_to_item(index, ScrollStrategy::Top);
+ cx.notify()
+ }
+
+ fn select_previous(
+ &mut self,
+ _: &menu::SelectPrevious,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if self.selected_index == 0 {
+ self.set_selected_index(self.visible_items.len() - 1, Bias::Left, cx);
+ } else {
+ self.set_selected_index(self.selected_index - 1, Bias::Left, cx);
+ }
+ }
+
+ fn select_next(&mut self, _: &menu::SelectNext, _window: &mut Window, cx: &mut Context<Self>) {
+ if self.selected_index == self.visible_items.len() - 1 {
+ self.set_selected_index(0, Bias::Right, cx);
+ } else {
+ self.set_selected_index(self.selected_index + 1, Bias::Right, cx);
+ }
+ }
+
+ fn select_first(
+ &mut self,
+ _: &menu::SelectFirst,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.set_selected_index(0, Bias::Right, cx);
+ }
+
+ fn select_last(&mut self, _: &menu::SelectLast, _window: &mut Window, cx: &mut Context<Self>) {
+ self.set_selected_index(self.visible_items.len() - 1, Bias::Left, cx);
+ }
+
+ fn confirm(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context<Self>) {
+ self.confirm_entry(self.selected_index, cx);
+ }
+
+ fn confirm_entry(&mut self, ix: usize, cx: &mut Context<Self>) {
+ let Some(entry) = self.get_history_entry(ix) else {
+ return;
+ };
+ cx.emit(ThreadHistoryViewEvent::Open(entry.clone()));
+ }
+
+ fn remove_selected_thread(
+ &mut self,
+ _: &RemoveSelectedThread,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.remove_thread(self.selected_index, cx)
+ }
+
+ fn remove_thread(&mut self, visible_item_ix: usize, cx: &mut Context<Self>) {
+ let Some(entry) = self.get_history_entry(visible_item_ix) else {
+ return;
+ };
+ if !self.history.read(cx).supports_delete() {
+ return;
+ }
+ let session_id = entry.session_id.clone();
+ self.history.update(cx, |history, cx| {
+ history
+ .delete_session(&session_id, cx)
+ .detach_and_log_err(cx);
+ });
+ }
+
+ fn remove_history(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
+ if !self.history.read(cx).supports_delete() {
+ return;
+ }
+ self.history.update(cx, |history, cx| {
+ history.delete_sessions(cx).detach_and_log_err(cx);
+ });
+ self.confirming_delete_history = false;
+ cx.notify();
+ }
+
+ fn prompt_delete_history(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
+ self.confirming_delete_history = true;
+ cx.notify();
+ }
+
+ fn cancel_delete_history(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
+ self.confirming_delete_history = false;
+ cx.notify();
+ }
+
+ fn render_list_items(
+ &mut self,
+ range: Range<usize>,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Vec<AnyElement> {
+ self.visible_items
+ .get(range.clone())
+ .into_iter()
+ .flatten()
+ .enumerate()
+ .map(|(ix, item)| self.render_list_item(item, range.start + ix, cx))
+ .collect()
+ }
+
+ fn render_list_item(&self, item: &ListItemType, ix: usize, cx: &Context<Self>) -> AnyElement {
+ match item {
+ ListItemType::Entry { entry, format } => self
+ .render_history_entry(entry, *format, ix, Vec::default(), cx)
+ .into_any(),
+ ListItemType::SearchResult { entry, positions } => self.render_history_entry(
+ entry,
+ EntryTimeFormat::DateAndTime,
+ ix,
+ positions.clone(),
+ cx,
+ ),
+ ListItemType::BucketSeparator(bucket) => div()
+ .px(DynamicSpacing::Base06.rems(cx))
+ .pt_2()
+ .pb_1()
+ .child(
+ Label::new(bucket.to_string())
+ .size(LabelSize::XSmall)
+ .color(Color::Muted),
+ )
+ .into_any_element(),
+ }
+ }
+
+ fn render_history_entry(
+ &self,
+ entry: &AgentSessionInfo,
+ format: EntryTimeFormat,
+ ix: usize,
+ highlight_positions: Vec<usize>,
+ cx: &Context<Self>,
+ ) -> AnyElement {
+ let selected = ix == self.selected_index;
+ let hovered = Some(ix) == self.hovered_index;
+ let entry_time = entry.updated_at;
+ let display_text = match (format, entry_time) {
+ (EntryTimeFormat::DateAndTime, Some(entry_time)) => {
+ let now = Utc::now();
+ let duration = now.signed_duration_since(entry_time);
+ let days = duration.num_days();
+
+ format!("{}d", days)
+ }
+ (EntryTimeFormat::TimeOnly, Some(entry_time)) => {
+ format.format_timestamp(entry_time.timestamp(), self.local_timezone)
+ }
+ (_, None) => "—".to_string(),
+ };
+
+ let title = thread_title(entry).clone();
+ let full_date = entry_time
+ .map(|time| {
+ EntryTimeFormat::DateAndTime.format_timestamp(time.timestamp(), self.local_timezone)
+ })
+ .unwrap_or_else(|| "Unknown".to_string());
+
+ let supports_delete = self.history.read(cx).supports_delete();
+
+ h_flex()
+ .w_full()
+ .pb_1()
+ .child(
+ ListItem::new(ix)
+ .rounded()
+ .toggle_state(selected)
+ .spacing(ListItemSpacing::Sparse)
+ .start_slot(
+ h_flex()
+ .w_full()
+ .gap_2()
+ .justify_between()
+ .child(
+ HighlightedLabel::new(thread_title(entry), highlight_positions)
+ .size(LabelSize::Small)
+ .truncate(),
+ )
+ .child(
+ Label::new(display_text)
+ .color(Color::Muted)
+ .size(LabelSize::XSmall),
+ ),
+ )
+ .tooltip(move |_, cx| {
+ Tooltip::with_meta(title.clone(), None, full_date.clone(), cx)
+ })
+ .on_hover(cx.listener(move |this, is_hovered, _window, cx| {
+ if *is_hovered {
+ this.hovered_index = Some(ix);
+ } else if this.hovered_index == Some(ix) {
+ this.hovered_index = None;
+ }
+
+ cx.notify();
+ }))
+ .end_slot::<IconButton>(if hovered && supports_delete {
+ Some(
+ IconButton::new("delete", IconName::Trash)
+ .shape(IconButtonShape::Square)
+ .icon_size(IconSize::XSmall)
+ .icon_color(Color::Muted)
+ .tooltip(move |_window, cx| {
+ Tooltip::for_action("Delete", &RemoveSelectedThread, cx)
+ })
+ .on_click(cx.listener(move |this, _, _, cx| {
+ this.remove_thread(ix, cx);
+ cx.stop_propagation()
+ })),
+ )
+ } else {
+ None
+ })
+ .on_click(cx.listener(move |this, _, _, cx| this.confirm_entry(ix, cx))),
+ )
+ .into_any_element()
+ }
+}
+
+impl Focusable for ThreadHistoryView {
+ fn focus_handle(&self, cx: &App) -> FocusHandle {
+ self.search_editor.focus_handle(cx)
+ }
+}
+
+impl Render for ThreadHistoryView {
+ fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let has_no_history = self.history.read(cx).is_empty();
+ let supports_delete = self.history.read(cx).supports_delete();
+
+ v_flex()
+ .key_context("ThreadHistory")
+ .size_full()
+ .bg(cx.theme().colors().panel_background)
+ .on_action(cx.listener(Self::select_previous))
+ .on_action(cx.listener(Self::select_next))
+ .on_action(cx.listener(Self::select_first))
+ .on_action(cx.listener(Self::select_last))
+ .on_action(cx.listener(Self::confirm))
+ .on_action(cx.listener(Self::remove_selected_thread))
+ .on_action(cx.listener(|this, _: &RemoveHistory, window, cx| {
+ this.remove_history(window, cx);
+ }))
+ .child(
+ h_flex()
+ .h(Tab::container_height(cx))
+ .w_full()
+ .py_1()
+ .px_2()
+ .gap_2()
+ .justify_between()
+ .border_b_1()
+ .border_color(cx.theme().colors().border)
+ .child(
+ Icon::new(IconName::MagnifyingGlass)
+ .color(Color::Muted)
+ .size(IconSize::Small),
+ )
+ .child(self.search_editor.clone()),
+ )
+ .child({
+ let view = v_flex()
+ .id("list-container")
+ .relative()
+ .overflow_hidden()
+ .flex_grow();
+
+ if has_no_history {
+ view.justify_center().items_center().child(
+ Label::new("You don't have any past threads yet.")
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ )
+ } else if self.search_produced_no_matches() {
+ view.justify_center()
+ .items_center()
+ .child(Label::new("No threads match your search.").size(LabelSize::Small))
+ } else {
+ view.child(
+ uniform_list(
+ "thread-history",
+ self.visible_items.len(),
+ cx.processor(|this, range: Range<usize>, window, cx| {
+ this.render_list_items(range, window, cx)
+ }),
+ )
+ .p_1()
+ .pr_4()
+ .track_scroll(&self.scroll_handle)
+ .flex_grow(),
+ )
+ .vertical_scrollbar_for(&self.scroll_handle, window, cx)
+ }
+ })
+ .when(!has_no_history && supports_delete, |this| {
+ this.child(
+ h_flex()
+ .p_2()
+ .border_t_1()
+ .border_color(cx.theme().colors().border_variant)
+ .when(!self.confirming_delete_history, |this| {
+ this.child(
+ Button::new("delete_history", "Delete All History")
+ .full_width()
+ .style(ButtonStyle::Outlined)
+ .label_size(LabelSize::Small)
+ .on_click(cx.listener(|this, _, window, cx| {
+ this.prompt_delete_history(window, cx);
+ })),
+ )
+ })
+ .when(self.confirming_delete_history, |this| {
+ this.w_full()
+ .gap_2()
+ .flex_wrap()
+ .justify_between()
+ .child(
+ h_flex()
+ .flex_wrap()
+ .gap_1()
+ .child(
+ Label::new("Delete all threads?")
+ .size(LabelSize::Small),
+ )
+ .child(
+ Label::new("You won't be able to recover them later.")
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ ),
+ )
+ .child(
+ h_flex()
+ .gap_1()
+ .child(
+ Button::new("cancel_delete", "Cancel")
+ .label_size(LabelSize::Small)
+ .on_click(cx.listener(|this, _, window, cx| {
+ this.cancel_delete_history(window, cx);
+ })),
+ )
+ .child(
+ Button::new("confirm_delete", "Delete")
+ .style(ButtonStyle::Tinted(ui::TintColor::Error))
+ .color(Color::Error)
+ .label_size(LabelSize::Small)
+ .on_click(cx.listener(|_, _, window, cx| {
+ window.dispatch_action(
+ Box::new(RemoveHistory),
+ cx,
+ );
+ })),
+ ),
+ )
+ }),
+ )
+ })
+ }
+}
+
+#[derive(IntoElement)]
+pub struct HistoryEntryElement {
+ entry: AgentSessionInfo,
+ thread_view: WeakEntity<ConnectionView>,
+ selected: bool,
+ hovered: bool,
+ supports_delete: bool,
+ on_hover: Box<dyn Fn(&bool, &mut Window, &mut App) + 'static>,
+}
+
+impl HistoryEntryElement {
+ pub fn new(entry: AgentSessionInfo, thread_view: WeakEntity<ConnectionView>) -> Self {
+ Self {
+ entry,
+ thread_view,
+ selected: false,
+ hovered: false,
+ supports_delete: false,
+ on_hover: Box::new(|_, _, _| {}),
+ }
+ }
+
+ pub fn supports_delete(mut self, supports_delete: bool) -> Self {
+ self.supports_delete = supports_delete;
+ self
+ }
+
+ pub fn hovered(mut self, hovered: bool) -> Self {
+ self.hovered = hovered;
+ self
+ }
+
+ pub fn on_hover(mut self, on_hover: impl Fn(&bool, &mut Window, &mut App) + 'static) -> Self {
+ self.on_hover = Box::new(on_hover);
+ self
+ }
+}
+
+impl RenderOnce for HistoryEntryElement {
+ fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement {
+ let id = ElementId::Name(self.entry.session_id.0.clone().into());
+ let title = thread_title(&self.entry).clone();
+ let formatted_time = self
+ .entry
+ .updated_at
+ .map(|timestamp| {
+ let now = chrono::Utc::now();
+ let duration = now.signed_duration_since(timestamp);
+
+ if duration.num_days() > 0 {
+ format!("{}d", duration.num_days())
+ } else if duration.num_hours() > 0 {
+ format!("{}h ago", duration.num_hours())
+ } else if duration.num_minutes() > 0 {
+ format!("{}m ago", duration.num_minutes())
+ } else {
+ "Just now".to_string()
+ }
+ })
+ .unwrap_or_else(|| "Unknown".to_string());
+
+ ListItem::new(id)
+ .rounded()
+ .toggle_state(self.selected)
+ .spacing(ListItemSpacing::Sparse)
+ .start_slot(
+ h_flex()
+ .w_full()
+ .gap_2()
+ .justify_between()
+ .child(Label::new(title).size(LabelSize::Small).truncate())
+ .child(
+ Label::new(formatted_time)
+ .color(Color::Muted)
+ .size(LabelSize::XSmall),
+ ),
+ )
+ .on_hover(self.on_hover)
+ .end_slot::<IconButton>(if (self.hovered || self.selected) && self.supports_delete {
+ Some(
+ IconButton::new("delete", IconName::Trash)
+ .shape(IconButtonShape::Square)
+ .icon_size(IconSize::XSmall)
+ .icon_color(Color::Muted)
+ .tooltip(move |_window, cx| {
+ Tooltip::for_action("Delete", &RemoveSelectedThread, cx)
+ })
+ .on_click({
+ let thread_view = self.thread_view.clone();
+ let session_id = self.entry.session_id.clone();
+
+ move |_event, _window, cx| {
+ if let Some(thread_view) = thread_view.upgrade() {
+ thread_view.update(cx, |thread_view, cx| {
+ thread_view.delete_history_entry(&session_id, cx);
+ });
+ }
+ }
+ }),
+ )
+ } else {
+ None
+ })
+ .on_click({
+ let thread_view = self.thread_view.clone();
+ let entry = self.entry;
+
+ move |_event, window, cx| {
+ if let Some(workspace) = thread_view
+ .upgrade()
+ .and_then(|view| view.read(cx).workspace().upgrade())
+ {
+ if let Some(panel) = workspace.read(cx).panel::<AgentPanel>(cx) {
+ panel.update(cx, |panel, cx| {
+ panel.load_agent_thread(
+ entry.session_id.clone(),
+ entry.cwd.clone(),
+ entry.title.clone(),
+ window,
+ cx,
+ );
+ });
+ }
+ }
+ }
+ })
+ }
+}
+
+#[derive(Clone, Copy)]
+pub enum EntryTimeFormat {
+ DateAndTime,
+ TimeOnly,
+}
+
+impl EntryTimeFormat {
+ fn format_timestamp(&self, timestamp: i64, timezone: UtcOffset) -> String {
+ let timestamp = OffsetDateTime::from_unix_timestamp(timestamp).unwrap();
+
+ match self {
+ EntryTimeFormat::DateAndTime => time_format::format_localized_timestamp(
+ timestamp,
+ OffsetDateTime::now_utc(),
+ timezone,
+ time_format::TimestampFormat::EnhancedAbsolute,
+ ),
+ EntryTimeFormat::TimeOnly => time_format::format_time(timestamp.to_offset(timezone)),
+ }
+ }
+}
+
+impl From<TimeBucket> for EntryTimeFormat {
+ fn from(bucket: TimeBucket) -> Self {
+ match bucket {
+ TimeBucket::Today => EntryTimeFormat::TimeOnly,
+ TimeBucket::Yesterday => EntryTimeFormat::TimeOnly,
+ TimeBucket::ThisWeek => EntryTimeFormat::DateAndTime,
+ TimeBucket::PastWeek => EntryTimeFormat::DateAndTime,
+ TimeBucket::All => EntryTimeFormat::DateAndTime,
+ }
+ }
+}
+
+#[derive(PartialEq, Eq, Clone, Copy, Debug)]
+enum TimeBucket {
+ Today,
+ Yesterday,
+ ThisWeek,
+ PastWeek,
+ All,
+}
+
+impl TimeBucket {
+ fn from_dates(reference: NaiveDate, date: NaiveDate) -> Self {
+ if date == reference {
+ return TimeBucket::Today;
+ }
+
+ if date == reference - TimeDelta::days(1) {
+ return TimeBucket::Yesterday;
+ }
+
+ let week = date.iso_week();
+
+ if reference.iso_week() == week {
+ return TimeBucket::ThisWeek;
+ }
+
+ let last_week = (reference - TimeDelta::days(7)).iso_week();
+
+ if week == last_week {
+ return TimeBucket::PastWeek;
+ }
+
+ TimeBucket::All
+ }
+}
+
+impl Display for TimeBucket {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ TimeBucket::Today => write!(f, "Today"),
+ TimeBucket::Yesterday => write!(f, "Yesterday"),
+ TimeBucket::ThisWeek => write!(f, "This Week"),
+ TimeBucket::PastWeek => write!(f, "Past Week"),
+ TimeBucket::All => write!(f, "All"),
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use chrono::NaiveDate;
+
+ #[test]
+ fn test_time_bucket_from_dates() {
+ let today = NaiveDate::from_ymd_opt(2025, 1, 15).unwrap();
+
+ assert_eq!(TimeBucket::from_dates(today, today), TimeBucket::Today);
+
+ let yesterday = NaiveDate::from_ymd_opt(2025, 1, 14).unwrap();
+ assert_eq!(
+ TimeBucket::from_dates(today, yesterday),
+ TimeBucket::Yesterday
+ );
+
+ let this_week = NaiveDate::from_ymd_opt(2025, 1, 13).unwrap();
+ assert_eq!(
+ TimeBucket::from_dates(today, this_week),
+ TimeBucket::ThisWeek
+ );
+
+ let past_week = NaiveDate::from_ymd_opt(2025, 1, 7).unwrap();
+ assert_eq!(
+ TimeBucket::from_dates(today, past_week),
+ TimeBucket::PastWeek
+ );
+
+ let old = NaiveDate::from_ymd_opt(2024, 12, 1).unwrap();
+ assert_eq!(TimeBucket::from_dates(today, old), TimeBucket::All);
+ }
+}
@@ -145,7 +145,7 @@ impl Editor {
_: &Window,
cx: &mut Context<Self>,
) {
- if !self.mode().is_full() {
+ if !self.lsp_data_enabled() {
return;
}
let Some(project) = self.project.as_ref() else {
@@ -147,7 +147,7 @@ impl Editor {
for_buffer: Option<BufferId>,
cx: &mut Context<Self>,
) {
- if !self.mode().is_full() {
+ if !self.lsp_data_enabled() {
return;
}
let Some(project) = self.project.clone() else {
@@ -35,13 +35,13 @@ mod lsp_ext;
mod mouse_context_menu;
pub mod movement;
mod persistence;
+mod runnables;
mod rust_analyzer_ext;
pub mod scroll;
mod selections_collection;
pub mod semantic_tokens;
mod split;
pub mod split_editor_view;
-pub mod tasks;
#[cfg(test)]
mod code_completion_tests;
@@ -133,8 +133,8 @@ use language::{
BufferSnapshot, Capability, CharClassifier, CharKind, CharScopeContext, CodeLabel, CursorShape,
DiagnosticEntryRef, DiffOptions, EditPredictionsMode, EditPreview, HighlightedText, IndentKind,
IndentSize, Language, LanguageName, LanguageRegistry, LanguageScope, LocalFile, OffsetRangeExt,
- OutlineItem, Point, Runnable, Selection, SelectionGoal, TextObject, TransactionId,
- TreeSitterOptions, WordsQuery,
+ OutlineItem, Point, Selection, SelectionGoal, TextObject, TransactionId, TreeSitterOptions,
+ WordsQuery,
language_settings::{
self, LanguageSettings, LspInsertMode, RewrapBehavior, WordsCompletionMode,
all_language_settings, language_settings,
@@ -158,7 +158,7 @@ use project::{
BreakpointWithPosition, CodeAction, Completion, CompletionDisplayOptions, CompletionIntent,
CompletionResponse, CompletionSource, DisableAiSettings, DocumentHighlight, InlayHint, InlayId,
InvalidationStrategy, Location, LocationLink, LspAction, PrepareRenameResponse, Project,
- ProjectItem, ProjectPath, ProjectTransaction, TaskSourceKind,
+ ProjectItem, ProjectPath, ProjectTransaction,
debugger::{
breakpoint_store::{
Breakpoint, BreakpointEditAction, BreakpointSessionState, BreakpointState,
@@ -200,7 +200,7 @@ use std::{
sync::Arc,
time::{Duration, Instant},
};
-use task::{ResolvedTask, RunnableTag, TaskTemplate, TaskVariables};
+use task::TaskVariables;
use text::{BufferId, FromAnchor, OffsetUtf16, Rope, ToOffset as _, ToPoint as _};
use theme::{
AccentColors, ActiveTheme, GlobalTheme, PlayerColor, StatusColors, SyntaxTheme, Theme,
@@ -231,6 +231,7 @@ use crate::{
InlineValueCache,
inlay_hints::{LspInlayHintData, inlay_hint_settings},
},
+ runnables::{ResolvedTasks, RunnableData, RunnableTasks},
scroll::{ScrollOffset, ScrollPixelOffset},
selections_collection::resolve_selections_wrapping_blocks,
semantic_tokens::SemanticTokenState,
@@ -857,37 +858,6 @@ impl BufferSerialization {
}
}
-#[derive(Clone, Debug)]
-struct RunnableTasks {
- templates: Vec<(TaskSourceKind, TaskTemplate)>,
- offset: multi_buffer::Anchor,
- // We need the column at which the task context evaluation should take place (when we're spawning it via gutter).
- column: u32,
- // Values of all named captures, including those starting with '_'
- extra_variables: HashMap<String, String>,
- // Full range of the tagged region. We use it to determine which `extra_variables` to grab for context resolution in e.g. a modal.
- context_range: Range<BufferOffset>,
-}
-
-impl RunnableTasks {
- fn resolve<'a>(
- &'a self,
- cx: &'a task::TaskContext,
- ) -> impl Iterator<Item = (TaskSourceKind, ResolvedTask)> + 'a {
- self.templates.iter().filter_map(|(kind, template)| {
- template
- .resolve_task(&kind.to_id_base(), cx)
- .map(|task| (kind.clone(), task))
- })
- }
-}
-
-#[derive(Clone)]
-pub struct ResolvedTasks {
- templates: SmallVec<[(TaskSourceKind, ResolvedTask); 1]>,
- position: Anchor,
-}
-
/// Addons allow storing per-editor state in other crates (e.g. Vim)
pub trait Addon: 'static {
fn extend_key_context(&self, _: &mut KeyContext, _: &App) {}
@@ -1295,8 +1265,7 @@ pub struct Editor {
last_bounds: Option<Bounds<Pixels>>,
last_position_map: Option<Rc<PositionMap>>,
expect_bounds_change: Option<Bounds<Pixels>>,
- tasks: BTreeMap<(BufferId, BufferRow), RunnableTasks>,
- tasks_update_task: Option<Task<()>>,
+ runnables: RunnableData,
breakpoint_store: Option<Entity<BreakpointStore>>,
gutter_breakpoint_indicator: (Option<PhantomBreakpointIndicator>, Option<Task<()>>),
pub(crate) gutter_diff_review_indicator: (Option<PhantomDiffReviewIndicator>, Option<Task<()>>),
@@ -2173,16 +2142,9 @@ impl Editor {
editor.registered_buffers.clear();
editor.register_visible_buffers(cx);
editor.invalidate_semantic_tokens(None);
+ editor.refresh_runnables(window, cx);
editor.update_lsp_data(None, window, cx);
editor.refresh_inlay_hints(InlayHintRefreshReason::ServerRemoved, cx);
- if editor.tasks_update_task.is_none() {
- editor.tasks_update_task = Some(editor.refresh_runnables(window, cx));
- }
- }
- project::Event::LanguageServerAdded(..) => {
- if editor.tasks_update_task.is_none() {
- editor.tasks_update_task = Some(editor.refresh_runnables(window, cx));
- }
}
project::Event::SnippetEdit(id, snippet_edits) => {
// todo(lw): Non singletons
@@ -2210,6 +2172,7 @@ impl Editor {
let buffer_id = *buffer_id;
if editor.buffer().read(cx).buffer(buffer_id).is_some() {
editor.register_buffer(buffer_id, cx);
+ editor.refresh_runnables(window, cx);
editor.update_lsp_data(Some(buffer_id), window, cx);
editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx);
refresh_linked_ranges(editor, window, cx);
@@ -2288,7 +2251,7 @@ impl Editor {
&task_inventory,
window,
|editor, _, window, cx| {
- editor.tasks_update_task = Some(editor.refresh_runnables(window, cx));
+ editor.refresh_runnables(window, cx);
},
));
};
@@ -2529,7 +2492,6 @@ impl Editor {
}),
blame: None,
blame_subscription: None,
- tasks: BTreeMap::default(),
breakpoint_store,
gutter_breakpoint_indicator: (None, None),
@@ -2565,7 +2527,7 @@ impl Editor {
]
})
.unwrap_or_default(),
- tasks_update_task: None,
+ runnables: RunnableData::new(),
pull_diagnostics_task: Task::ready(()),
colors: None,
refresh_colors_task: Task::ready(()),
@@ -2632,7 +2594,6 @@ impl Editor {
cx.notify();
}));
}
- editor.tasks_update_task = Some(editor.refresh_runnables(window, cx));
editor._subscriptions.extend(project_subscriptions);
editor._subscriptions.push(cx.subscribe_in(
@@ -2668,6 +2629,7 @@ impl Editor {
);
if !editor.buffer().read(cx).is_singleton() {
editor.update_lsp_data(None, window, cx);
+ editor.refresh_runnables(window, cx);
}
})
.ok();
@@ -5791,18 +5753,11 @@ impl Editor {
let display_snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let multi_buffer = self.buffer().read(cx);
let multi_buffer_snapshot = multi_buffer.snapshot(cx);
- let multi_buffer_visible_start = self
- .scroll_manager
- .native_anchor(&display_snapshot, cx)
- .anchor
- .to_point(&multi_buffer_snapshot);
- let multi_buffer_visible_end = multi_buffer_snapshot.clip_point(
- multi_buffer_visible_start
- + Point::new(self.visible_line_count().unwrap_or(0.).ceil() as u32, 0),
- Bias::Left,
- );
multi_buffer_snapshot
- .range_to_buffer_ranges(multi_buffer_visible_start..=multi_buffer_visible_end)
+ .range_to_buffer_ranges(
+ self.multi_buffer_visible_range(&display_snapshot, cx)
+ .to_inclusive(),
+ )
.into_iter()
.filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty())
.filter_map(|(buffer, excerpt_visible_range, excerpt_id)| {
@@ -6737,8 +6692,8 @@ impl Editor {
};
let buffer_id = buffer.read(cx).remote_id();
let tasks = self
- .tasks
- .get(&(buffer_id, buffer_row))
+ .runnables
+ .runnables((buffer_id, buffer_row))
.map(|t| Arc::new(t.to_owned()));
if !self.focus_handle.is_focused(window) {
@@ -7733,7 +7688,7 @@ impl Editor {
#[ztracing::instrument(skip_all)]
fn refresh_outline_symbols_at_cursor(&mut self, cx: &mut Context<Editor>) {
- if !self.mode.is_full() {
+ if !self.lsp_data_enabled() {
return;
}
let cursor = self.selections.newest_anchor().head();
@@ -7789,24 +7744,13 @@ impl Editor {
self.debounced_selection_highlight_complete = false;
}
if on_buffer_edit || query_changed {
- let multi_buffer_visible_start = self
- .scroll_manager
- .native_anchor(&display_snapshot, cx)
- .anchor
- .to_point(&multi_buffer_snapshot);
- let multi_buffer_visible_end = multi_buffer_snapshot.clip_point(
- multi_buffer_visible_start
- + Point::new(self.visible_line_count().unwrap_or(0.).ceil() as u32, 0),
- Bias::Left,
- );
- let multi_buffer_visible_range = multi_buffer_visible_start..multi_buffer_visible_end;
self.quick_selection_highlight_task = Some((
query_range.clone(),
self.update_selection_occurrence_highlights(
snapshot.buffer.clone(),
query_text.clone(),
query_range.clone(),
- multi_buffer_visible_range,
+ self.multi_buffer_visible_range(&display_snapshot, cx),
false,
window,
cx,
@@ -7841,6 +7785,27 @@ impl Editor {
}
}
+ pub fn multi_buffer_visible_range(
+ &self,
+ display_snapshot: &DisplaySnapshot,
+ cx: &App,
+ ) -> Range<Point> {
+ let visible_start = self
+ .scroll_manager
+ .native_anchor(display_snapshot, cx)
+ .anchor
+ .to_point(display_snapshot.buffer_snapshot())
+ .to_display_point(display_snapshot);
+
+ let mut target_end = visible_start;
+ *target_end.row_mut() += self.visible_line_count().unwrap_or(0.).ceil() as u32;
+
+ visible_start.to_point(display_snapshot)
+ ..display_snapshot
+ .clip_point(target_end, Bias::Right)
+ .to_point(display_snapshot)
+ }
+
pub fn refresh_edit_prediction(
&mut self,
debounce: bool,
@@ -8809,19 +8774,6 @@ impl Editor {
Some(self.edit_prediction_provider.as_ref()?.provider.clone())
}
- fn clear_tasks(&mut self) {
- self.tasks.clear()
- }
-
- fn insert_tasks(&mut self, key: (BufferId, BufferRow), value: RunnableTasks) {
- if self.tasks.insert(key, value).is_some() {
- // This case should hopefully be rare, but just in case...
- log::error!(
- "multiple different run targets found on a single line, only the last target will be rendered"
- )
- }
- }
-
/// Get all display points of breakpoints that will be rendered within editor
///
/// This function is used to handle overlaps between breakpoints and Code action/runner symbol.
@@ -9199,156 +9151,6 @@ impl Editor {
})
}
- pub fn spawn_nearest_task(
- &mut self,
- action: &SpawnNearestTask,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- let Some((workspace, _)) = self.workspace.clone() else {
- return;
- };
- let Some(project) = self.project.clone() else {
- return;
- };
-
- // Try to find a closest, enclosing node using tree-sitter that has a task
- let Some((buffer, buffer_row, tasks)) = self
- .find_enclosing_node_task(cx)
- // Or find the task that's closest in row-distance.
- .or_else(|| self.find_closest_task(cx))
- else {
- return;
- };
-
- let reveal_strategy = action.reveal;
- let task_context = Self::build_tasks_context(&project, &buffer, buffer_row, &tasks, cx);
- cx.spawn_in(window, async move |_, cx| {
- let context = task_context.await?;
- let (task_source_kind, mut resolved_task) = tasks.resolve(&context).next()?;
-
- let resolved = &mut resolved_task.resolved;
- resolved.reveal = reveal_strategy;
-
- workspace
- .update_in(cx, |workspace, window, cx| {
- workspace.schedule_resolved_task(
- task_source_kind,
- resolved_task,
- false,
- window,
- cx,
- );
- })
- .ok()
- })
- .detach();
- }
-
- fn find_closest_task(
- &mut self,
- cx: &mut Context<Self>,
- ) -> Option<(Entity<Buffer>, u32, Arc<RunnableTasks>)> {
- let cursor_row = self
- .selections
- .newest_adjusted(&self.display_snapshot(cx))
- .head()
- .row;
-
- let ((buffer_id, row), tasks) = self
- .tasks
- .iter()
- .min_by_key(|((_, row), _)| cursor_row.abs_diff(*row))?;
-
- let buffer = self.buffer.read(cx).buffer(*buffer_id)?;
- let tasks = Arc::new(tasks.to_owned());
- Some((buffer, *row, tasks))
- }
-
- fn find_enclosing_node_task(
- &mut self,
- cx: &mut Context<Self>,
- ) -> Option<(Entity<Buffer>, u32, Arc<RunnableTasks>)> {
- let snapshot = self.buffer.read(cx).snapshot(cx);
- let offset = self
- .selections
- .newest::<MultiBufferOffset>(&self.display_snapshot(cx))
- .head();
- let mut excerpt = snapshot.excerpt_containing(offset..offset)?;
- let offset = excerpt.map_offset_to_buffer(offset);
- let buffer_id = excerpt.buffer().remote_id();
-
- let layer = excerpt.buffer().syntax_layer_at(offset)?;
- let mut cursor = layer.node().walk();
-
- while cursor.goto_first_child_for_byte(offset.0).is_some() {
- if cursor.node().end_byte() == offset.0 {
- cursor.goto_next_sibling();
- }
- }
-
- // Ascend to the smallest ancestor that contains the range and has a task.
- loop {
- let node = cursor.node();
- let node_range = node.byte_range();
- let symbol_start_row = excerpt.buffer().offset_to_point(node.start_byte()).row;
-
- // Check if this node contains our offset
- if node_range.start <= offset.0 && node_range.end >= offset.0 {
- // If it contains offset, check for task
- if let Some(tasks) = self.tasks.get(&(buffer_id, symbol_start_row)) {
- let buffer = self.buffer.read(cx).buffer(buffer_id)?;
- return Some((buffer, symbol_start_row, Arc::new(tasks.to_owned())));
- }
- }
-
- if !cursor.goto_parent() {
- break;
- }
- }
- None
- }
-
- fn render_run_indicator(
- &self,
- _style: &EditorStyle,
- is_active: bool,
- row: DisplayRow,
- breakpoint: Option<(Anchor, Breakpoint, Option<BreakpointSessionState>)>,
- cx: &mut Context<Self>,
- ) -> IconButton {
- let color = Color::Muted;
- let position = breakpoint.as_ref().map(|(anchor, _, _)| *anchor);
-
- IconButton::new(
- ("run_indicator", row.0 as usize),
- ui::IconName::PlayOutlined,
- )
- .shape(ui::IconButtonShape::Square)
- .icon_size(IconSize::XSmall)
- .icon_color(color)
- .toggle_state(is_active)
- .on_click(cx.listener(move |editor, e: &ClickEvent, window, cx| {
- let quick_launch = match e {
- ClickEvent::Keyboard(_) => true,
- ClickEvent::Mouse(e) => e.down.button == MouseButton::Left,
- };
-
- window.focus(&editor.focus_handle(cx), cx);
- editor.toggle_code_actions(
- &ToggleCodeActions {
- deployed_from: Some(CodeActionSource::RunMenu(row)),
- quick_launch,
- },
- window,
- cx,
- );
- }))
- .on_right_click(cx.listener(move |editor, event: &ClickEvent, window, cx| {
- editor.set_breakpoint_context_menu(row, position, event.position(), window, cx);
- }))
- }
-
pub fn context_menu_visible(&self) -> bool {
!self.edit_prediction_preview_is_active()
&& self
@@ -17153,236 +16955,6 @@ impl Editor {
});
}
- fn refresh_runnables(&mut self, window: &mut Window, cx: &mut Context<Self>) -> Task<()> {
- if !EditorSettings::get_global(cx).gutter.runnables || !self.enable_runnables {
- self.clear_tasks();
- return Task::ready(());
- }
- let project = self.project().map(Entity::downgrade);
- let task_sources = self.lsp_task_sources(cx);
- let multi_buffer = self.buffer.downgrade();
- cx.spawn_in(window, async move |editor, cx| {
- cx.background_executor().timer(UPDATE_DEBOUNCE).await;
- let Some(project) = project.and_then(|p| p.upgrade()) else {
- return;
- };
- let Ok(display_snapshot) = editor.update(cx, |this, cx| {
- this.display_map.update(cx, |map, cx| map.snapshot(cx))
- }) else {
- return;
- };
-
- let hide_runnables = project.update(cx, |project, _| project.is_via_collab());
- if hide_runnables {
- return;
- }
- let new_rows =
- cx.background_spawn({
- let snapshot = display_snapshot.clone();
- async move {
- Self::fetch_runnable_ranges(&snapshot, Anchor::min()..Anchor::max())
- }
- })
- .await;
- let Ok(lsp_tasks) =
- cx.update(|_, cx| crate::lsp_tasks(project.clone(), &task_sources, None, cx))
- else {
- return;
- };
- let lsp_tasks = lsp_tasks.await;
-
- let Ok(mut lsp_tasks_by_rows) = cx.update(|_, cx| {
- lsp_tasks
- .into_iter()
- .flat_map(|(kind, tasks)| {
- tasks.into_iter().filter_map(move |(location, task)| {
- Some((kind.clone(), location?, task))
- })
- })
- .fold(HashMap::default(), |mut acc, (kind, location, task)| {
- let buffer = location.target.buffer;
- let buffer_snapshot = buffer.read(cx).snapshot();
- let offset = display_snapshot.buffer_snapshot().excerpts().find_map(
- |(excerpt_id, snapshot, _)| {
- if snapshot.remote_id() == buffer_snapshot.remote_id() {
- display_snapshot
- .buffer_snapshot()
- .anchor_in_excerpt(excerpt_id, location.target.range.start)
- } else {
- None
- }
- },
- );
- if let Some(offset) = offset {
- let task_buffer_range =
- location.target.range.to_point(&buffer_snapshot);
- let context_buffer_range =
- task_buffer_range.to_offset(&buffer_snapshot);
- let context_range = BufferOffset(context_buffer_range.start)
- ..BufferOffset(context_buffer_range.end);
-
- acc.entry((buffer_snapshot.remote_id(), task_buffer_range.start.row))
- .or_insert_with(|| RunnableTasks {
- templates: Vec::new(),
- offset,
- column: task_buffer_range.start.column,
- extra_variables: HashMap::default(),
- context_range,
- })
- .templates
- .push((kind, task.original_task().clone()));
- }
-
- acc
- })
- }) else {
- return;
- };
-
- let Ok(prefer_lsp) = multi_buffer.update(cx, |buffer, cx| {
- buffer.language_settings(cx).tasks.prefer_lsp
- }) else {
- return;
- };
-
- let rows = Self::runnable_rows(
- project,
- display_snapshot,
- prefer_lsp && !lsp_tasks_by_rows.is_empty(),
- new_rows,
- cx.clone(),
- )
- .await;
- editor
- .update(cx, |editor, _| {
- editor.clear_tasks();
- for (key, mut value) in rows {
- if let Some(lsp_tasks) = lsp_tasks_by_rows.remove(&key) {
- value.templates.extend(lsp_tasks.templates);
- }
-
- editor.insert_tasks(key, value);
- }
- for (key, value) in lsp_tasks_by_rows {
- editor.insert_tasks(key, value);
- }
- })
- .ok();
- })
- }
- fn fetch_runnable_ranges(
- snapshot: &DisplaySnapshot,
- range: Range<Anchor>,
- ) -> Vec<(Range<MultiBufferOffset>, language::RunnableRange)> {
- snapshot.buffer_snapshot().runnable_ranges(range).collect()
- }
-
- fn runnable_rows(
- project: Entity<Project>,
- snapshot: DisplaySnapshot,
- prefer_lsp: bool,
- runnable_ranges: Vec<(Range<MultiBufferOffset>, language::RunnableRange)>,
- cx: AsyncWindowContext,
- ) -> Task<Vec<((BufferId, BufferRow), RunnableTasks)>> {
- cx.spawn(async move |cx| {
- let mut runnable_rows = Vec::with_capacity(runnable_ranges.len());
- for (run_range, mut runnable) in runnable_ranges {
- let Some(tasks) = cx
- .update(|_, cx| Self::templates_with_tags(&project, &mut runnable.runnable, cx))
- .ok()
- else {
- continue;
- };
- let mut tasks = tasks.await;
-
- if prefer_lsp {
- tasks.retain(|(task_kind, _)| {
- !matches!(task_kind, TaskSourceKind::Language { .. })
- });
- }
- if tasks.is_empty() {
- continue;
- }
-
- let point = run_range.start.to_point(&snapshot.buffer_snapshot());
- let Some(row) = snapshot
- .buffer_snapshot()
- .buffer_line_for_row(MultiBufferRow(point.row))
- .map(|(_, range)| range.start.row)
- else {
- continue;
- };
-
- let context_range =
- BufferOffset(runnable.full_range.start)..BufferOffset(runnable.full_range.end);
- runnable_rows.push((
- (runnable.buffer_id, row),
- RunnableTasks {
- templates: tasks,
- offset: snapshot.buffer_snapshot().anchor_before(run_range.start),
- context_range,
- column: point.column,
- extra_variables: runnable.extra_captures,
- },
- ));
- }
- runnable_rows
- })
- }
-
- fn templates_with_tags(
- project: &Entity<Project>,
- runnable: &mut Runnable,
- cx: &mut App,
- ) -> Task<Vec<(TaskSourceKind, TaskTemplate)>> {
- let (inventory, worktree_id, file) = project.read_with(cx, |project, cx| {
- let (worktree_id, file) = project
- .buffer_for_id(runnable.buffer, cx)
- .and_then(|buffer| buffer.read(cx).file())
- .map(|file| (file.worktree_id(cx), file.clone()))
- .unzip();
-
- (
- project.task_store().read(cx).task_inventory().cloned(),
- worktree_id,
- file,
- )
- });
-
- let tags = mem::take(&mut runnable.tags);
- let language = runnable.language.clone();
- cx.spawn(async move |cx| {
- let mut templates_with_tags = Vec::new();
- if let Some(inventory) = inventory {
- for RunnableTag(tag) in tags {
- let new_tasks = inventory.update(cx, |inventory, cx| {
- inventory.list_tasks(file.clone(), Some(language.clone()), worktree_id, cx)
- });
- templates_with_tags.extend(new_tasks.await.into_iter().filter(
- move |(_, template)| {
- template.tags.iter().any(|source_tag| source_tag == &tag)
- },
- ));
- }
- }
- templates_with_tags.sort_by_key(|(kind, _)| kind.to_owned());
-
- if let Some((leading_tag_source, _)) = templates_with_tags.first() {
- // Strongest source wins; if we have worktree tag binding, prefer that to
- // global and language bindings;
- // if we have a global binding, prefer that to language binding.
- let first_mismatch = templates_with_tags
- .iter()
- .position(|(tag_source, _)| tag_source != leading_tag_source);
- if let Some(index) = first_mismatch {
- templates_with_tags.truncate(index);
- }
- }
-
- templates_with_tags
- })
- }
-
pub fn move_to_enclosing_bracket(
&mut self,
_: &MoveToEnclosingBracket,
@@ -19607,7 +19179,7 @@ impl Editor {
}
pub fn diagnostics_enabled(&self) -> bool {
- self.diagnostics_enabled && self.mode.is_full()
+ self.diagnostics_enabled && self.lsp_data_enabled()
}
pub fn inline_diagnostics_enabled(&self) -> bool {
@@ -19771,10 +19343,7 @@ impl Editor {
// `ActiveDiagnostic::All` is a special mode where editor's diagnostics are managed by the external view,
// skip any LSP updates for it.
- if self.active_diagnostics == ActiveDiagnostic::All
- || !self.mode().is_full()
- || !self.diagnostics_enabled()
- {
+ if self.active_diagnostics == ActiveDiagnostic::All || !self.diagnostics_enabled() {
return None;
}
let pull_diagnostics_settings = ProjectSettings::get_global(cx)
@@ -24182,7 +23751,6 @@ impl Editor {
predecessor,
excerpts,
} => {
- self.tasks_update_task = Some(self.refresh_runnables(window, cx));
let buffer_id = buffer.read(cx).remote_id();
if self.buffer.read(cx).diff_for(buffer_id).is_none()
&& let Some(project) = &self.project
@@ -24200,6 +23768,7 @@ impl Editor {
.invalidate_buffer(&buffer.read(cx).remote_id());
self.update_lsp_data(Some(buffer_id), window, cx);
self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx);
+ self.refresh_runnables(window, cx);
self.colorize_brackets(false, cx);
self.refresh_selected_text_highlights(&self.display_snapshot(cx), true, window, cx);
cx.emit(EditorEvent::ExcerptsAdded {
@@ -24218,8 +23787,7 @@ impl Editor {
self.refresh_inlay_hints(InlayHintRefreshReason::ExcerptsRemoved(ids.clone()), cx);
for buffer_id in removed_buffer_ids {
self.registered_buffers.remove(buffer_id);
- self.tasks
- .retain(|(task_buffer_id, _), _| task_buffer_id != buffer_id);
+ self.clear_runnables(Some(*buffer_id));
self.semantic_token_state.invalidate_buffer(buffer_id);
self.display_map.update(cx, |display_map, cx| {
display_map.invalidate_semantic_highlights(*buffer_id);
@@ -24261,10 +23829,12 @@ impl Editor {
}
self.colorize_brackets(false, cx);
self.update_lsp_data(None, window, cx);
+ self.refresh_runnables(window, cx);
cx.emit(EditorEvent::ExcerptsExpanded { ids: ids.clone() })
}
multi_buffer::Event::Reparsed(buffer_id) => {
- self.tasks_update_task = Some(self.refresh_runnables(window, cx));
+ self.clear_runnables(Some(*buffer_id));
+ self.refresh_runnables(window, cx);
self.refresh_selected_text_highlights(&self.display_snapshot(cx), true, window, cx);
self.colorize_brackets(true, cx);
jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx);
@@ -24272,7 +23842,7 @@ impl Editor {
cx.emit(EditorEvent::Reparsed(*buffer_id));
}
multi_buffer::Event::DiffHunksToggled => {
- self.tasks_update_task = Some(self.refresh_runnables(window, cx));
+ self.refresh_runnables(window, cx);
}
multi_buffer::Event::LanguageChanged(buffer_id, is_fresh_language) => {
if !is_fresh_language {
@@ -24408,7 +23978,7 @@ impl Editor {
.unwrap_or(DiagnosticSeverity::Hint);
self.set_max_diagnostics_severity(new_severity, cx);
}
- self.tasks_update_task = Some(self.refresh_runnables(window, cx));
+ self.refresh_runnables(window, cx);
self.update_edit_prediction_settings(cx);
self.refresh_edit_prediction(true, false, window, cx);
self.refresh_inline_values(cx);
@@ -25628,13 +25198,17 @@ impl Editor {
}
}
+ fn lsp_data_enabled(&self) -> bool {
+ self.enable_lsp_data && self.mode().is_full()
+ }
+
fn update_lsp_data(
&mut self,
for_buffer: Option<BufferId>,
window: &mut Window,
cx: &mut Context<'_, Self>,
) {
- if !self.enable_lsp_data {
+ if !self.lsp_data_enabled() {
return;
}
@@ -25648,7 +25222,7 @@ impl Editor {
}
fn register_visible_buffers(&mut self, cx: &mut Context<Self>) {
- if !self.mode().is_full() {
+ if !self.lsp_data_enabled() {
return;
}
for (_, (visible_buffer, _, _)) in self.visible_excerpts(true, cx) {
@@ -25657,7 +25231,7 @@ impl Editor {
}
fn register_buffer(&mut self, buffer_id: BufferId, cx: &mut Context<Self>) {
- if !self.mode().is_full() {
+ if !self.lsp_data_enabled() {
return;
}
@@ -5,6 +5,7 @@ use crate::{
edit_prediction_tests::FakeEditPredictionDelegate,
element::StickyHeader,
linked_editing_ranges::LinkedEditingRanges,
+ runnables::RunnableTasks,
scroll::scroll_amount::ScrollAmount,
test::{
assert_text_with_selections, build_editor, editor_content_with_blocks,
@@ -24403,20 +24404,24 @@ async fn test_find_enclosing_node_with_task(cx: &mut TestAppContext) {
editor.update_in(cx, |editor, window, cx| {
let snapshot = editor.buffer().read(cx).snapshot(cx);
- editor.tasks.insert(
- (buffer.read(cx).remote_id(), 3),
+ editor.runnables.insert(
+ buffer.read(cx).remote_id(),
+ 3,
+ buffer.read(cx).version(),
RunnableTasks {
- templates: vec![],
+ templates: Vec::new(),
offset: snapshot.anchor_before(MultiBufferOffset(43)),
column: 0,
extra_variables: HashMap::default(),
context_range: BufferOffset(43)..BufferOffset(85),
},
);
- editor.tasks.insert(
- (buffer.read(cx).remote_id(), 8),
+ editor.runnables.insert(
+ buffer.read(cx).remote_id(),
+ 8,
+ buffer.read(cx).version(),
RunnableTasks {
- templates: vec![],
+ templates: Vec::new(),
offset: snapshot.anchor_before(MultiBufferOffset(86)),
column: 0,
extra_variables: HashMap::default(),
@@ -3275,9 +3275,9 @@ impl EditorElement {
snapshot.display_point_to_point(DisplayPoint::new(range.end, 0), Bias::Right);
editor
- .tasks
- .iter()
- .filter_map(|(_, tasks)| {
+ .runnables
+ .all_runnables()
+ .filter_map(|tasks| {
let multibuffer_point = tasks.offset.to_point(&snapshot.buffer_snapshot());
if multibuffer_point < offset_range_start
|| multibuffer_point > offset_range_end
@@ -13,7 +13,7 @@ impl Editor {
_window: &Window,
cx: &mut Context<Self>,
) {
- if !self.mode().is_full() || !self.use_document_folding_ranges {
+ if !self.lsp_data_enabled() || !self.use_document_folding_ranges {
return;
}
let Some(project) = self.project.clone() else {
@@ -292,7 +292,7 @@ impl Editor {
reason: InlayHintRefreshReason,
cx: &mut Context<Self>,
) {
- if !self.mode().is_full() || self.inlay_hints.is_none() {
+ if !self.lsp_data_enabled() || self.inlay_hints.is_none() {
return;
}
let Some(semantics_provider) = self.semantics_provider() else {
@@ -50,7 +50,7 @@ pub(super) fn refresh_linked_ranges(
window: &mut Window,
cx: &mut Context<Editor>,
) -> Option<()> {
- if !editor.mode().is_full() || editor.pending_rename.is_some() {
+ if !editor.lsp_data_enabled() || editor.pending_rename.is_some() {
return None;
}
let project = editor.project()?.downgrade();
@@ -0,0 +1,915 @@
+use std::{collections::BTreeMap, mem, ops::Range, sync::Arc};
+
+use clock::Global;
+use collections::HashMap;
+use gpui::{
+ App, AppContext as _, AsyncWindowContext, ClickEvent, Context, Entity, Focusable as _,
+ MouseButton, Task, Window,
+};
+use language::{Buffer, BufferRow, Runnable};
+use lsp::LanguageServerName;
+use multi_buffer::{
+ Anchor, BufferOffset, MultiBufferOffset, MultiBufferRow, MultiBufferSnapshot, ToPoint as _,
+};
+use project::{
+ Location, Project, TaskSourceKind,
+ debugger::breakpoint_store::{Breakpoint, BreakpointSessionState},
+ project_settings::ProjectSettings,
+};
+use settings::Settings as _;
+use smallvec::SmallVec;
+use task::{ResolvedTask, RunnableTag, TaskContext, TaskTemplate, TaskVariables, VariableName};
+use text::{BufferId, OffsetRangeExt as _, ToOffset as _, ToPoint as _};
+use ui::{Clickable as _, Color, IconButton, IconSize, Toggleable as _};
+
+use crate::{
+ CodeActionSource, Editor, EditorSettings, EditorStyle, RangeToAnchorExt, SpawnNearestTask,
+ ToggleCodeActions, UPDATE_DEBOUNCE, display_map::DisplayRow,
+};
+
+#[derive(Debug)]
+pub(super) struct RunnableData {
+ runnables: HashMap<BufferId, (Global, BTreeMap<BufferRow, RunnableTasks>)>,
+ runnables_update_task: Task<()>,
+}
+
+impl RunnableData {
+ pub fn new() -> Self {
+ Self {
+ runnables: HashMap::default(),
+ runnables_update_task: Task::ready(()),
+ }
+ }
+
+ pub fn runnables(
+ &self,
+ (buffer_id, buffer_row): (BufferId, BufferRow),
+ ) -> Option<&RunnableTasks> {
+ self.runnables.get(&buffer_id)?.1.get(&buffer_row)
+ }
+
+ pub fn all_runnables(&self) -> impl Iterator<Item = &RunnableTasks> {
+ self.runnables
+ .values()
+ .flat_map(|(_, tasks)| tasks.values())
+ }
+
+ pub fn has_cached(&self, buffer_id: BufferId, version: &Global) -> bool {
+ self.runnables
+ .get(&buffer_id)
+ .is_some_and(|(cached_version, _)| !version.changed_since(cached_version))
+ }
+
+ #[cfg(test)]
+ pub fn insert(
+ &mut self,
+ buffer_id: BufferId,
+ buffer_row: BufferRow,
+ version: Global,
+ tasks: RunnableTasks,
+ ) {
+ self.runnables
+ .entry(buffer_id)
+ .or_insert_with(|| (version, BTreeMap::default()))
+ .1
+ .insert(buffer_row, tasks);
+ }
+}
+
+#[derive(Clone, Debug)]
+pub struct RunnableTasks {
+ pub templates: Vec<(TaskSourceKind, TaskTemplate)>,
+ pub offset: multi_buffer::Anchor,
+ // We need the column at which the task context evaluation should take place (when we're spawning it via gutter).
+ pub column: u32,
+ // Values of all named captures, including those starting with '_'
+ pub extra_variables: HashMap<String, String>,
+ // Full range of the tagged region. We use it to determine which `extra_variables` to grab for context resolution in e.g. a modal.
+ pub context_range: Range<BufferOffset>,
+}
+
+impl RunnableTasks {
+ pub fn resolve<'a>(
+ &'a self,
+ cx: &'a task::TaskContext,
+ ) -> impl Iterator<Item = (TaskSourceKind, ResolvedTask)> + 'a {
+ self.templates.iter().filter_map(|(kind, template)| {
+ template
+ .resolve_task(&kind.to_id_base(), cx)
+ .map(|task| (kind.clone(), task))
+ })
+ }
+}
+
+#[derive(Clone)]
+pub struct ResolvedTasks {
+ pub templates: SmallVec<[(TaskSourceKind, ResolvedTask); 1]>,
+ pub position: Anchor,
+}
+
+impl Editor {
+ pub fn refresh_runnables(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ if !self.mode().is_full()
+ || !EditorSettings::get_global(cx).gutter.runnables
+ || !self.enable_runnables
+ {
+ self.clear_runnables(None);
+ return;
+ }
+ if let Some(buffer) = self.buffer().read(cx).as_singleton() {
+ if self
+ .runnables
+ .has_cached(buffer.read(cx).remote_id(), &buffer.read(cx).version())
+ {
+ return;
+ }
+ }
+
+ let project = self.project().map(Entity::downgrade);
+ let lsp_task_sources = self.lsp_task_sources(true, true, cx);
+ let multi_buffer = self.buffer.downgrade();
+ self.runnables.runnables_update_task = cx.spawn_in(window, async move |editor, cx| {
+ cx.background_executor().timer(UPDATE_DEBOUNCE).await;
+ let Some(project) = project.and_then(|p| p.upgrade()) else {
+ return;
+ };
+
+ let hide_runnables = project.update(cx, |project, _| project.is_via_collab());
+ if hide_runnables {
+ return;
+ }
+ let lsp_tasks = if lsp_task_sources.is_empty() {
+ Vec::new()
+ } else {
+ let Ok(lsp_tasks) = cx
+ .update(|_, cx| crate::lsp_tasks(project.clone(), &lsp_task_sources, None, cx))
+ else {
+ return;
+ };
+ lsp_tasks.await
+ };
+ let new_rows = {
+ let Some((multi_buffer_snapshot, multi_buffer_query_range)) = editor
+ .update(cx, |editor, cx| {
+ let multi_buffer = editor.buffer().read(cx);
+ if multi_buffer.is_singleton() {
+ Some((multi_buffer.snapshot(cx), Anchor::min()..Anchor::max()))
+ } else {
+ let display_snapshot =
+ editor.display_map.update(cx, |map, cx| map.snapshot(cx));
+ let multi_buffer_query_range =
+ editor.multi_buffer_visible_range(&display_snapshot, cx);
+ let multi_buffer_snapshot = display_snapshot.buffer();
+ Some((
+ multi_buffer_snapshot.clone(),
+ multi_buffer_query_range.to_anchors(&multi_buffer_snapshot),
+ ))
+ }
+ })
+ .ok()
+ .flatten()
+ else {
+ return;
+ };
+ cx.background_spawn({
+ async move {
+ multi_buffer_snapshot
+ .runnable_ranges(multi_buffer_query_range)
+ .collect()
+ }
+ })
+ .await
+ };
+
+ let Ok(multi_buffer_snapshot) =
+ editor.update(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx))
+ else {
+ return;
+ };
+ let Ok(mut lsp_tasks_by_rows) = cx.update(|_, cx| {
+ lsp_tasks
+ .into_iter()
+ .flat_map(|(kind, tasks)| {
+ tasks.into_iter().filter_map(move |(location, task)| {
+ Some((kind.clone(), location?, task))
+ })
+ })
+ .fold(HashMap::default(), |mut acc, (kind, location, task)| {
+ let buffer = location.target.buffer;
+ let buffer_snapshot = buffer.read(cx).snapshot();
+ let offset = multi_buffer_snapshot.excerpts().find_map(
+ |(excerpt_id, snapshot, _)| {
+ if snapshot.remote_id() == buffer_snapshot.remote_id() {
+ multi_buffer_snapshot
+ .anchor_in_excerpt(excerpt_id, location.target.range.start)
+ } else {
+ None
+ }
+ },
+ );
+ if let Some(offset) = offset {
+ let task_buffer_range =
+ location.target.range.to_point(&buffer_snapshot);
+ let context_buffer_range =
+ task_buffer_range.to_offset(&buffer_snapshot);
+ let context_range = BufferOffset(context_buffer_range.start)
+ ..BufferOffset(context_buffer_range.end);
+
+ acc.entry((buffer_snapshot.remote_id(), task_buffer_range.start.row))
+ .or_insert_with(|| RunnableTasks {
+ templates: Vec::new(),
+ offset,
+ column: task_buffer_range.start.column,
+ extra_variables: HashMap::default(),
+ context_range,
+ })
+ .templates
+ .push((kind, task.original_task().clone()));
+ }
+
+ acc
+ })
+ }) else {
+ return;
+ };
+
+ let Ok(prefer_lsp) = multi_buffer.update(cx, |buffer, cx| {
+ buffer.language_settings(cx).tasks.prefer_lsp
+ }) else {
+ return;
+ };
+
+ let rows = Self::runnable_rows(
+ project,
+ multi_buffer_snapshot,
+ prefer_lsp && !lsp_tasks_by_rows.is_empty(),
+ new_rows,
+ cx.clone(),
+ )
+ .await;
+ editor
+ .update(cx, |editor, cx| {
+ for ((buffer_id, row), mut new_tasks) in rows {
+ let Some(buffer) = editor.buffer().read(cx).buffer(buffer_id) else {
+ continue;
+ };
+
+ if let Some(lsp_tasks) = lsp_tasks_by_rows.remove(&(buffer_id, row)) {
+ new_tasks.templates.extend(lsp_tasks.templates);
+ }
+ editor.insert_runnables(
+ buffer_id,
+ buffer.read(cx).version(),
+ row,
+ new_tasks,
+ );
+ }
+ for ((buffer_id, row), new_tasks) in lsp_tasks_by_rows {
+ let Some(buffer) = editor.buffer().read(cx).buffer(buffer_id) else {
+ continue;
+ };
+ editor.insert_runnables(
+ buffer_id,
+ buffer.read(cx).version(),
+ row,
+ new_tasks,
+ );
+ }
+ })
+ .ok();
+ });
+ }
+
+ pub fn spawn_nearest_task(
+ &mut self,
+ action: &SpawnNearestTask,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let Some((workspace, _)) = self.workspace.clone() else {
+ return;
+ };
+ let Some(project) = self.project.clone() else {
+ return;
+ };
+
+ // Try to find the closest enclosing node, using tree-sitter, that has a task
+ let Some((buffer, buffer_row, tasks)) = self
+ .find_enclosing_node_task(cx)
+ // Or find the task that's closest in row-distance.
+ .or_else(|| self.find_closest_task(cx))
+ else {
+ return;
+ };
+
+ let reveal_strategy = action.reveal;
+ let task_context = Self::build_tasks_context(&project, &buffer, buffer_row, &tasks, cx);
+ cx.spawn_in(window, async move |_, cx| {
+ let context = task_context.await?;
+ let (task_source_kind, mut resolved_task) = tasks.resolve(&context).next()?;
+
+ let resolved = &mut resolved_task.resolved;
+ resolved.reveal = reveal_strategy;
+
+ workspace
+ .update_in(cx, |workspace, window, cx| {
+ workspace.schedule_resolved_task(
+ task_source_kind,
+ resolved_task,
+ false,
+ window,
+ cx,
+ );
+ })
+ .ok()
+ })
+ .detach();
+ }
+
+ pub fn clear_runnables(&mut self, for_buffer: Option<BufferId>) {
+ if let Some(buffer_id) = for_buffer {
+ self.runnables.runnables.remove(&buffer_id);
+ } else {
+ self.runnables.runnables.clear();
+ }
+ self.runnables.runnables_update_task = Task::ready(());
+ }
+
+ pub fn task_context(&self, window: &mut Window, cx: &mut App) -> Task<Option<TaskContext>> {
+ let Some(project) = self.project.clone() else {
+ return Task::ready(None);
+ };
+ let (selection, buffer, editor_snapshot) = {
+ let selection = self.selections.newest_adjusted(&self.display_snapshot(cx));
+ let Some((buffer, _)) = self
+ .buffer()
+ .read(cx)
+ .point_to_buffer_offset(selection.start, cx)
+ else {
+ return Task::ready(None);
+ };
+ let snapshot = self.snapshot(window, cx);
+ (selection, buffer, snapshot)
+ };
+ let selection_range = selection.range();
+ let start = editor_snapshot
+ .display_snapshot
+ .buffer_snapshot()
+ .anchor_after(selection_range.start)
+ .text_anchor;
+ let end = editor_snapshot
+ .display_snapshot
+ .buffer_snapshot()
+ .anchor_after(selection_range.end)
+ .text_anchor;
+ let location = Location {
+ buffer,
+ range: start..end,
+ };
+ let captured_variables = {
+ let mut variables = TaskVariables::default();
+ let buffer = location.buffer.read(cx);
+ let buffer_id = buffer.remote_id();
+ let snapshot = buffer.snapshot();
+ let starting_point = location.range.start.to_point(&snapshot);
+ let starting_offset = starting_point.to_offset(&snapshot);
+ for (_, tasks) in self
+ .runnables
+ .runnables
+ .get(&buffer_id)
+ .into_iter()
+ .flat_map(|(_, tasks)| tasks.range(0..starting_point.row + 1))
+ {
+ if !tasks
+ .context_range
+ .contains(&crate::BufferOffset(starting_offset))
+ {
+ continue;
+ }
+ for (capture_name, value) in tasks.extra_variables.iter() {
+ variables.insert(
+ VariableName::Custom(capture_name.to_owned().into()),
+ value.clone(),
+ );
+ }
+ }
+ variables
+ };
+
+ project.update(cx, |project, cx| {
+ project.task_store().update(cx, |task_store, cx| {
+ task_store.task_context_for_location(captured_variables, location, cx)
+ })
+ })
+ }
+
+ pub fn lsp_task_sources(
+ &self,
+ visible_only: bool,
+ skip_cached: bool,
+ cx: &mut Context<Self>,
+ ) -> HashMap<LanguageServerName, Vec<BufferId>> {
+ if !self.lsp_data_enabled() {
+ return HashMap::default();
+ }
+ let buffers = if visible_only {
+ self.visible_excerpts(true, cx)
+ .into_values()
+ .map(|(buffer, _, _)| buffer)
+ .collect()
+ } else {
+ self.buffer().read(cx).all_buffers()
+ };
+
+ let lsp_settings = &ProjectSettings::get_global(cx).lsp;
+
+ buffers
+ .into_iter()
+ .filter_map(|buffer| {
+ let lsp_tasks_source = buffer
+ .read(cx)
+ .language()?
+ .context_provider()?
+ .lsp_task_source()?;
+ if lsp_settings
+ .get(&lsp_tasks_source)
+ .is_none_or(|s| s.enable_lsp_tasks)
+ {
+ let buffer_id = buffer.read(cx).remote_id();
+ if skip_cached
+ && self
+ .runnables
+ .has_cached(buffer_id, &buffer.read(cx).version())
+ {
+ None
+ } else {
+ Some((lsp_tasks_source, buffer_id))
+ }
+ } else {
+ None
+ }
+ })
+ .fold(
+ HashMap::default(),
+ |mut acc, (lsp_task_source, buffer_id)| {
+ acc.entry(lsp_task_source)
+ .or_insert_with(Vec::new)
+ .push(buffer_id);
+ acc
+ },
+ )
+ }
+
+ pub fn find_enclosing_node_task(
+ &mut self,
+ cx: &mut Context<Self>,
+ ) -> Option<(Entity<Buffer>, u32, Arc<RunnableTasks>)> {
+ let snapshot = self.buffer.read(cx).snapshot(cx);
+ let offset = self
+ .selections
+ .newest::<MultiBufferOffset>(&self.display_snapshot(cx))
+ .head();
+ let mut excerpt = snapshot.excerpt_containing(offset..offset)?;
+ let offset = excerpt.map_offset_to_buffer(offset);
+ let buffer_id = excerpt.buffer().remote_id();
+
+ let layer = excerpt.buffer().syntax_layer_at(offset)?;
+ let mut cursor = layer.node().walk();
+
+ while cursor.goto_first_child_for_byte(offset.0).is_some() {
+ if cursor.node().end_byte() == offset.0 {
+ cursor.goto_next_sibling();
+ }
+ }
+
+ // Ascend to the smallest ancestor that contains the range and has a task.
+ loop {
+ let node = cursor.node();
+ let node_range = node.byte_range();
+ let symbol_start_row = excerpt.buffer().offset_to_point(node.start_byte()).row;
+
+ // Check if this node contains our offset
+ if node_range.start <= offset.0 && node_range.end >= offset.0 {
+ // If it contains offset, check for task
+ if let Some(tasks) = self
+ .runnables
+ .runnables
+ .get(&buffer_id)
+ .and_then(|(_, tasks)| tasks.get(&symbol_start_row))
+ {
+ let buffer = self.buffer.read(cx).buffer(buffer_id)?;
+ return Some((buffer, symbol_start_row, Arc::new(tasks.to_owned())));
+ }
+ }
+
+ if !cursor.goto_parent() {
+ break;
+ }
+ }
+ None
+ }
+
+ pub fn render_run_indicator(
+ &self,
+ _style: &EditorStyle,
+ is_active: bool,
+ row: DisplayRow,
+ breakpoint: Option<(Anchor, Breakpoint, Option<BreakpointSessionState>)>,
+ cx: &mut Context<Self>,
+ ) -> IconButton {
+ let color = Color::Muted;
+ let position = breakpoint.as_ref().map(|(anchor, _, _)| *anchor);
+
+ IconButton::new(
+ ("run_indicator", row.0 as usize),
+ ui::IconName::PlayOutlined,
+ )
+ .shape(ui::IconButtonShape::Square)
+ .icon_size(IconSize::XSmall)
+ .icon_color(color)
+ .toggle_state(is_active)
+ .on_click(cx.listener(move |editor, e: &ClickEvent, window, cx| {
+ let quick_launch = match e {
+ ClickEvent::Keyboard(_) => true,
+ ClickEvent::Mouse(e) => e.down.button == MouseButton::Left,
+ };
+
+ window.focus(&editor.focus_handle(cx), cx);
+ editor.toggle_code_actions(
+ &ToggleCodeActions {
+ deployed_from: Some(CodeActionSource::RunMenu(row)),
+ quick_launch,
+ },
+ window,
+ cx,
+ );
+ }))
+ .on_right_click(cx.listener(move |editor, event: &ClickEvent, window, cx| {
+ editor.set_breakpoint_context_menu(row, position, event.position(), window, cx);
+ }))
+ }
+
+ fn insert_runnables(
+ &mut self,
+ buffer: BufferId,
+ version: Global,
+ row: BufferRow,
+ new_tasks: RunnableTasks,
+ ) {
+ let (old_version, tasks) = self.runnables.runnables.entry(buffer).or_default();
+ if !old_version.changed_since(&version) {
+ *old_version = version;
+ tasks.insert(row, new_tasks);
+ }
+ }
+
+ fn runnable_rows(
+ project: Entity<Project>,
+ snapshot: MultiBufferSnapshot,
+ prefer_lsp: bool,
+ runnable_ranges: Vec<(Range<MultiBufferOffset>, language::RunnableRange)>,
+ cx: AsyncWindowContext,
+ ) -> Task<Vec<((BufferId, BufferRow), RunnableTasks)>> {
+ cx.spawn(async move |cx| {
+ let mut runnable_rows = Vec::with_capacity(runnable_ranges.len());
+ for (run_range, mut runnable) in runnable_ranges {
+ let Some(tasks) = cx
+ .update(|_, cx| Self::templates_with_tags(&project, &mut runnable.runnable, cx))
+ .ok()
+ else {
+ continue;
+ };
+ let mut tasks = tasks.await;
+
+ if prefer_lsp {
+ tasks.retain(|(task_kind, _)| {
+ !matches!(task_kind, TaskSourceKind::Language { .. })
+ });
+ }
+ if tasks.is_empty() {
+ continue;
+ }
+
+ let point = run_range.start.to_point(&snapshot);
+ let Some(row) = snapshot
+ .buffer_line_for_row(MultiBufferRow(point.row))
+ .map(|(_, range)| range.start.row)
+ else {
+ continue;
+ };
+
+ let context_range =
+ BufferOffset(runnable.full_range.start)..BufferOffset(runnable.full_range.end);
+ runnable_rows.push((
+ (runnable.buffer_id, row),
+ RunnableTasks {
+ templates: tasks,
+ offset: snapshot.anchor_before(run_range.start),
+ context_range,
+ column: point.column,
+ extra_variables: runnable.extra_captures,
+ },
+ ));
+ }
+ runnable_rows
+ })
+ }
+
+ fn templates_with_tags(
+ project: &Entity<Project>,
+ runnable: &mut Runnable,
+ cx: &mut App,
+ ) -> Task<Vec<(TaskSourceKind, TaskTemplate)>> {
+ let (inventory, worktree_id, file) = project.read_with(cx, |project, cx| {
+ let (worktree_id, file) = project
+ .buffer_for_id(runnable.buffer, cx)
+ .and_then(|buffer| buffer.read(cx).file())
+ .map(|file| (file.worktree_id(cx), file.clone()))
+ .unzip();
+
+ (
+ project.task_store().read(cx).task_inventory().cloned(),
+ worktree_id,
+ file,
+ )
+ });
+
+ let tags = mem::take(&mut runnable.tags);
+ let language = runnable.language.clone();
+ cx.spawn(async move |cx| {
+ let mut templates_with_tags = Vec::new();
+ if let Some(inventory) = inventory {
+ for RunnableTag(tag) in tags {
+ let new_tasks = inventory.update(cx, |inventory, cx| {
+ inventory.list_tasks(file.clone(), Some(language.clone()), worktree_id, cx)
+ });
+ templates_with_tags.extend(new_tasks.await.into_iter().filter(
+ move |(_, template)| {
+ template.tags.iter().any(|source_tag| source_tag == &tag)
+ },
+ ));
+ }
+ }
+ templates_with_tags.sort_by_key(|(kind, _)| kind.to_owned());
+
+ if let Some((leading_tag_source, _)) = templates_with_tags.first() {
+ // Strongest source wins; if we have worktree tag binding, prefer that to
+ // global and language bindings;
+ // if we have a global binding, prefer that to language binding.
+ let first_mismatch = templates_with_tags
+ .iter()
+ .position(|(tag_source, _)| tag_source != leading_tag_source);
+ if let Some(index) = first_mismatch {
+ templates_with_tags.truncate(index);
+ }
+ }
+
+ templates_with_tags
+ })
+ }
+
+ fn find_closest_task(
+ &mut self,
+ cx: &mut Context<Self>,
+ ) -> Option<(Entity<Buffer>, u32, Arc<RunnableTasks>)> {
+ let cursor_row = self
+ .selections
+ .newest_adjusted(&self.display_snapshot(cx))
+ .head()
+ .row;
+
+ let ((buffer_id, row), tasks) = self
+ .runnables
+ .runnables
+ .iter()
+ .flat_map(|(buffer_id, (_, tasks))| {
+ tasks.iter().map(|(row, tasks)| ((*buffer_id, *row), tasks))
+ })
+ .min_by_key(|((_, row), _)| cursor_row.abs_diff(*row))?;
+
+ let buffer = self.buffer.read(cx).buffer(buffer_id)?;
+ let tasks = Arc::new(tasks.to_owned());
+ Some((buffer, row, tasks))
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use std::{sync::Arc, time::Duration};
+
+ use gpui::{AppContext as _, Task, TestAppContext};
+ use indoc::indoc;
+ use language::ContextProvider;
+ use languages::rust_lang;
+ use multi_buffer::{MultiBuffer, PathKey};
+ use project::{FakeFs, Project};
+ use serde_json::json;
+ use task::{TaskTemplate, TaskTemplates};
+ use text::Point;
+ use util::path;
+
+ use crate::{
+ Editor, UPDATE_DEBOUNCE, editor_tests::init_test, scroll::scroll_amount::ScrollAmount,
+ };
+
+ struct TestRustContextProvider;
+
+ impl ContextProvider for TestRustContextProvider {
+ fn associated_tasks(
+ &self,
+ _: Option<Arc<dyn language::File>>,
+ _: &gpui::App,
+ ) -> Task<Option<TaskTemplates>> {
+ Task::ready(Some(TaskTemplates(vec![
+ TaskTemplate {
+ label: "Run main".into(),
+ command: "cargo".into(),
+ args: vec!["run".into()],
+ tags: vec!["rust-main".into()],
+ ..TaskTemplate::default()
+ },
+ TaskTemplate {
+ label: "Run test".into(),
+ command: "cargo".into(),
+ args: vec!["test".into()],
+ tags: vec!["rust-test".into()],
+ ..TaskTemplate::default()
+ },
+ ])))
+ }
+ }
+
+ fn rust_lang_with_task_context() -> Arc<language::Language> {
+ Arc::new(
+ Arc::try_unwrap(rust_lang())
+ .unwrap()
+ .with_context_provider(Some(Arc::new(TestRustContextProvider))),
+ )
+ }
+
+ fn collect_runnable_labels(
+ editor: &Editor,
+ ) -> Vec<(text::BufferId, language::BufferRow, Vec<String>)> {
+ let mut result = editor
+ .runnables
+ .runnables
+ .iter()
+ .flat_map(|(buffer_id, (_, tasks))| {
+ tasks.iter().map(move |(row, runnable_tasks)| {
+ let mut labels: Vec<String> = runnable_tasks
+ .templates
+ .iter()
+ .map(|(_, template)| template.label.clone())
+ .collect();
+ labels.sort();
+ (*buffer_id, *row, labels)
+ })
+ })
+ .collect::<Vec<_>>();
+ result.sort_by_key(|(id, row, _)| (*id, *row));
+ result
+ }
+
+ #[gpui::test]
+ async fn test_multi_buffer_runnables_on_scroll(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let padding_lines = 50;
+ let mut first_rs = String::from("fn main() {\n println!(\"hello\");\n}\n");
+ for _ in 0..padding_lines {
+ first_rs.push_str("//\n");
+ }
+ let test_one_row = 3 + padding_lines as u32 + 1;
+ first_rs.push_str("#[test]\nfn test_one() {\n assert!(true);\n}\n");
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ path!("/project"),
+ json!({
+ "first.rs": first_rs,
+ "second.rs": indoc! {"
+ #[test]
+ fn test_two() {
+ assert!(true);
+ }
+
+ #[test]
+ fn test_three() {
+ assert!(true);
+ }
+ "},
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, [path!("/project").as_ref()], cx).await;
+ let language_registry = project.read_with(cx, |project, _| project.languages().clone());
+ language_registry.add(rust_lang_with_task_context());
+
+ let buffer_1 = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/project/first.rs"), cx)
+ })
+ .await
+ .unwrap();
+ let buffer_2 = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/project/second.rs"), cx)
+ })
+ .await
+ .unwrap();
+
+ let buffer_1_id = buffer_1.read_with(cx, |buffer, _| buffer.remote_id());
+ let buffer_2_id = buffer_2.read_with(cx, |buffer, _| buffer.remote_id());
+
+ let multi_buffer = cx.new(|cx| {
+ let mut multi_buffer = MultiBuffer::new(language::Capability::ReadWrite);
+ let end = buffer_1.read(cx).max_point();
+ multi_buffer.set_excerpts_for_path(
+ PathKey::sorted(0),
+ buffer_1.clone(),
+ [Point::new(0, 0)..end],
+ 0,
+ cx,
+ );
+ multi_buffer.set_excerpts_for_path(
+ PathKey::sorted(1),
+ buffer_2.clone(),
+ [Point::new(0, 0)..Point::new(8, 1)],
+ 0,
+ cx,
+ );
+ multi_buffer
+ });
+
+ let editor = cx.add_window(|window, cx| {
+ Editor::for_multibuffer(multi_buffer, Some(project.clone()), window, cx)
+ });
+ cx.executor().advance_clock(Duration::from_millis(500));
+ cx.executor().run_until_parked();
+
+ // Clear stale data from startup events, then refresh.
+ // first.rs is long enough that second.rs is below the ~47-line viewport.
+ editor
+ .update(cx, |editor, window, cx| {
+ editor.clear_runnables(None);
+ editor.refresh_runnables(window, cx);
+ })
+ .unwrap();
+ cx.executor().advance_clock(UPDATE_DEBOUNCE);
+ cx.executor().run_until_parked();
+ assert_eq!(
+ editor
+ .update(cx, |editor, _, _| collect_runnable_labels(editor))
+ .unwrap(),
+ vec![(buffer_1_id, 0, vec!["Run main".to_string()])],
+ "Only fn main from first.rs should be visible before scrolling"
+ );
+
+ // Scroll down to bring second.rs excerpts into view.
+ editor
+ .update(cx, |editor, window, cx| {
+ editor.scroll_screen(&ScrollAmount::Page(1.0), window, cx);
+ })
+ .unwrap();
+ cx.executor().advance_clock(Duration::from_millis(200));
+ cx.executor().run_until_parked();
+
+ let after_scroll = editor
+ .update(cx, |editor, _, _| collect_runnable_labels(editor))
+ .unwrap();
+ assert_eq!(
+ after_scroll,
+ vec![
+ (buffer_1_id, 0, vec!["Run main".to_string()]),
+ (buffer_1_id, test_one_row, vec!["Run test".to_string()]),
+ (buffer_2_id, 1, vec!["Run test".to_string()]),
+ (buffer_2_id, 6, vec!["Run test".to_string()]),
+ ],
+ "Tree-sitter should detect both #[test] fns in second.rs after scroll"
+ );
+
+ // Edit second.rs to invalidate its cache; first.rs data should persist.
+ buffer_2.update(cx, |buffer, cx| {
+ buffer.edit([(0..0, "// added comment\n")], None, cx);
+ });
+ editor
+ .update(cx, |editor, window, cx| {
+ editor.scroll_screen(&ScrollAmount::Page(-1.0), window, cx);
+ })
+ .unwrap();
+ cx.executor().advance_clock(Duration::from_millis(200));
+ cx.executor().run_until_parked();
+
+ assert_eq!(
+ editor
+ .update(cx, |editor, _, _| collect_runnable_labels(editor))
+ .unwrap(),
+ vec![
+ (buffer_1_id, 0, vec!["Run main".to_string()]),
+ (buffer_1_id, test_one_row, vec!["Run test".to_string()]),
+ ],
+ "first.rs runnables should survive an edit to second.rs"
+ );
+ }
+}
@@ -119,7 +119,7 @@ impl Editor {
for_server: Option<RefreshForServer>,
cx: &mut Context<Self>,
) {
- if !self.mode().is_full() || !self.semantic_token_state.enabled() {
+ if !self.lsp_data_enabled() || !self.semantic_token_state.enabled() {
self.invalidate_semantic_tokens(None);
self.display_map.update(cx, |display_map, _| {
match Arc::get_mut(&mut display_map.semantic_token_highlights) {
@@ -446,6 +446,9 @@ impl SplittableEditor {
let mut editor =
Editor::for_multibuffer(rhs_multibuffer.clone(), Some(project.clone()), window, cx);
editor.set_expand_all_diff_hunks(cx);
+ editor.disable_runnables();
+ editor.disable_diagnostics(cx);
+ editor.set_minimap_visibility(crate::MinimapVisibility::Disabled, window, cx);
editor
});
// TODO(split-diff) we might want to tag editor events with whether they came from rhs/lhs
@@ -1,110 +0,0 @@
-use crate::Editor;
-
-use collections::HashMap;
-use gpui::{App, Task, Window};
-use lsp::LanguageServerName;
-use project::{Location, project_settings::ProjectSettings};
-use settings::Settings as _;
-use task::{TaskContext, TaskVariables, VariableName};
-use text::{BufferId, ToOffset, ToPoint};
-
-impl Editor {
- pub fn task_context(&self, window: &mut Window, cx: &mut App) -> Task<Option<TaskContext>> {
- let Some(project) = self.project.clone() else {
- return Task::ready(None);
- };
- let (selection, buffer, editor_snapshot) = {
- let selection = self.selections.newest_adjusted(&self.display_snapshot(cx));
- let Some((buffer, _)) = self
- .buffer()
- .read(cx)
- .point_to_buffer_offset(selection.start, cx)
- else {
- return Task::ready(None);
- };
- let snapshot = self.snapshot(window, cx);
- (selection, buffer, snapshot)
- };
- let selection_range = selection.range();
- let start = editor_snapshot
- .display_snapshot
- .buffer_snapshot()
- .anchor_after(selection_range.start)
- .text_anchor;
- let end = editor_snapshot
- .display_snapshot
- .buffer_snapshot()
- .anchor_after(selection_range.end)
- .text_anchor;
- let location = Location {
- buffer,
- range: start..end,
- };
- let captured_variables = {
- let mut variables = TaskVariables::default();
- let buffer = location.buffer.read(cx);
- let buffer_id = buffer.remote_id();
- let snapshot = buffer.snapshot();
- let starting_point = location.range.start.to_point(&snapshot);
- let starting_offset = starting_point.to_offset(&snapshot);
- for (_, tasks) in self
- .tasks
- .range((buffer_id, 0)..(buffer_id, starting_point.row + 1))
- {
- if !tasks
- .context_range
- .contains(&crate::BufferOffset(starting_offset))
- {
- continue;
- }
- for (capture_name, value) in tasks.extra_variables.iter() {
- variables.insert(
- VariableName::Custom(capture_name.to_owned().into()),
- value.clone(),
- );
- }
- }
- variables
- };
-
- project.update(cx, |project, cx| {
- project.task_store().update(cx, |task_store, cx| {
- task_store.task_context_for_location(captured_variables, location, cx)
- })
- })
- }
-
- pub fn lsp_task_sources(&self, cx: &App) -> HashMap<LanguageServerName, Vec<BufferId>> {
- let lsp_settings = &ProjectSettings::get_global(cx).lsp;
-
- self.buffer()
- .read(cx)
- .all_buffers()
- .into_iter()
- .filter_map(|buffer| {
- let lsp_tasks_source = buffer
- .read(cx)
- .language()?
- .context_provider()?
- .lsp_task_source()?;
- if lsp_settings
- .get(&lsp_tasks_source)
- .is_none_or(|s| s.enable_lsp_tasks)
- {
- let buffer_id = buffer.read(cx).remote_id();
- Some((lsp_tasks_source, buffer_id))
- } else {
- None
- }
- })
- .fold(
- HashMap::default(),
- |mut acc, (lsp_task_source, buffer_id)| {
- acc.entry(lsp_task_source)
- .or_insert_with(Vec::new)
- .push(buffer_id);
- acc
- },
- )
- }
-}
@@ -15,7 +15,7 @@ use project::{
git_store::{GitStoreEvent, RepositoryEvent},
};
use settings::Settings;
-use std::{ops::Range, sync::Arc};
+use std::{cell::RefCell, ops::Range, rc::Rc, sync::Arc};
use ui::{ActiveTheme, Divider, Element as _, Styled, Window, prelude::*};
use util::{ResultExt as _, debug_panic, maybe};
use workspace::{
@@ -534,7 +534,9 @@ pub(crate) fn register_conflict_notification(
) {
let git_store = workspace.project().read(cx).git_store().clone();
- cx.subscribe(&git_store, |workspace, _git_store, event, cx| {
+ let last_shown_paths: Rc<RefCell<HashSet<String>>> = Rc::new(RefCell::new(HashSet::default()));
+
+ cx.subscribe(&git_store, move |workspace, _git_store, event, cx| {
let conflicts_changed = matches!(
event,
GitStoreEvent::ConflictsUpdated
@@ -546,10 +548,15 @@ pub(crate) fn register_conflict_notification(
let paths = collect_conflicted_file_paths(workspace, cx);
let notification_id = merge_conflict_notification_id();
+ let current_paths_set: HashSet<String> = paths.iter().cloned().collect();
if paths.is_empty() {
+ last_shown_paths.borrow_mut().clear();
workspace.dismiss_notification(¬ification_id, cx);
- } else {
+ } else if *last_shown_paths.borrow() != current_paths_set {
+ // Only show the notification if the set of conflicted paths has changed.
+ // This prevents re-showing after the user dismisses it while working on the same conflicts.
+ *last_shown_paths.borrow_mut() = current_paths_set;
let file_count = paths.len();
workspace.show_notification(notification_id, cx, |cx| {
cx.new(|cx| {
@@ -560,7 +567,7 @@ pub(crate) fn register_conflict_notification(
};
MessageNotification::new(message, cx)
- .primary_message("Resolve Conflicts with Agent")
+ .primary_message("Resolve with Agent")
.primary_icon(IconName::ZedAssistant)
.primary_icon_color(Color::Muted)
.primary_on_click({
@@ -259,7 +259,7 @@ impl AudioStack {
apm: Arc<Mutex<apm::AudioProcessingModule>>,
mixer: Arc<Mutex<audio_mixer::AudioMixer>>,
sample_rate: u32,
- num_channels: u32,
+ _num_channels: u32,
output_audio_device: Option<DeviceId>,
) -> Result<()> {
// Prevent App Nap from throttling audio playback on macOS.
@@ -271,6 +271,7 @@ impl AudioStack {
let mut device_change_listener = DeviceChangeListener::new(false)?;
let (output_device, output_config) =
crate::default_device(false, output_audio_device.as_ref())?;
+ info!("Output config: {output_config:?}");
let (end_on_drop_tx, end_on_drop_rx) = std::sync::mpsc::channel::<()>();
let mixer = mixer.clone();
let apm = apm.clone();
@@ -301,7 +302,12 @@ impl AudioStack {
let sampled = resampler.remix_and_resample(
mixed,
sample_rate / 100,
- num_channels,
+ // We need to assume output number of channels as otherwise we will
+ // crash in process_reverse_stream otherwise as livekit's audio resampler
+ // does not seem to support non-matching channel counts.
+ // NOTE: you can verify this by debug printing buf.len() after this stage.
+ // For 2->4 channel upmix, we should see buf.len=1920, buf we get only 960.
+ output_config.channels() as u32,
sample_rate,
output_config.channels() as u32,
output_config.sample_rate(),
@@ -316,7 +316,9 @@ pub fn task_contexts(
let lsp_task_sources = active_editor
.as_ref()
- .map(|active_editor| active_editor.update(cx, |editor, cx| editor.lsp_task_sources(cx)))
+ .map(|active_editor| {
+ active_editor.update(cx, |editor, cx| editor.lsp_task_sources(false, false, cx))
+ })
.unwrap_or_default();
let latest_selection = active_editor.as_ref().map(|active_editor| {
@@ -227,6 +227,12 @@ impl RenderOnce for ThreadItem {
.gradient_stop(0.8)
.group_name("thread-item");
+ let has_diff_stats = self.added.is_some() || self.removed.is_some();
+ let added_count = self.added.unwrap_or(0);
+ let removed_count = self.removed.unwrap_or(0);
+ let diff_stat_id = self.id.clone();
+ let has_worktree = self.worktree.is_some();
+
v_flex()
.id(self.id.clone())
.group("thread-item")
@@ -235,7 +241,7 @@ impl RenderOnce for ThreadItem {
.cursor_pointer()
.w_full()
.map(|this| {
- if self.worktree.is_some() {
+ if has_worktree || has_diff_stats {
this.p_2()
} else {
this.px_2().py_1()
@@ -300,35 +306,24 @@ impl RenderOnce for ThreadItem {
.gap_1p5()
.child(icon_container()) // Icon Spacing
.child(worktree_label)
- // TODO: Uncomment the elements below when we're ready to expose this data
- // .child(dot_separator())
- // .child(
- // Label::new(self.timestamp)
- // .size(LabelSize::Small)
- // .color(Color::Muted),
- // )
- // .child(
- // Label::new("•")
- // .size(LabelSize::Small)
- // .color(Color::Muted)
- // .alpha(0.5),
- // )
- // .when(has_no_changes, |this| {
- // this.child(
- // Label::new("No Changes")
- // .size(LabelSize::Small)
- // .color(Color::Muted),
- // )
- // })
- .when(self.added.is_some() || self.removed.is_some(), |this| {
+ .when(has_diff_stats, |this| {
this.child(DiffStat::new(
- self.id,
- self.added.unwrap_or(0),
- self.removed.unwrap_or(0),
+ diff_stat_id.clone(),
+ added_count,
+ removed_count,
))
}),
)
})
+ .when(!has_worktree && has_diff_stats, |this| {
+ this.child(
+ h_flex()
+ .min_w_0()
+ .gap_1p5()
+ .child(icon_container()) // Icon Spacing
+ .child(DiffStat::new(diff_stat_id, added_count, removed_count)),
+ )
+ })
.when_some(self.on_click, |this, on_click| this.on_click(on_click))
}
}
@@ -657,15 +657,17 @@ impl RenderOnce for NotificationFrame {
IconButton::new(close_id, close_icon)
.tooltip(move |_window, cx| {
if suppress {
- Tooltip::for_action(
- "Suppress.\nClose with click.",
- &SuppressNotification,
+ Tooltip::with_meta(
+ "Suppress",
+ Some(&SuppressNotification),
+ "Click to Close",
cx,
)
} else if show_suppress_button {
- Tooltip::for_action(
- "Close.\nSuppress with shift-click.",
- &menu::Cancel,
+ Tooltip::with_meta(
+ "Close",
+ Some(&menu::Cancel),
+ "Shift-click to Suppress",
cx,
)
} else {
@@ -2,7 +2,7 @@
description = "The fast, collaborative code editor."
edition.workspace = true
name = "zed"
-version = "0.228.0"
+version = "0.229.0"
publish.workspace = true
license = "GPL-3.0-or-later"
authors = ["Zed Team <hi@zed.dev>"]
@@ -368,7 +368,10 @@ mark.fade-out {
.searchbar-outer {
margin-inline-start: auto;
margin-inline-end: auto;
+ width: 100%;
max-width: var(--content-max-width);
+ box-sizing: border-box;
+ padding: 16px;
}
#searchbar {
@@ -394,21 +397,21 @@ mark.fade-out {
.searchresults-header {
font-weight: bold;
font-size: 1em;
- padding-block-start: 18px;
+ padding-block-start: 0;
padding-block-end: 0;
- padding-inline-start: 5px;
- padding-inline-end: 0;
color: var(--searchresults-header-fg);
}
ul#searchresults {
list-style: none;
padding-inline-start: 0;
+ margin-block-end: 0;
}
ul#searchresults li {
margin: 10px 0px;
padding: 2px;
border-radius: 2px;
+ scroll-margin-block-end: 10px;
}
ul#searchresults li.focus {
background-color: var(--searchresults-li-bg);
@@ -794,8 +797,7 @@ ul#searchresults span.teaser em {
max-height: 600px;
display: flex;
flex-direction: column;
- padding: 16px;
- overflow-y: auto;
+ overflow-y: hidden;
border-radius: 8px;
background: var(--popover-bg);
@@ -803,8 +805,11 @@ ul#searchresults span.teaser em {
box-shadow: var(--popover-shadow);
}
-.searchbar-outer {
- width: 100%;
+.searchresults-outer {
+ flex: 1;
+ min-height: 0;
+ overflow-y: auto;
+ padding: 0px 22px 22px 22px;
}
#searchbar {
@@ -424,6 +424,31 @@
<script src="{{ path_to_root }}elasticlunr.min.js"></script>
<script src="{{ path_to_root }}mark.min.js"></script>
<script src="{{ path_to_root }}searcher.js"></script>
+
+ <script>
+ (function () {
+ // Check for focused search result and bring into the view
+ const ensureVisible = () => {
+ const focused = document.querySelector("#searchresults li.focus");
+
+ if (focused) {
+ focused.scrollIntoView({
+ block: "nearest",
+ inline: "nearest"
+ });
+ }
+ };
+
+ // 1. Listen for arrow key events
+ // 2. Wait for DOM to update
+ // 3. Call envsureVisible
+ document.addEventListener("keydown", function (e) {
+ if (e.key === "ArrowDown" || e.key === "ArrowUp") {
+ requestAnimationFrame(ensureVisible);
+ }
+ });
+ })();
+ </script>
{{/if}}
<script src="{{ path_to_root }}clipboard.min.js"></script>
@@ -29,38 +29,99 @@ mod runners;
mod steps;
mod vars;
+#[derive(Clone)]
+pub(crate) struct GitSha(String);
+
+impl AsRef<str> for GitSha {
+ fn as_ref(&self) -> &str {
+ &self.0
+ }
+}
+
+#[allow(
+ clippy::disallowed_methods,
+ reason = "This runs only in a CLI environment"
+)]
+fn parse_ref(value: &str) -> Result<GitSha, String> {
+ const GIT_SHA_LENGTH: usize = 40;
+ (value.len() == GIT_SHA_LENGTH)
+ .then_some(value)
+ .ok_or_else(|| {
+ format!(
+ "Git SHA has wrong length! \
+ Only SHAs with a full length of {GIT_SHA_LENGTH} are supported, found {len} characters.",
+ len = value.len()
+ )
+ })
+ .and_then(|value| {
+ let mut tmp = [0; 4];
+ value
+ .chars()
+ .all(|char| u16::from_str_radix(char.encode_utf8(&mut tmp), 16).is_ok()).then_some(value)
+ .ok_or_else(|| "Not a valid Git SHA".to_owned())
+ })
+ .and_then(|sha| {
+ std::process::Command::new("git")
+ .args([
+ "rev-parse",
+ "--quiet",
+ "--verify",
+ &format!("{sha}^{{commit}}")
+ ])
+ .output()
+ .map_err(|_| "Failed to spawn Git command to verify SHA".to_owned())
+ .and_then(|output|
+ output
+ .status.success()
+ .then_some(sha)
+ .ok_or_else(|| format!("SHA {sha} is not a valid Git SHA within this repository!")))
+ }).map(|sha| GitSha(sha.to_owned()))
+}
+
#[derive(Parser)]
-pub struct GenerateWorkflowArgs {}
+pub(crate) struct GenerateWorkflowArgs {
+ #[arg(value_parser = parse_ref)]
+ /// The Git SHA to use when invoking this
+ pub(crate) sha: Option<GitSha>,
+}
+
+enum WorkflowSource {
+ Contextless(fn() -> Workflow),
+ WithContext(fn(&GenerateWorkflowArgs) -> Workflow),
+}
struct WorkflowFile {
- source: fn() -> Workflow,
+ source: WorkflowSource,
r#type: WorkflowType,
}
impl WorkflowFile {
fn zed(f: fn() -> Workflow) -> WorkflowFile {
WorkflowFile {
- source: f,
+ source: WorkflowSource::Contextless(f),
r#type: WorkflowType::Zed,
}
}
- fn extension(f: fn() -> Workflow) -> WorkflowFile {
+ fn extension(f: fn(&GenerateWorkflowArgs) -> Workflow) -> WorkflowFile {
WorkflowFile {
- source: f,
+ source: WorkflowSource::WithContext(f),
r#type: WorkflowType::ExtensionCi,
}
}
- fn extension_shared(f: fn() -> Workflow) -> WorkflowFile {
+ fn extension_shared(f: fn(&GenerateWorkflowArgs) -> Workflow) -> WorkflowFile {
WorkflowFile {
- source: f,
+ source: WorkflowSource::WithContext(f),
r#type: WorkflowType::ExtensionsShared,
}
}
- fn generate_file(&self) -> Result<()> {
- let workflow = (self.source)();
+ fn generate_file(&self, workflow_args: &GenerateWorkflowArgs) -> Result<()> {
+ let workflow = match &self.source {
+ WorkflowSource::Contextless(f) => f(),
+ WorkflowSource::WithContext(f) => f(workflow_args),
+ };
let workflow_folder = self.r#type.folder_path();
fs::create_dir_all(&workflow_folder).with_context(|| {
@@ -124,7 +185,7 @@ impl WorkflowType {
}
}
-pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> {
+pub fn run_workflows(args: GenerateWorkflowArgs) -> Result<()> {
if !Path::new("crates/zed/").is_dir() {
anyhow::bail!("xtask workflows must be ran from the project root");
}
@@ -154,7 +215,7 @@ pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> {
];
for workflow_file in workflows {
- workflow_file.generate_file()?;
+ workflow_file.generate_file(&args)?;
}
workflow_checks::validate(Default::default())
@@ -6,46 +6,72 @@ use indoc::indoc;
use serde_json::json;
use crate::tasks::workflows::steps::CheckoutStep;
+use crate::tasks::workflows::steps::cache_rust_dependencies_namespace;
+use crate::tasks::workflows::vars::JobOutput;
use crate::tasks::workflows::{
extension_bump::{RepositoryTarget, generate_token},
runners,
steps::{self, DEFAULT_REPOSITORY_OWNER_GUARD, NamedJob, named},
- vars::{self, StepOutput},
+ vars::{self, StepOutput, WorkflowInput},
};
const ROLLOUT_TAG_NAME: &str = "extension-workflows";
+const WORKFLOW_ARTIFACT_NAME: &str = "extension-workflow-files";
pub(crate) fn extension_workflow_rollout() -> Workflow {
- let fetch_repos = fetch_extension_repos();
- let rollout_workflows = rollout_workflows_to_extension(&fetch_repos);
- let create_tag = create_rollout_tag(&rollout_workflows);
+ let filter_repos_input = WorkflowInput::string("filter-repos", Some(String::new()))
+ .description(
+ "Comma-separated list of repository names to rollout to. Leave empty for all repos.",
+ );
+ let extra_context_input = WorkflowInput::string("change-description", Some(String::new()))
+ .description("Description for the changes to be expected with this rollout");
+
+ let (fetch_repos, removed_ci, removed_shared) = fetch_extension_repos(&filter_repos_input);
+ let rollout_workflows = rollout_workflows_to_extension(
+ &fetch_repos,
+ removed_ci,
+ removed_shared,
+ &extra_context_input,
+ );
+ let create_tag = create_rollout_tag(&rollout_workflows, &filter_repos_input);
named::workflow()
- .on(Event::default().workflow_dispatch(WorkflowDispatch::default()))
+ .on(Event::default().workflow_dispatch(
+ WorkflowDispatch::default()
+ .add_input(filter_repos_input.name, filter_repos_input.input())
+ .add_input(extra_context_input.name, extra_context_input.input()),
+ ))
.add_env(("CARGO_TERM_COLOR", "always"))
.add_job(fetch_repos.name, fetch_repos.job)
.add_job(rollout_workflows.name, rollout_workflows.job)
.add_job(create_tag.name, create_tag.job)
}
-fn fetch_extension_repos() -> NamedJob {
- fn get_repositories() -> (Step<Use>, StepOutput) {
+fn fetch_extension_repos(filter_repos_input: &WorkflowInput) -> (NamedJob, JobOutput, JobOutput) {
+ fn get_repositories(filter_repos_input: &WorkflowInput) -> (Step<Use>, StepOutput) {
let step = named::uses("actions", "github-script", "v7")
.id("list-repos")
.add_with((
"script",
- indoc::indoc! {r#"
- const repos = await github.paginate(github.rest.repos.listForOrg, {
+ formatdoc! {r#"
+ const repos = await github.paginate(github.rest.repos.listForOrg, {{
org: 'zed-extensions',
type: 'public',
per_page: 100,
- });
+ }});
- const filteredRepos = repos
+ let filteredRepos = repos
.filter(repo => !repo.archived)
.map(repo => repo.name);
- console.log(`Found ${filteredRepos.length} extension repos`);
+ const filterInput = `{filter_repos_input}`.trim();
+ if (filterInput.length > 0) {{
+ const allowedNames = filterInput.split(',').map(s => s.trim()).filter(s => s.length > 0);
+ filteredRepos = filteredRepos.filter(name => allowedNames.includes(name));
+ console.log(`Filter applied. Matched ${{filteredRepos.length}} repos from ${{allowedNames.length}} requested.`);
+ }}
+
+ console.log(`Found ${{filteredRepos.length}} extension repos`);
return filteredRepos;
"#},
))
@@ -56,36 +82,12 @@ fn fetch_extension_repos() -> NamedJob {
(step, filtered_repos)
}
- let (get_org_repositories, list_repos_output) = get_repositories();
-
- let job = Job::default()
- .cond(Expression::new(format!(
- "{DEFAULT_REPOSITORY_OWNER_GUARD} && github.ref == 'refs/heads/main'"
- )))
- .runs_on(runners::LINUX_SMALL)
- .timeout_minutes(5u32)
- .outputs([("repos".to_owned(), list_repos_output.to_string())])
- .add_step(get_org_repositories);
-
- named::job(job)
-}
-
-fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
fn checkout_zed_repo() -> CheckoutStep {
steps::checkout_repo()
.with_full_history()
- .with_path("zed")
.with_custom_name("checkout_zed_repo")
}
- fn checkout_extension_repo(token: &StepOutput) -> CheckoutStep {
- steps::checkout_repo()
- .with_custom_name("checkout_extension_repo")
- .with_token(token)
- .with_repository("zed-extensions/${{ matrix.repo }}")
- .with_path("extension")
- }
-
fn get_previous_tag_commit() -> (Step<Run>, StepOutput) {
let step = named::bash(formatdoc! {r#"
PREV_COMMIT=$(git rev-parse "{ROLLOUT_TAG_NAME}^{{commit}}" 2>/dev/null || echo "")
@@ -96,49 +98,126 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
echo "Found previous rollout at commit: $PREV_COMMIT"
echo "prev_commit=$PREV_COMMIT" >> "$GITHUB_OUTPUT"
"#})
- .id("prev-tag")
- .working_directory("zed");
+ .id("prev-tag");
let step_output = StepOutput::new(&step, "prev_commit");
(step, step_output)
}
- fn get_removed_files(prev_commit: &StepOutput) -> (Step<Run>, StepOutput) {
- let step = named::bash(indoc::indoc! {r#"
- if [ "$MATRIX_REPO" = "workflows" ]; then
- WORKFLOW_DIR="extensions/workflows"
- else
- WORKFLOW_DIR="extensions/workflows/shared"
- fi
-
- echo "Calculating changes from $PREV_COMMIT to HEAD for $WORKFLOW_DIR"
+ fn get_removed_files(prev_commit: &StepOutput) -> (Step<Run>, StepOutput, StepOutput) {
+ let step = named::bash(indoc! {r#"
+ for workflow_type in "ci" "shared"; do
+ if [ "$workflow_type" = "ci" ]; then
+ WORKFLOW_DIR="extensions/workflows"
+ else
+ WORKFLOW_DIR="extensions/workflows/shared"
+ fi
+
+ REMOVED=$(git diff --name-status -M "$PREV_COMMIT" HEAD -- "$WORKFLOW_DIR" | \
+ awk '/^D/ { print $2 } /^R/ { print $2 }' | \
+ xargs -I{} basename {} 2>/dev/null | \
+ tr '\n' ' ' || echo "")
+ REMOVED=$(echo "$REMOVED" | xargs)
+
+ echo "Removed files for $workflow_type: $REMOVED"
+ echo "removed_${workflow_type}=$REMOVED" >> "$GITHUB_OUTPUT"
+ done
+ "#})
+ .id("calc-changes")
+ .add_env(("PREV_COMMIT", prev_commit.to_string()));
- # Get deleted files (status D) and renamed files (status R - old name needs removal)
- # Using -M to detect renames, then extracting files that are gone from their original location
- REMOVED_FILES=$(git diff --name-status -M "$PREV_COMMIT" HEAD -- "$WORKFLOW_DIR" | \
- awk '/^D/ { print $2 } /^R/ { print $2 }' | \
- xargs -I{} basename {} 2>/dev/null | \
- tr '\n' ' ' || echo "")
+ let removed_ci = StepOutput::new(&step, "removed_ci");
+ let removed_shared = StepOutput::new(&step, "removed_shared");
- REMOVED_FILES=$(echo "$REMOVED_FILES" | xargs)
+ (step, removed_ci, removed_shared)
+ }
- echo "Files to remove: $REMOVED_FILES"
- echo "removed_files=$REMOVED_FILES" >> "$GITHUB_OUTPUT"
+ fn generate_workflow_files() -> Step<Run> {
+ named::bash(indoc! {r#"
+ cargo xtask workflows "$COMMIT_SHA"
"#})
- .id("calc-changes")
- .working_directory("zed")
- .add_env(("PREV_COMMIT", prev_commit.to_string()))
- .add_env(("MATRIX_REPO", "${{ matrix.repo }}"));
+ .add_env(("COMMIT_SHA", "${{ github.sha }}"))
+ }
- let removed_files = StepOutput::new(&step, "removed_files");
+ fn upload_workflow_files() -> Step<Use> {
+ named::uses(
+ "actions",
+ "upload-artifact",
+ "330a01c490aca151604b8cf639adc76d48f6c5d4", // v5
+ )
+ .add_with(("name", WORKFLOW_ARTIFACT_NAME))
+ .add_with(("path", "extensions/workflows/**/*.yml"))
+ .add_with(("if-no-files-found", "error"))
+ }
- (step, removed_files)
+ let (get_org_repositories, list_repos_output) = get_repositories(filter_repos_input);
+ let (get_prev_tag, prev_commit) = get_previous_tag_commit();
+ let (calc_changes, removed_ci, removed_shared) = get_removed_files(&prev_commit);
+
+ let job = Job::default()
+ .cond(Expression::new(format!(
+ "{DEFAULT_REPOSITORY_OWNER_GUARD} && github.ref == 'refs/heads/main'"
+ )))
+ .runs_on(runners::LINUX_SMALL)
+ .timeout_minutes(10u32)
+ .outputs([
+ ("repos".to_owned(), list_repos_output.to_string()),
+ ("prev_commit".to_owned(), prev_commit.to_string()),
+ ("removed_ci".to_owned(), removed_ci.to_string()),
+ ("removed_shared".to_owned(), removed_shared.to_string()),
+ ])
+ .add_step(checkout_zed_repo())
+ .add_step(get_prev_tag)
+ .add_step(calc_changes)
+ .add_step(get_org_repositories)
+ .add_step(cache_rust_dependencies_namespace())
+ .add_step(generate_workflow_files())
+ .add_step(upload_workflow_files());
+
+ let job = named::job(job);
+ let (removed_ci, removed_shared) = (
+ removed_ci.as_job_output(&job),
+ removed_shared.as_job_output(&job),
+ );
+
+ (job, removed_ci, removed_shared)
+}
+
+fn rollout_workflows_to_extension(
+ fetch_repos_job: &NamedJob,
+ removed_ci: JobOutput,
+ removed_shared: JobOutput,
+ extra_context_input: &WorkflowInput,
+) -> NamedJob {
+ fn checkout_extension_repo(token: &StepOutput) -> CheckoutStep {
+ steps::checkout_repo()
+ .with_custom_name("checkout_extension_repo")
+ .with_token(token)
+ .with_repository("zed-extensions/${{ matrix.repo }}")
+ .with_path("extension")
+ }
+
+ fn download_workflow_files() -> Step<Use> {
+ named::uses(
+ "actions",
+ "download-artifact",
+ "018cc2cf5baa6db3ef3c5f8a56943fffe632ef53", // v6.0.0
+ )
+ .add_with(("name", WORKFLOW_ARTIFACT_NAME))
+ .add_with(("path", "workflow-files"))
}
- fn sync_workflow_files(removed_files: &StepOutput) -> Step<Run> {
- named::bash(indoc::indoc! {r#"
+ fn sync_workflow_files(removed_ci: JobOutput, removed_shared: JobOutput) -> Step<Run> {
+ named::bash(indoc! {r#"
mkdir -p extension/.github/workflows
+
+ if [ "$MATRIX_REPO" = "workflows" ]; then
+ REMOVED_FILES="$REMOVED_CI"
+ else
+ REMOVED_FILES="$REMOVED_SHARED"
+ fi
+
cd extension/.github/workflows
if [ -n "$REMOVED_FILES" ]; then
@@ -152,40 +231,46 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
cd - > /dev/null
if [ "$MATRIX_REPO" = "workflows" ]; then
- cp zed/extensions/workflows/*.yml extension/.github/workflows/
+ cp workflow-files/*.yml extension/.github/workflows/
else
- cp zed/extensions/workflows/shared/*.yml extension/.github/workflows/
+ cp workflow-files/shared/*.yml extension/.github/workflows/
fi
"#})
- .add_env(("REMOVED_FILES", removed_files.to_string()))
+ .add_env(("REMOVED_CI", removed_ci))
+ .add_env(("REMOVED_SHARED", removed_shared))
.add_env(("MATRIX_REPO", "${{ matrix.repo }}"))
}
fn get_short_sha() -> (Step<Run>, StepOutput) {
- let step = named::bash(indoc::indoc! {r#"
- echo "sha_short=$(git rev-parse --short=7 HEAD)" >> "$GITHUB_OUTPUT"
+ let step = named::bash(indoc! {r#"
+ echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT"
"#})
- .id("short-sha")
- .working_directory("zed");
+ .id("short-sha");
let step_output = StepOutput::new(&step, "sha_short");
(step, step_output)
}
- fn create_pull_request(token: &StepOutput, short_sha: &StepOutput) -> Step<Use> {
+ fn create_pull_request(
+ token: &StepOutput,
+ short_sha: &StepOutput,
+ context_input: &WorkflowInput,
+ ) -> Step<Use> {
let title = format!("Update CI workflows to `{short_sha}`");
+ let body = formatdoc! {r#"
+ This PR updates the CI workflow files from the main Zed repository
+ based on the commit zed-industries/zed@${{{{ github.sha }}}}
+
+ {context_input}
+ "#,
+ };
+
named::uses("peter-evans", "create-pull-request", "v7")
.add_with(("path", "extension"))
.add_with(("title", title.clone()))
- .add_with((
- "body",
- indoc::indoc! {r#"
- This PR updates the CI workflow files from the main Zed repository
- based on the commit zed-industries/zed@${{ github.sha }}
- "#},
- ))
+ .add_with(("body", body))
.add_with(("commit-message", title))
.add_with(("branch", "update-workflows"))
.add_with((
@@ -204,12 +289,12 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
}
fn enable_auto_merge(token: &StepOutput) -> Step<gh_workflow::Run> {
- named::bash(indoc::indoc! {r#"
+ named::bash(indoc! {r#"
if [ -n "$PR_NUMBER" ]; then
- cd extension
gh pr merge "$PR_NUMBER" --auto --squash
fi
"#})
+ .working_directory("extension")
.add_env(("GH_TOKEN", token.to_string()))
.add_env((
"PR_NUMBER",
@@ -228,8 +313,6 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
]),
),
);
- let (get_prev_tag, prev_commit) = get_previous_tag_commit();
- let (calc_changes, removed_files) = get_removed_files(&prev_commit);
let (calculate_short_sha, short_sha) = get_short_sha();
let job = Job::default()
@@ -249,19 +332,17 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
})),
)
.add_step(authenticate)
- .add_step(checkout_zed_repo())
.add_step(checkout_extension_repo(&token))
- .add_step(get_prev_tag)
- .add_step(calc_changes)
- .add_step(sync_workflow_files(&removed_files))
+ .add_step(download_workflow_files())
+ .add_step(sync_workflow_files(removed_ci, removed_shared))
.add_step(calculate_short_sha)
- .add_step(create_pull_request(&token, &short_sha))
+ .add_step(create_pull_request(&token, &short_sha, extra_context_input))
.add_step(enable_auto_merge(&token));
named::job(job)
}
-fn create_rollout_tag(rollout_job: &NamedJob) -> NamedJob {
+fn create_rollout_tag(rollout_job: &NamedJob, filter_repos_input: &WorkflowInput) -> NamedJob {
fn checkout_zed_repo(token: &StepOutput) -> CheckoutStep {
steps::checkout_repo().with_full_history().with_token(token)
}
@@ -297,6 +378,10 @@ fn create_rollout_tag(rollout_job: &NamedJob) -> NamedJob {
let job = Job::default()
.needs([rollout_job.name.clone()])
+ .cond(Expression::new(format!(
+ "{filter_repos} == ''",
+ filter_repos = filter_repos_input.expr(),
+ )))
.runs_on(runners::LINUX_SMALL)
.timeout_minutes(1u32)
.add_step(authenticate)
@@ -5,17 +5,18 @@ use gh_workflow::{
use indoc::indoc;
use crate::tasks::workflows::{
+ GenerateWorkflowArgs, GitSha,
extensions::WithAppSecrets,
runners,
steps::{CommonJobConditions, NamedJob, named},
vars::{JobOutput, StepOutput, one_workflow_per_non_main_branch_and_token},
};
-pub(crate) fn bump_version() -> Workflow {
+pub(crate) fn bump_version(args: &GenerateWorkflowArgs) -> Workflow {
let (determine_bump_type, bump_type) = determine_bump_type();
let bump_type = bump_type.as_job_output(&determine_bump_type);
- let call_bump_version = call_bump_version(&determine_bump_type, bump_type);
+ let call_bump_version = call_bump_version(args.sha.as_ref(), &determine_bump_type, bump_type);
named::workflow()
.on(Event::default()
@@ -32,6 +33,7 @@ pub(crate) fn bump_version() -> Workflow {
}
pub(crate) fn call_bump_version(
+ target_ref: Option<&GitSha>,
depending_job: &NamedJob,
bump_type: JobOutput,
) -> NamedJob<UsesJob> {
@@ -51,7 +53,7 @@ pub(crate) fn call_bump_version(
"zed-industries",
"zed",
".github/workflows/extension_bump.yml",
- "main",
+ target_ref.map_or("main", AsRef::as_ref),
)
.add_need(depending_job.name.clone())
.with(
@@ -1,12 +1,13 @@
use gh_workflow::{Event, Job, Level, Permissions, PullRequest, Push, UsesJob, Workflow};
use crate::tasks::workflows::{
+ GenerateWorkflowArgs, GitSha,
steps::{NamedJob, named},
vars::one_workflow_per_non_main_branch_and_token,
};
-pub(crate) fn run_tests() -> Workflow {
- let call_extension_tests = call_extension_tests();
+pub(crate) fn run_tests(args: &GenerateWorkflowArgs) -> Workflow {
+ let call_extension_tests = call_extension_tests(args.sha.as_ref());
named::workflow()
.on(Event::default()
.pull_request(PullRequest::default().add_branch("**"))
@@ -15,14 +16,14 @@ pub(crate) fn run_tests() -> Workflow {
.add_job(call_extension_tests.name, call_extension_tests.job)
}
-pub(crate) fn call_extension_tests() -> NamedJob<UsesJob> {
+pub(crate) fn call_extension_tests(target_ref: Option<&GitSha>) -> NamedJob<UsesJob> {
let job = Job::default()
.permissions(Permissions::default().contents(Level::Read))
.uses(
"zed-industries",
"zed",
".github/workflows/extension_tests.yml",
- "main",
+ target_ref.map_or("main", AsRef::as_ref),
);
named::job(job)
@@ -131,22 +131,12 @@ impl From<CheckoutStep> for Step<Use> {
FetchDepth::Full => step.add_with(("fetch-depth", 0)),
FetchDepth::Custom(depth) => step.add_with(("fetch-depth", depth)),
})
- .map(|step| match value.token {
- Some(token) => step.add_with(("token", token)),
- None => step,
- })
- .map(|step| match value.path {
- Some(path) => step.add_with(("path", path)),
- None => step,
- })
- .map(|step| match value.repository {
- Some(repository) => step.add_with(("repository", repository)),
- None => step,
- })
- .map(|step| match value.ref_ {
- Some(ref_) => step.add_with(("ref", ref_)),
- None => step,
+ .when_some(value.path, |step, path| step.add_with(("path", path)))
+ .when_some(value.repository, |step, repository| {
+ step.add_with(("repository", repository))
})
+ .when_some(value.ref_, |step, ref_| step.add_with(("ref", ref_)))
+ .when_some(value.token, |step, token| step.add_with(("token", token)))
}
}