1use super::{
2 diagnostics_command::write_single_file_diagnostics,
3 file_command::{build_entry_output_section, codeblock_fence_for_path},
4 SlashCommand, SlashCommandOutput,
5};
6use anyhow::{Context, Result};
7use assistant_slash_command::ArgumentCompletion;
8use collections::{HashMap, HashSet};
9use editor::Editor;
10use futures::future::join_all;
11use gpui::{Entity, Task, WeakView};
12use language::{BufferSnapshot, LspAdapterDelegate};
13use std::{
14 fmt::Write,
15 path::PathBuf,
16 sync::{atomic::AtomicBool, Arc},
17};
18use ui::WindowContext;
19use workspace::Workspace;
20
/// `/tab` slash command: inserts the contents of open editor tabs into the
/// prompt (the active tab by default, per [`SlashCommand::description`]).
pub(crate) struct TabSlashCommand;

/// Special argument value that selects every open tab at once.
const ALL_TABS_COMPLETION_ITEM: &str = "all";
24
impl SlashCommand for TabSlashCommand {
    /// Keyword typed after `/` to invoke this command.
    fn name(&self) -> String {
        "tab".into()
    }

    /// Short description shown alongside the command in pickers.
    fn description(&self) -> String {
        "insert open tabs (active tab by default)".to_owned()
    }

    /// Label used for the command in menus.
    fn menu_text(&self) -> String {
        "Insert Open Tabs".to_owned()
    }

    /// No argument is required: with none given, the command falls back to
    /// the active tab (see `run`, which passes `strict_match = true`).
    fn requires_argument(&self) -> bool {
        false
    }

    /// Offers tab-path completions for the in-progress last argument, plus a
    /// leading "all" item that expands to every open tab.
    fn complete_argument(
        self: Arc<Self>,
        arguments: &[String],
        cancel: Arc<AtomicBool>,
        workspace: Option<WeakView<Workspace>>,
        cx: &mut WindowContext,
    ) -> Task<Result<Vec<ArgumentCompletion>>> {
        // Collect already-supplied arguments so their paths are not suggested
        // again. The filter doubles as a detector for a literal "all"
        // argument; note that once "all" is seen, it and every argument after
        // it are excluded from the set.
        let mut has_all_tabs_completion_item = false;
        let argument_set = arguments
            .iter()
            .filter(|argument| {
                if has_all_tabs_completion_item || ALL_TABS_COMPLETION_ITEM == argument.as_str() {
                    has_all_tabs_completion_item = true;
                    false
                } else {
                    true
                }
            })
            .cloned()
            .collect::<HashSet<_>>();
        // "all" already covers every tab, so no further completions are useful.
        if has_all_tabs_completion_item {
            return Task::ready(Ok(Vec::new()));
        }
        // Only the last argument is the query currently being completed.
        let current_query = arguments.last().cloned().unwrap_or_default();
        // Fuzzy search (strict_match = false) over open tabs for that query.
        let tab_items_search =
            tab_items_for_queries(workspace, &[current_query], cancel, false, cx);
        cx.spawn(|_| async move {
            let tab_items = tab_items_search.await?;
            // When exactly one tab matches, accepting the completion can run
            // the command immediately.
            let run_command = tab_items.len() == 1;
            let tab_completion_items = tab_items.into_iter().filter_map(|(path, ..)| {
                // Tabs without a resolvable path are skipped entirely.
                let path_string = path.as_deref()?.to_string_lossy().to_string();
                // Skip tabs the user has already listed as arguments.
                if argument_set.contains(&path_string) {
                    return None;
                }
                Some(ArgumentCompletion {
                    label: path_string.clone().into(),
                    new_text: path_string,
                    run_command,
                })
            });

            // The "all" item is always offered first, ahead of per-tab items,
            // and always runs the command when accepted.
            Ok(Some(ArgumentCompletion {
                label: ALL_TABS_COMPLETION_ITEM.into(),
                new_text: ALL_TABS_COMPLETION_ITEM.to_owned(),
                run_command: true,
            })
            .into_iter()
            .chain(tab_completion_items)
            .collect::<Vec<_>>())
        })
    }

    /// Renders each selected tab's buffer as a fenced code block (with any
    /// diagnostics appended after the fence) and returns one output section
    /// per tab, covering that tab's byte range in the combined text.
    fn run(
        self: Arc<Self>,
        arguments: &[String],
        workspace: WeakView<Workspace>,
        _delegate: Option<Arc<dyn LspAdapterDelegate>>,
        cx: &mut WindowContext,
    ) -> Task<Result<SlashCommandOutput>> {
        // strict_match = true: arguments must name tab paths exactly (or be
        // empty/"all", which select the active tab / every tab respectively).
        // Cancellation is never requested here, hence the fresh AtomicBool.
        let tab_items_search = tab_items_for_queries(
            Some(workspace),
            arguments,
            Arc::new(AtomicBool::new(false)),
            true,
            cx,
        );

        cx.background_executor().spawn(async move {
            let mut sections = Vec::new();
            let mut text = String::new();
            let mut has_diagnostics = false;
            for (full_path, buffer, _) in tab_items_search.await? {
                // Section ranges are byte offsets into `text`, so record the
                // start before appending anything for this tab.
                let section_start_ix = text.len();
                text.push_str(&codeblock_fence_for_path(full_path.as_deref(), None));
                for chunk in buffer.as_rope().chunks() {
                    text.push_str(chunk);
                }
                // Ensure the closing fence lands on its own line even when the
                // buffer does not end with a newline.
                if !text.ends_with('\n') {
                    text.push('\n');
                }
                writeln!(text, "```").unwrap();
                // NOTE(review): helper appears to return true when it appended
                // diagnostics for this file — confirm against its definition.
                if write_single_file_diagnostics(&mut text, full_path.as_deref(), &buffer) {
                    has_diagnostics = true;
                }
                if !text.ends_with('\n') {
                    text.push('\n');
                }

                // Exclude the trailing newline from the section's range.
                let section_end_ix = text.len() - 1;
                sections.push(build_entry_output_section(
                    section_start_ix..section_end_ix,
                    full_path.as_deref(),
                    false,
                    None,
                ));
            }

            Ok(SlashCommandOutput {
                text,
                sections,
                // Set only when diagnostics were written into the text above.
                run_commands_in_text: has_diagnostics,
            })
        })
    }
}
147
148fn tab_items_for_queries(
149 workspace: Option<WeakView<Workspace>>,
150 queries: &[String],
151 cancel: Arc<AtomicBool>,
152 strict_match: bool,
153 cx: &mut WindowContext,
154) -> Task<anyhow::Result<Vec<(Option<PathBuf>, BufferSnapshot, usize)>>> {
155 let empty_query = queries.is_empty() || queries.iter().all(|query| query.trim().is_empty());
156 let queries = queries.to_owned();
157 cx.spawn(|mut cx| async move {
158 let mut open_buffers =
159 workspace
160 .context("no workspace")?
161 .update(&mut cx, |workspace, cx| {
162 if strict_match && empty_query {
163 let active_editor = workspace
164 .active_item(cx)
165 .context("no active item")?
166 .downcast::<Editor>()
167 .context("active item is not an editor")?;
168 let snapshot = active_editor
169 .read(cx)
170 .buffer()
171 .read(cx)
172 .as_singleton()
173 .context("active editor is not a singleton buffer")?
174 .read(cx)
175 .snapshot();
176 let full_path = snapshot.resolve_file_path(cx, true);
177 return anyhow::Ok(vec![(full_path, snapshot, 0)]);
178 }
179
180 let mut timestamps_by_entity_id = HashMap::default();
181 let mut open_buffers = Vec::new();
182
183 for pane in workspace.panes() {
184 let pane = pane.read(cx);
185 for entry in pane.activation_history() {
186 timestamps_by_entity_id.insert(entry.entity_id, entry.timestamp);
187 }
188 }
189
190 for editor in workspace.items_of_type::<Editor>(cx) {
191 if let Some(buffer) = editor.read(cx).buffer().read(cx).as_singleton() {
192 if let Some(timestamp) =
193 timestamps_by_entity_id.get(&editor.entity_id())
194 {
195 let snapshot = buffer.read(cx).snapshot();
196 let full_path = snapshot.resolve_file_path(cx, true);
197 open_buffers.push((full_path, snapshot, *timestamp));
198 }
199 }
200 }
201
202 Ok(open_buffers)
203 })??;
204
205 let background_executor = cx.background_executor().clone();
206 cx.background_executor()
207 .spawn(async move {
208 open_buffers.sort_by_key(|(_, _, timestamp)| *timestamp);
209 if empty_query
210 || queries
211 .iter()
212 .any(|query| query == ALL_TABS_COMPLETION_ITEM)
213 {
214 return Ok(open_buffers);
215 }
216
217 let matched_items = if strict_match {
218 let match_candidates = open_buffers
219 .iter()
220 .enumerate()
221 .filter_map(|(id, (full_path, ..))| {
222 let path_string = full_path.as_deref()?.to_string_lossy().to_string();
223 Some((id, path_string))
224 })
225 .fold(HashMap::default(), |mut candidates, (id, path_string)| {
226 candidates
227 .entry(path_string)
228 .or_insert_with(|| Vec::new())
229 .push(id);
230 candidates
231 });
232
233 queries
234 .iter()
235 .filter_map(|query| match_candidates.get(query))
236 .flatten()
237 .copied()
238 .filter_map(|id| open_buffers.get(id))
239 .cloned()
240 .collect()
241 } else {
242 let match_candidates = open_buffers
243 .iter()
244 .enumerate()
245 .filter_map(|(id, (full_path, ..))| {
246 let path_string = full_path.as_deref()?.to_string_lossy().to_string();
247 Some(fuzzy::StringMatchCandidate {
248 id,
249 char_bag: path_string.as_str().into(),
250 string: path_string,
251 })
252 })
253 .collect::<Vec<_>>();
254 let mut processed_matches = HashSet::default();
255 let file_queries = queries.iter().map(|query| {
256 fuzzy::match_strings(
257 &match_candidates,
258 query,
259 true,
260 usize::MAX,
261 &cancel,
262 background_executor.clone(),
263 )
264 });
265
266 join_all(file_queries)
267 .await
268 .into_iter()
269 .flatten()
270 .filter(|string_match| processed_matches.insert(string_match.candidate_id))
271 .filter_map(|string_match| open_buffers.get(string_match.candidate_id))
272 .cloned()
273 .collect()
274 };
275 Ok(matched_items)
276 })
277 .await
278 })
279}