use super::{
    diagnostics_command::write_single_file_diagnostics,
    file_command::{build_entry_output_section, codeblock_fence_for_path},
    SlashCommand, SlashCommandOutput,
};
use anyhow::{Context, Result};
use assistant_slash_command::ArgumentCompletion;
use collections::{HashMap, HashSet};
use editor::Editor;
use futures::future::join_all;
use gpui::{Entity, Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
use std::{
    fmt::Write,
    path::PathBuf,
    sync::{atomic::AtomicBool, Arc},
};
use ui::WindowContext;
use workspace::Workspace;

/// `/tab` slash command: inserts the contents of open tabs (the active tab by default).
pub(crate) struct TabSlashCommand;

/// Special argument that selects every open tab instead of a single path.
const ALL_TABS_COMPLETION_ITEM: &str = "all";

impl SlashCommand for TabSlashCommand {
    fn name(&self) -> String {
        "tab".into()
    }

    fn description(&self) -> String {
        "insert open tabs (active tab by default)".to_owned()
    }

    fn menu_text(&self) -> String {
        "Insert Open Tabs".to_owned()
    }

    fn requires_argument(&self) -> bool {
        false
    }

    fn complete_argument(
        self: Arc<Self>,
        arguments: &[String],
        cancel: Arc<AtomicBool>,
        workspace: Option<WeakView<Workspace>>,
        cx: &mut WindowContext,
    ) -> Task<Result<Vec<ArgumentCompletion>>> {
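        // Collect the arguments that were already supplied so they are not suggested
        // again; once the "all" argument appears, every tab is covered and no further
        // completions are needed.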
        let mut has_all_tabs_completion_item = false;
        let argument_set = arguments
            .iter()
            .filter(|argument| {
                if has_all_tabs_completion_item || ALL_TABS_COMPLETION_ITEM == argument.as_str() {
                    has_all_tabs_completion_item = true;
                    false
                } else {
                    true
                }
            })
            .cloned()
            .collect::<HashSet<_>>();
        if has_all_tabs_completion_item {
            return Task::ready(Ok(Vec::new()));
        }
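        // The last (possibly partial) argument is the query to fuzzy-match against the
        // paths of open tabs.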
        let current_query = arguments.last().cloned().unwrap_or_default();
        let tab_items_search =
            tab_items_for_queries(workspace, &[current_query], cancel, false, cx);
        cx.spawn(|_| async move {
            let tab_items = tab_items_search.await?;
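            // Mark per-tab completions as runnable only when there is a single match;
            // the "all" completion offered below is always runnable.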
            let run_command = tab_items.len() == 1;
            let tab_completion_items = tab_items.into_iter().filter_map(|(path, ..)| {
                let path_string = path.as_deref()?.to_string_lossy().to_string();
                if argument_set.contains(&path_string) {
                    return None;
                }
                Some(ArgumentCompletion {
                    label: path_string.clone().into(),
                    new_text: path_string,
                    replace_previous_arguments: false,
                    run_command,
                })
            });

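            // Offer the "all" completion first, followed by one completion per open tab.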
            Ok(Some(ArgumentCompletion {
                label: ALL_TABS_COMPLETION_ITEM.into(),
                new_text: ALL_TABS_COMPLETION_ITEM.to_owned(),
                replace_previous_arguments: false,
                run_command: true,
            })
            .into_iter()
            .chain(tab_completion_items)
            .collect::<Vec<_>>())
        })
    }

    /// Emits the matching tabs' contents (the active tab when no arguments are given).
    fn run(
        self: Arc<Self>,
        arguments: &[String],
        workspace: WeakView<Workspace>,
        _delegate: Option<Arc<dyn LspAdapterDelegate>>,
        cx: &mut WindowContext,
    ) -> Task<Result<SlashCommandOutput>> {
        let tab_items_search = tab_items_for_queries(
            Some(workspace),
            arguments,
            Arc::new(AtomicBool::new(false)),
            true,
            cx,
        );

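        // Build the output on the background executor: each tab becomes a fenced code
        // block, plus per-file diagnostics when present.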
        cx.background_executor().spawn(async move {
            let mut sections = Vec::new();
            let mut text = String::new();
            let mut has_diagnostics = false;
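            // Append each buffer inside a code fence and record a section spanning it.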
            for (full_path, buffer, _) in tab_items_search.await? {
                let section_start_ix = text.len();
                text.push_str(&codeblock_fence_for_path(full_path.as_deref(), None));
                for chunk in buffer.as_rope().chunks() {
                    text.push_str(chunk);
                }
                if !text.ends_with('\n') {
                    text.push('\n');
                }
                writeln!(text, "```").unwrap();
                if write_single_file_diagnostics(&mut text, full_path.as_deref(), &buffer) {
                    has_diagnostics = true;
                }
                if !text.ends_with('\n') {
                    text.push('\n');
                }

                // Exclude the trailing newline from the section's range.
                let section_end_ix = text.len() - 1;
                sections.push(build_entry_output_section(
                    section_start_ix..section_end_ix,
                    full_path.as_deref(),
                    false,
                    None,
                ));
            }

            Ok(SlashCommandOutput {
                text,
                sections,
                run_commands_in_text: has_diagnostics,
            })
        })
    }
}

/// Collects snapshots of open singleton editor buffers, with their resolved paths and
/// activation timestamps, filtered by the given queries.
fn tab_items_for_queries(
    workspace: Option<WeakView<Workspace>>,
    queries: &[String],
    cancel: Arc<AtomicBool>,
    strict_match: bool,
    cx: &mut WindowContext,
) -> Task<anyhow::Result<Vec<(Option<PathBuf>, BufferSnapshot, usize)>>> {
    let empty_query = queries.is_empty() || queries.iter().all(|query| query.trim().is_empty());
    let queries = queries.to_owned();
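    // Gather buffer snapshots on the main thread, then match them against the queries
    // on the background executor.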
    cx.spawn(|mut cx| async move {
        let mut open_buffers =
            workspace
                .context("no workspace")?
                .update(&mut cx, |workspace, cx| {
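                    // When the command is run without arguments, use only the active
                    // editor's buffer.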
                    if strict_match && empty_query {
                        let active_editor = workspace
                            .active_item(cx)
                            .context("no active item")?
                            .downcast::<Editor>()
                            .context("active item is not an editor")?;
                        let snapshot = active_editor
                            .read(cx)
                            .buffer()
                            .read(cx)
                            .as_singleton()
                            .context("active editor is not a singleton buffer")?
                            .read(cx)
                            .snapshot();
                        let full_path = snapshot.resolve_file_path(cx, true);
                        return anyhow::Ok(vec![(full_path, snapshot, 0)]);
                    }

                    let mut timestamps_by_entity_id = HashMap::default();
                    let mut open_buffers = Vec::new();

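                    // Record when each pane item was last activated so the results can
                    // be ordered by that timestamp.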
                    for pane in workspace.panes() {
                        let pane = pane.read(cx);
                        for entry in pane.activation_history() {
                            timestamps_by_entity_id.insert(entry.entity_id, entry.timestamp);
                        }
                    }

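                    // Snapshot every open singleton editor buffer that appears in the
                    // activation history.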
                    for editor in workspace.items_of_type::<Editor>(cx) {
                        if let Some(buffer) = editor.read(cx).buffer().read(cx).as_singleton() {
                            if let Some(timestamp) =
                                timestamps_by_entity_id.get(&editor.entity_id())
                            {
                                let snapshot = buffer.read(cx).snapshot();
                                let full_path = snapshot.resolve_file_path(cx, true);
                                open_buffers.push((full_path, snapshot, *timestamp));
                            }
                        }
                    }

                    Ok(open_buffers)
                })??;

        let background_executor = cx.background_executor().clone();
        cx.background_executor()
            .spawn(async move {
                open_buffers.sort_by_key(|(_, _, timestamp)| *timestamp);
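                // An empty query, or an explicit "all" argument, selects every open tab.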
                if empty_query
                    || queries
                        .iter()
                        .any(|query| query == ALL_TABS_COMPLETION_ITEM)
                {
                    return Ok(open_buffers);
                }

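                // Strict matching (used when running the command) requires an exact path
                // match; argument completion uses fuzzy matching instead.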
                let matched_items = if strict_match {
                    let match_candidates = open_buffers
                        .iter()
                        .enumerate()
                        .filter_map(|(id, (full_path, ..))| {
                            let path_string = full_path.as_deref()?.to_string_lossy().to_string();
                            Some((id, path_string))
                        })
                        .fold(HashMap::default(), |mut candidates, (id, path_string)| {
                            candidates
                                .entry(path_string)
                                .or_insert_with(|| Vec::new())
                                .push(id);
                            candidates
                        });

                    queries
                        .iter()
                        .filter_map(|query| match_candidates.get(query))
                        .flatten()
                        .copied()
                        .filter_map(|id| open_buffers.get(id))
                        .cloned()
                        .collect()
                } else {
                    let match_candidates = open_buffers
                        .iter()
                        .enumerate()
                        .filter_map(|(id, (full_path, ..))| {
                            let path_string = full_path.as_deref()?.to_string_lossy().to_string();
                            Some(fuzzy::StringMatchCandidate {
                                id,
                                char_bag: path_string.as_str().into(),
                                string: path_string,
                            })
                        })
                        .collect::<Vec<_>>();
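                    // Deduplicate buffers that are matched by more than one query.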
                    let mut processed_matches = HashSet::default();
                    let file_queries = queries.iter().map(|query| {
                        fuzzy::match_strings(
                            &match_candidates,
                            query,
                            true,
                            usize::MAX,
                            &cancel,
                            background_executor.clone(),
                        )
                    });

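                    // Run every fuzzy query, concatenating matches in query order.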
                    join_all(file_queries)
                        .await
                        .into_iter()
                        .flatten()
                        .filter(|string_match| processed_matches.insert(string_match.candidate_id))
                        .filter_map(|string_match| open_buffers.get(string_match.candidate_id))
                        .cloned()
                        .collect()
                };
                Ok(matched_items)
            })
            .await
    })
}