1use super::{
2 create_label_for_command, search_command::add_search_result_section, SlashCommand,
3 SlashCommandOutput,
4};
5use crate::PromptBuilder;
6use anyhow::{anyhow, Result};
7use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection, SlashCommandResult};
8use feature_flags::FeatureFlag;
9use gpui::{AppContext, Task, WeakView, WindowContext};
10use language::{Anchor, CodeLabel, LspAdapterDelegate};
11use language_model::{LanguageModelRegistry, LanguageModelTool};
12use schemars::JsonSchema;
13use semantic_index::SemanticDb;
14use serde::Deserialize;
15
/// Feature flag gating availability of the `/project` slash command.
pub struct ProjectSlashCommandFeatureFlag;
17
impl FeatureFlag for ProjectSlashCommandFeatureFlag {
    // Flag identifier used to gate the `/project` command.
    const NAME: &'static str = "project-slash-command";
}
21
22use std::{
23 fmt::Write as _,
24 ops::DerefMut,
25 sync::{atomic::AtomicBool, Arc},
26};
27use ui::{BorrowAppContext as _, IconName};
28use workspace::Workspace;
29
/// The `/project` slash command: asks the active language model for semantic
/// search queries derived from the context buffer, runs them against the
/// project's semantic index, and renders the matches as output sections.
pub struct ProjectSlashCommand {
    // Renders the prompt that asks the model to generate search queries.
    prompt_builder: Arc<PromptBuilder>,
}
33
34impl ProjectSlashCommand {
35 pub fn new(prompt_builder: Arc<PromptBuilder>) -> Self {
36 Self { prompt_builder }
37 }
38}
39
impl SlashCommand for ProjectSlashCommand {
    /// Keyword the user types after `/` to invoke this command.
    fn name(&self) -> String {
        "project".into()
    }

    /// Picker label; the command renders with no arguments.
    fn label(&self, cx: &AppContext) -> CodeLabel {
        create_label_for_command("project", &[], cx)
    }

    fn description(&self) -> String {
        "Generate a semantic search based on context".into()
    }

    /// The menu entry reuses the description verbatim.
    fn menu_text(&self) -> String {
        self.description()
    }

    /// Queries are derived from the context buffer, so the user supplies
    /// no argument.
    fn requires_argument(&self) -> bool {
        false
    }

    /// No completions: the command takes no arguments.
    fn complete_argument(
        self: Arc<Self>,
        _arguments: &[String],
        _cancel: Arc<AtomicBool>,
        _workspace: Option<WeakView<Workspace>>,
        _cx: &mut WindowContext,
    ) -> Task<Result<Vec<ArgumentCompletion>>> {
        Task::ready(Ok(Vec::new()))
    }

    /// Runs the command:
    /// 1. builds a prompt from the context buffer's text,
    /// 2. asks the active model — via the `SearchQueries` tool — for a list
    ///    of semantic search queries,
    /// 3. executes the queries against the project's semantic index, and
    /// 4. formats one section per query with results, wrapped in a single
    ///    "Project context" section spanning the whole output.
    ///
    /// Fails early if the workspace was dropped or the project has no
    /// semantic indexer; fails inside the task if no model is selected.
    fn run(
        self: Arc<Self>,
        _arguments: &[String],
        _context_slash_command_output_sections: &[SlashCommandOutputSection<Anchor>],
        context_buffer: language::BufferSnapshot,
        workspace: WeakView<Workspace>,
        _delegate: Option<Arc<dyn LspAdapterDelegate>>,
        cx: &mut WindowContext,
    ) -> Task<SlashCommandResult> {
        // Capture the active model and prompt builder on the main thread
        // before moving into the async task.
        let model_registry = LanguageModelRegistry::read_global(cx);
        let current_model = model_registry.active_model();
        let prompt_builder = self.prompt_builder.clone();

        let Some(workspace) = workspace.upgrade() else {
            return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
        };
        let project = workspace.read(cx).project().clone();
        let fs = project.read(cx).fs().clone();
        // The semantic index must already be tracking this project.
        let Some(project_index) =
            cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx))
        else {
            return Task::ready(Err(anyhow::anyhow!("no project indexer")));
        };

        cx.spawn(|mut cx| async move {
            let current_model = current_model.ok_or_else(|| anyhow!("no model selected"))?;

            let prompt =
                prompt_builder.generate_project_slash_command_prompt(context_buffer.text())?;

            // Tool-calling lets the model's reply deserialize directly into
            // `SearchQueries`.
            let search_queries = current_model
                .use_tool::<SearchQueries>(
                    language_model::LanguageModelRequest {
                        messages: vec![language_model::LanguageModelRequestMessage {
                            role: language_model::Role::User,
                            content: vec![language_model::MessageContent::Text(prompt)],
                            cache: false,
                        }],
                        tools: vec![],
                        stop: vec![],
                        temperature: None,
                    },
                    cx.deref_mut(),
                )
                .await?
                .search_queries;

            // Run all queries together; each result carries a `query_index`
            // identifying the query that produced it. 25 is the result
            // limit — presumably across all queries; confirm against
            // the project index's `search` implementation.
            let results = project_index
                .read_with(&cx, |project_index, cx| {
                    project_index.search(search_queries.clone(), 25, cx)
                })?
                .await?;

            // Load the matched excerpts from disk.
            let results = SemanticDb::load_results(results, &fs, &cx).await?;

            // Assemble the output text off the main thread.
            cx.background_executor()
                .spawn(async move {
                    let mut output = "Project context:\n".to_string();
                    let mut sections = Vec::new();

                    for (ix, query) in search_queries.into_iter().enumerate() {
                        let start_ix = output.len();
                        writeln!(&mut output, "Results for {query}:").unwrap();
                        let mut has_results = false;
                        for result in &results {
                            if result.query_index == ix {
                                add_search_result_section(result, &mut output, &mut sections);
                                has_results = true;
                            }
                        }
                        if has_results {
                            // Wrap this query's header plus its results in a section.
                            sections.push(SlashCommandOutputSection {
                                range: start_ix..output.len(),
                                icon: IconName::MagnifyingGlass,
                                label: query.into(),
                                metadata: None,
                            });
                            output.push('\n');
                        } else {
                            // Drop the "Results for …" header for queries with no hits.
                            output.truncate(start_ix);
                        }
                    }

                    // One outer section spanning the entire output.
                    sections.push(SlashCommandOutputSection {
                        range: 0..output.len(),
                        icon: IconName::Book,
                        label: "Project context".into(),
                        metadata: None,
                    });

                    Ok(SlashCommandOutput {
                        text: output,
                        sections,
                        // NOTE(review): presumably lets slash commands embedded
                        // in the output run when inserted — confirm upstream.
                        run_commands_in_text: true,
                    }
                    .to_event_stream())
                })
                .await
        })
    }
}
172
173#[derive(JsonSchema, Deserialize)]
174struct SearchQueries {
175 /// An array of semantic search queries.
176 ///
177 /// These queries will be used to search the user's codebase.
178 /// The function can only accept 4 queries, otherwise it will error.
179 /// As such, it's important that you limit the length of the search_queries array to 5 queries or less.
180 search_queries: Vec<String>,
181}
182
183impl LanguageModelTool for SearchQueries {
184 fn name() -> String {
185 "search_queries".to_string()
186 }
187
188 fn description() -> String {
189 "Generate semantic search queries based on context".to_string()
190 }
191}