// project_command.rs

  1use super::{
  2    create_label_for_command, search_command::add_search_result_section, SlashCommand,
  3    SlashCommandOutput,
  4};
  5use crate::PromptBuilder;
  6use anyhow::{anyhow, Result};
  7use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection, SlashCommandResult};
  8use feature_flags::FeatureFlag;
  9use gpui::{AppContext, Task, WeakView, WindowContext};
 10use language::{Anchor, CodeLabel, LspAdapterDelegate};
 11use language_model::{LanguageModelRegistry, LanguageModelTool};
 12use schemars::JsonSchema;
 13use semantic_index::SemanticDb;
 14use serde::Deserialize;
 15
/// Feature flag gating the `/project` slash command.
pub struct ProjectSlashCommandFeatureFlag;

impl FeatureFlag for ProjectSlashCommandFeatureFlag {
    // Flag key looked up by the feature-flag system.
    const NAME: &'static str = "project-slash-command";
}
 21
 22use std::{
 23    fmt::Write as _,
 24    ops::DerefMut,
 25    sync::{atomic::AtomicBool, Arc},
 26};
 27
 28use ui::prelude::*;
 29use workspace::Workspace;
 30
/// The `/project` slash command: asks the active language model for semantic
/// search queries derived from the conversation, then runs them against the
/// project's semantic index and inserts the results as context.
pub struct ProjectSlashCommand {
    // Used to render the prompt that asks the model for search queries.
    prompt_builder: Arc<PromptBuilder>,
}
 34
 35impl ProjectSlashCommand {
 36    pub fn new(prompt_builder: Arc<PromptBuilder>) -> Self {
 37        Self { prompt_builder }
 38    }
 39}
 40
impl SlashCommand for ProjectSlashCommand {
    /// Name the user types after the slash: `/project`.
    fn name(&self) -> String {
        "project".into()
    }

    /// Completion-menu label (command name, no arguments).
    fn label(&self, cx: &AppContext) -> CodeLabel {
        create_label_for_command("project", &[], cx)
    }

    fn description(&self) -> String {
        "Generate a semantic search based on context".into()
    }

    fn icon(&self) -> IconName {
        IconName::Folder
    }

    // Menu text mirrors the description.
    fn menu_text(&self) -> String {
        self.description()
    }

    // The command derives its queries from the conversation, so it takes no
    // explicit argument.
    fn requires_argument(&self) -> bool {
        false
    }

    /// No argument completions: the command is argument-less.
    fn complete_argument(
        self: Arc<Self>,
        _arguments: &[String],
        _cancel: Arc<AtomicBool>,
        _workspace: Option<WeakView<Workspace>>,
        _cx: &mut WindowContext,
    ) -> Task<Result<Vec<ArgumentCompletion>>> {
        Task::ready(Ok(Vec::new()))
    }

    /// Runs the command:
    /// 1. asks the active language model (via the `SearchQueries` tool) for
    ///    semantic search queries based on the context buffer's text,
    /// 2. runs those queries against the project's semantic index,
    /// 3. formats the loaded results into labeled output sections.
    fn run(
        self: Arc<Self>,
        _arguments: &[String],
        _context_slash_command_output_sections: &[SlashCommandOutputSection<Anchor>],
        context_buffer: language::BufferSnapshot,
        workspace: WeakView<Workspace>,
        _delegate: Option<Arc<dyn LspAdapterDelegate>>,
        cx: &mut WindowContext,
    ) -> Task<SlashCommandResult> {
        // Snapshot everything we need from the app state before spawning.
        let model_registry = LanguageModelRegistry::read_global(cx);
        let current_model = model_registry.active_model();
        let prompt_builder = self.prompt_builder.clone();

        let Some(workspace) = workspace.upgrade() else {
            return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
        };
        let project = workspace.read(cx).project().clone();
        let fs = project.read(cx).fs().clone();
        // Obtain (or fail without) the semantic index for this project.
        let Some(project_index) =
            cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx))
        else {
            return Task::ready(Err(anyhow::anyhow!("no project indexer")));
        };

        cx.spawn(|mut cx| async move {
            // A model must be selected; checked here so the error surfaces
            // from the returned task rather than synchronously.
            let current_model = current_model.ok_or_else(|| anyhow!("no model selected"))?;

            let prompt =
                prompt_builder.generate_project_slash_command_prompt(context_buffer.text())?;

            // Ask the model to fill in the SearchQueries tool schema.
            let search_queries = current_model
                .use_tool::<SearchQueries>(
                    language_model::LanguageModelRequest {
                        messages: vec![language_model::LanguageModelRequestMessage {
                            role: language_model::Role::User,
                            content: vec![language_model::MessageContent::Text(prompt)],
                            cache: false,
                        }],
                        tools: vec![],
                        stop: vec![],
                        temperature: None,
                    },
                    cx.deref_mut(),
                )
                .await?
                .search_queries;

            // Run all queries against the index, capped at 25 results total.
            let results = project_index
                .read_with(&cx, |project_index, cx| {
                    project_index.search(search_queries.clone(), 25, cx)
                })?
                .await?;

            let results = SemanticDb::load_results(results, &fs, &cx).await?;

            // Formatting is pure string work; do it off the main thread.
            cx.background_executor()
                .spawn(async move {
                    let mut output = "Project context:\n".to_string();
                    let mut sections = Vec::new();

                    // Group results under the query that produced them
                    // (results carry the index of their originating query).
                    for (ix, query) in search_queries.into_iter().enumerate() {
                        let start_ix = output.len();
                        writeln!(&mut output, "Results for {query}:").unwrap();
                        let mut has_results = false;
                        for result in &results {
                            if result.query_index == ix {
                                add_search_result_section(result, &mut output, &mut sections);
                                has_results = true;
                            }
                        }
                        if has_results {
                            sections.push(SlashCommandOutputSection {
                                range: start_ix..output.len(),
                                icon: IconName::MagnifyingGlass,
                                label: query.into(),
                                metadata: None,
                            });
                            output.push('\n');
                        } else {
                            // Drop the "Results for …" header for empty queries.
                            output.truncate(start_ix);
                        }
                    }

                    // Wrap everything in a single top-level section.
                    sections.push(SlashCommandOutputSection {
                        range: 0..output.len(),
                        icon: IconName::Book,
                        label: "Project context".into(),
                        metadata: None,
                    });

                    Ok(SlashCommandOutput {
                        text: output,
                        sections,
                        run_commands_in_text: true,
                    }
                    .to_event_stream())
                })
                .await
        })
    }
}
177
178#[derive(JsonSchema, Deserialize)]
179struct SearchQueries {
180    /// An array of semantic search queries.
181    ///
182    /// These queries will be used to search the user's codebase.
183    /// The function can only accept 4 queries, otherwise it will error.
184    /// As such, it's important that you limit the length of the search_queries array to 5 queries or less.
185    search_queries: Vec<String>,
186}
187
188impl LanguageModelTool for SearchQueries {
189    fn name() -> String {
190        "search_queries".to_string()
191    }
192
193    fn description() -> String {
194        "Generate semantic search queries based on context".to_string()
195    }
196}