1use anyhow::Result;
2use assistant_tooling::{LanguageModelTool, ToolOutput};
3use collections::BTreeMap;
4use gpui::{prelude::*, Model, Task};
5use project::ProjectPath;
6use schemars::JsonSchema;
7use semantic_index::{ProjectIndex, Status};
8use serde::Deserialize;
9use serde_json::Value;
10use std::{fmt::Write as _, ops::Range};
11use ui::{div, prelude::*, CollapsibleContainer, Color, Icon, IconName, Label, WindowContext};
12
// Fallback for `CodebaseQuery::limit` when the model omits it (see `execute`).
const DEFAULT_SEARCH_LIMIT: usize = 20;
14
/// Tool exposing semantic codebase search to the language model, backed by
/// the project-wide [`ProjectIndex`].
pub struct ProjectIndexTool {
    project_index: Model<ProjectIndex>,
}
18
// Note: Comments on a `LanguageModelTool::Input` become descriptions on the generated JSON schema as shown to the language model.
// Any changes or deletions to the `CodebaseQuery` comments will change model behavior.

// Input payload for the `query_codebase` tool. Deliberately left without a
// struct-level `///` doc comment: schemars would serialize it into the schema
// `description` and alter what the model sees. The `///` field comments below
// are likewise part of the model-facing schema — edit only to steer the model.
#[derive(Deserialize, JsonSchema)]
pub struct CodebaseQuery {
    /// Semantic search query
    query: String,
    /// Maximum number of results to return, defaults to 20
    limit: Option<usize>,
}
29
/// Chat-card view for one `query_codebase` invocation: shows the query that
/// ran and the (possibly failed) search output in a collapsible container.
pub struct ProjectIndexView {
    /// The input the model supplied for this tool call.
    input: CodebaseQuery,
    /// Search output, or the error produced while searching.
    output: Result<ProjectIndexOutput>,
    /// Stable id so gpui can track the collapsible container across frames.
    element_id: ElementId,
    /// Whether the collapsible header is currently expanded.
    expanded_header: bool,
}
36
/// Result of a codebase search: the index status captured when the query ran,
/// plus matched byte ranges grouped per file (each file's list is kept sorted
/// by range start — see `LanguageModelTool::execute`).
pub struct ProjectIndexOutput {
    status: Status,
    excerpts: BTreeMap<ProjectPath, Vec<Range<usize>>>,
}
41
42impl ProjectIndexView {
43 fn new(input: CodebaseQuery, output: Result<ProjectIndexOutput>) -> Self {
44 let element_id = ElementId::Name(nanoid::nanoid!().into());
45
46 Self {
47 input,
48 output,
49 element_id,
50 expanded_header: false,
51 }
52 }
53
54 fn toggle_header(&mut self, cx: &mut ViewContext<Self>) {
55 self.expanded_header = !self.expanded_header;
56 cx.notify();
57 }
58}
59
60impl Render for ProjectIndexView {
61 fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
62 let query = self.input.query.clone();
63
64 let result = &self.output;
65
66 let output = match result {
67 Err(err) => {
68 return div().child(Label::new(format!("Error: {}", err)).color(Color::Error));
69 }
70 Ok(output) => output,
71 };
72
73 let file_count = output.excerpts.len();
74
75 let header = h_flex()
76 .gap_2()
77 .child(Icon::new(IconName::File))
78 .child(format!(
79 "Read {} {}",
80 file_count,
81 if file_count == 1 { "file" } else { "files" }
82 ));
83
84 v_flex().gap_3().child(
85 CollapsibleContainer::new(self.element_id.clone(), self.expanded_header)
86 .start_slot(header)
87 .on_click(cx.listener(move |this, _, cx| {
88 this.toggle_header(cx);
89 }))
90 .child(
91 v_flex()
92 .gap_3()
93 .p_3()
94 .child(
95 h_flex()
96 .gap_2()
97 .child(Icon::new(IconName::MagnifyingGlass))
98 .child(Label::new(format!("`{}`", query)).color(Color::Muted)),
99 )
100 .child(
101 v_flex()
102 .gap_2()
103 .children(output.excerpts.keys().map(|path| {
104 h_flex().gap_2().child(Icon::new(IconName::File)).child(
105 Label::new(path.path.to_string_lossy().to_string())
106 .color(Color::Muted),
107 )
108 })),
109 ),
110 ),
111 )
112 }
113}
114
115impl ToolOutput for ProjectIndexView {
116 fn generate(
117 &self,
118 context: &mut assistant_tooling::ProjectContext,
119 _: &mut WindowContext,
120 ) -> String {
121 match &self.output {
122 Ok(output) => {
123 let mut body = "found results in the following paths:\n".to_string();
124
125 for (project_path, ranges) in &output.excerpts {
126 context.add_excerpts(project_path.clone(), ranges);
127 writeln!(&mut body, "* {}", &project_path.path.display()).unwrap();
128 }
129
130 if output.status != Status::Idle {
131 body.push_str("Still indexing. Results may be incomplete.\n");
132 }
133
134 body
135 }
136 Err(err) => format!("Error: {}", err),
137 }
138 }
139}
140
141impl ProjectIndexTool {
142 pub fn new(project_index: Model<ProjectIndex>) -> Self {
143 Self { project_index }
144 }
145}
146
impl LanguageModelTool for ProjectIndexTool {
    type Input = CodebaseQuery;
    type Output = ProjectIndexOutput;
    type View = ProjectIndexView;

    /// Tool name the model uses to invoke a codebase search.
    fn name(&self) -> String {
        "query_codebase".to_string()
    }

    /// Model-facing description of when and how to call this tool.
    fn description(&self) -> String {
        "Semantic search against the user's current codebase, returning excerpts related to the query by computing a dot product against embeddings of code chunks in the code base and an embedding of the query.".to_string()
    }

    /// Kicks off a semantic search and assembles the results into a
    /// per-file map of excerpt ranges.
    fn execute(&self, query: &Self::Input, cx: &mut WindowContext) -> Task<Result<Self::Output>> {
        let project_index = self.project_index.read(cx);
        // Snapshot the indexing status now so `generate` can flag results
        // produced while indexing was still in progress.
        let status = project_index.status();
        let search = project_index.search(
            query.query.clone(),
            query.limit.unwrap_or(DEFAULT_SEARCH_LIMIT),
            cx,
        );

        cx.spawn(|mut cx| async move {
            let search_results = search.await?;

            cx.update(|cx| {
                let mut output = ProjectIndexOutput {
                    status,
                    excerpts: Default::default(),
                };

                for search_result in search_results {
                    let path = ProjectPath {
                        worktree_id: search_result.worktree.read(cx).id(),
                        path: search_result.path.clone(),
                    };

                    // Keep each file's ranges sorted by start offset: both
                    // Ok (exact match) and Err (miss) give a valid insertion
                    // index, so duplicate starts are simply inserted adjacent.
                    let excerpts_for_path = output.excerpts.entry(path).or_default();
                    let ix = match excerpts_for_path
                        .binary_search_by_key(&search_result.range.start, |r| r.start)
                    {
                        Ok(ix) | Err(ix) => ix,
                    };
                    excerpts_for_path.insert(ix, search_result.range);
                }

                output
            })
        })
    }

    /// Wraps finished (or failed) output in a `ProjectIndexView`.
    fn output_view(
        input: Self::Input,
        output: Result<Self::Output>,
        cx: &mut WindowContext,
    ) -> gpui::View<Self::View> {
        cx.new_view(|_cx| ProjectIndexView::new(input, output))
    }

    /// Placeholder shown while the tool call is still streaming/running;
    /// surfaces the partial `query` argument when it has arrived.
    fn render_running(arguments: &Option<Value>, _: &mut WindowContext) -> impl IntoElement {
        let text: String = arguments
            .as_ref()
            .and_then(|arguments| arguments.get("query"))
            .and_then(|query| query.as_str())
            .map(|query| format!("Searching for: {}", query))
            .unwrap_or_else(|| "Preparing search...".to_string());

        // NOTE(review): a fresh nanoid per call gives the container a new
        // element id on every render — confirm collapse state is not meant to
        // persist for the running placeholder.
        CollapsibleContainer::new(ElementId::Name(nanoid::nanoid!().into()), false).start_slot(text)
    }
}