mod chunking;
mod embedding;
mod embedding_index;
mod indexing;
mod project_index;
mod project_index_debug_view;
mod summary_backlog;
mod summary_index;
mod worktree_index;

use anyhow::{Context as _, Result};
use collections::HashMap;
use fs::Fs;
use gpui::{AppContext, AsyncAppContext, BorrowAppContext, Context, Global, Model, WeakModel};
use language::LineEnding;
use project::{Project, Worktree};
use std::{
    cmp::Ordering,
    path::{Path, PathBuf},
    sync::Arc,
};
use ui::ViewContext;
use util::ResultExt as _;
use workspace::Workspace;

pub use embedding::*;
pub use project_index::{LoadedSearchResult, ProjectIndex, SearchResult, Status};
pub use project_index_debug_view::ProjectIndexDebugView;
pub use summary_index::FileSummary;

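/// Global access point for semantic search.
///
/// Owns the heed (LMDB) environment that persists embeddings, the
/// [`EmbeddingProvider`] used to compute them, and one [`ProjectIndex`] per
/// open project.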
pub struct SemanticDb {
    embedding_provider: Arc<dyn EmbeddingProvider>,
    db_connection: Option<heed::Env>,
    project_indices: HashMap<WeakModel<Project>, Model<ProjectIndex>>,
}

impl Global for SemanticDb {}

impl SemanticDb {
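    /// Opens (creating it if necessary) the database at `db_path` and
    /// registers an observer that builds a [`ProjectIndex`] for each new
    /// workspace's project.
    ///
    /// A minimal usage sketch, assuming a hypothetical `MyProvider` type that
    /// implements [`EmbeddingProvider`] (not part of this crate):
    ///
    /// ```ignore
    /// let db = SemanticDb::new(
    ///     PathBuf::from("/path/to/semantic-index"),
    ///     Arc::new(MyProvider::default()),
    ///     &mut cx.to_async(),
    /// )
    /// .await?;
    /// cx.update(|cx| cx.set_global(db));
    /// ```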
    pub async fn new(
        db_path: PathBuf,
        embedding_provider: Arc<dyn EmbeddingProvider>,
        cx: &mut AsyncAppContext,
    ) -> Result<Self> {
        let db_connection = cx
            .background_executor()
            .spawn(async move {
                std::fs::create_dir_all(&db_path)?;
                unsafe {
                    heed::EnvOpenOptions::new()
                        .map_size(1024 * 1024 * 1024)
                        .max_dbs(3000)
                        .open(db_path)
                }
            })
            .await
            .context("opening database connection")?;

        cx.update(|cx| {
            cx.observe_new_views(
                |workspace: &mut Workspace, cx: &mut ViewContext<Workspace>| {
                    let project = workspace.project().clone();

                    if cx.has_global::<SemanticDb>() {
                        cx.update_global::<SemanticDb, _>(|this, cx| {
                            this.create_project_index(project, cx);
                        })
                    } else {
                        log::info!("No SemanticDb, skipping project index")
                    }
                },
            )
            .detach();
        })
        .ok();

        Ok(SemanticDb {
            db_connection: Some(db_connection),
            embedding_provider,
            project_indices: HashMap::default(),
        })
    }

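    /// Loads the file contents behind `results` and converts each match into
    /// a line-aligned [`LoadedSearchResult`].
    ///
    /// Results are ranked by the best score seen for their file, grouped so
    /// each file is read only once, and adjacent excerpts within a file are
    /// merged into a single excerpt.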
    pub async fn load_results(
        mut results: Vec<SearchResult>,
        fs: &Arc<dyn Fs>,
        cx: &AsyncAppContext,
    ) -> Result<Vec<LoadedSearchResult>> {
        let mut max_scores_by_path = HashMap::<_, (f32, usize)>::default();
        for result in &results {
            let (score, query_index) = max_scores_by_path
                .entry((result.worktree.clone(), result.path.clone()))
                .or_default();
            if result.score > *score {
                *score = result.score;
                *query_index = result.query_index;
            }
        }

        results.sort_by(|a, b| {
            let max_score_a = max_scores_by_path[&(a.worktree.clone(), a.path.clone())].0;
            let max_score_b = max_scores_by_path[&(b.worktree.clone(), b.path.clone())].0;
            max_score_b
                .partial_cmp(&max_score_a)
                .unwrap_or(Ordering::Equal)
                .then_with(|| a.worktree.entity_id().cmp(&b.worktree.entity_id()))
                .then_with(|| a.path.cmp(&b.path))
                .then_with(|| a.range.start.cmp(&b.range.start))
        });

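        // Results are now sorted by worktree, path, and range, so consecutive
        // results usually hit the same file; cache the most recently loaded
        // file to avoid re-reading it from disk for every excerpt.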
        let mut last_loaded_file: Option<(Model<Worktree>, Arc<Path>, PathBuf, String)> = None;
        let mut loaded_results = Vec::<LoadedSearchResult>::new();
        for result in results {
            let full_path;
            let file_content;
            if let Some(last_loaded_file) =
                last_loaded_file
                    .as_ref()
                    .filter(|(last_worktree, last_path, _, _)| {
                        last_worktree == &result.worktree && last_path == &result.path
                    })
            {
                full_path = last_loaded_file.2.clone();
                file_content = &last_loaded_file.3;
            } else {
                let output = result.worktree.read_with(cx, |worktree, _cx| {
                    let entry_abs_path = worktree.abs_path().join(&result.path);
                    let mut entry_full_path = PathBuf::from(worktree.root_name());
                    entry_full_path.push(&result.path);
                    let file_content = async {
                        let entry_abs_path = entry_abs_path;
                        fs.load(&entry_abs_path).await
                    };
                    (entry_full_path, file_content)
                })?;
                full_path = output.0;
                let Some(content) = output.1.await.log_err() else {
                    continue;
                };
                last_loaded_file = Some((
                    result.worktree.clone(),
                    result.path.clone(),
                    full_path.clone(),
                    content,
                ));
                file_content = &last_loaded_file.as_ref().unwrap().3;
            }

            let query_index = max_scores_by_path[&(result.worktree.clone(), result.path.clone())].1;

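            // Clamp the chunk's byte range to the file and nudge both ends
            // forward to the nearest UTF-8 character boundaries so the slice
            // operations below cannot panic.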
            let mut range_start = result.range.start.min(file_content.len());
            let mut range_end = result.range.end.min(file_content.len());
            while !file_content.is_char_boundary(range_start) {
                range_start += 1;
            }
            while !file_content.is_char_boundary(range_end) {
                range_end += 1;
            }
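            // Expand the excerpt to whole lines: compute the rows covered by
            // the range, then widen it back to the start of its first line
            // and forward to the end of its last line.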
159
160 let start_row = file_content[0..range_start].matches('\n').count() as u32;
161 let mut end_row = file_content[0..range_end].matches('\n').count() as u32;
162 let start_line_byte_offset = file_content[0..range_start]
163 .rfind('\n')
164 .map(|pos| pos + 1)
165 .unwrap_or_default();
166 let mut end_line_byte_offset = range_end;
167 if file_content[..end_line_byte_offset].ends_with('\n') {
168 end_row -= 1;
169 } else {
170 end_line_byte_offset = file_content[range_end..]
171 .find('\n')
172 .map(|pos| range_end + pos + 1)
173 .unwrap_or_else(|| file_content.len());
174 }
175 let mut excerpt_content =
176 file_content[start_line_byte_offset..end_line_byte_offset].to_string();
177 LineEnding::normalize(&mut excerpt_content);
178
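            // If this excerpt begins on the row immediately following the
            // previous excerpt from the same file, merge the two into a
            // single result rather than emitting overlapping entries.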
            if let Some(prev_result) = loaded_results.last_mut() {
                if prev_result.full_path == full_path {
                    if *prev_result.row_range.end() + 1 == start_row {
                        prev_result.row_range = *prev_result.row_range.start()..=end_row;
                        prev_result.excerpt_content.push_str(&excerpt_content);
                        continue;
                    }
                }
            }

            loaded_results.push(LoadedSearchResult {
                path: result.path,
                full_path,
                excerpt_content,
                row_range: start_row..=end_row,
                query_index,
            });
        }

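        // Merging can leave trailing blank lines behind; trim them and shrink
        // the row ranges to match.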
        for result in &mut loaded_results {
            while result.excerpt_content.ends_with("\n\n") {
                result.excerpt_content.pop();
                result.row_range =
                    *result.row_range.start()..=result.row_range.end().saturating_sub(1)
            }
        }

        Ok(loaded_results)
    }

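    /// Returns the existing [`ProjectIndex`] for `project`, if one was
    /// created earlier via [`Self::create_project_index`].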
    pub fn project_index(
        &mut self,
        project: Model<Project>,
        _cx: &mut AppContext,
    ) -> Option<Model<ProjectIndex>> {
        self.project_indices.get(&project.downgrade()).cloned()
    }

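    /// Returns the number of file summaries still pending for `project`, or
    /// `None` if the project has no index.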
    pub fn remaining_summaries(
        &self,
        project: &WeakModel<Project>,
        cx: &mut AppContext,
    ) -> Option<usize> {
        self.project_indices.get(project).map(|project_index| {
            project_index.update(cx, |project_index, cx| {
                project_index.remaining_summaries(cx)
            })
        })
    }

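    /// Creates a [`ProjectIndex`] for `project`, registers it under the
    /// project's weak handle, and removes it again when the project is
    /// released.
    ///
    /// A hedged sketch of wiring this into a search, assuming the
    /// `SemanticDb` global has been set and a `project` handle is in scope:
    ///
    /// ```ignore
    /// let index = cx.update_global::<SemanticDb, _>(|db, cx| {
    ///     db.create_project_index(project.clone(), cx)
    /// });
    /// let search = cx.update(|cx| index.read(cx).search(vec!["query".into()], 8, cx));
    /// let results = search.await?;
    /// ```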
    pub fn create_project_index(
        &mut self,
        project: Model<Project>,
        cx: &mut AppContext,
    ) -> Model<ProjectIndex> {
        let project_index = cx.new_model(|cx| {
            ProjectIndex::new(
                project.clone(),
                self.db_connection.clone().unwrap(),
                self.embedding_provider.clone(),
                cx,
            )
        });

        let project_weak = project.downgrade();
        self.project_indices
            .insert(project_weak.clone(), project_index.clone());

        cx.observe_release(&project, move |_, cx| {
            if cx.has_global::<SemanticDb>() {
                cx.update_global::<SemanticDb, _>(|this, _| {
                    this.project_indices.remove(&project_weak);
                })
            }
        })
        .detach();

        project_index
    }
}

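// heed environments are closed explicitly: `prepare_for_closing` signals that
// the memory-mapped database can be torn down once the remaining references
// to the environment are dropped.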
impl Drop for SemanticDb {
    fn drop(&mut self) {
        self.db_connection.take().unwrap().prepare_for_closing();
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use anyhow::anyhow;
    use chunking::Chunk;
    use embedding_index::{ChunkedFile, EmbeddingIndex};
    use feature_flags::FeatureFlagAppExt;
    use fs::FakeFs;
    use futures::{future::BoxFuture, FutureExt};
    use gpui::TestAppContext;
    use indexing::IndexingEntrySet;
    use language::language_settings::AllLanguageSettings;
    use project::{Project, ProjectEntryId};
    use serde_json::json;
    use settings::SettingsStore;
    use smol::{channel, stream::StreamExt};
    use std::{future, path::Path, sync::Arc};

    fn init_test(cx: &mut TestAppContext) {
        env_logger::try_init().ok();

        cx.update(|cx| {
            let store = SettingsStore::test(cx);
            cx.set_global(store);
            language::init(cx);
            cx.update_flags(false, vec![]);
            Project::init_settings(cx);
            SettingsStore::update(cx, |store, cx| {
                store.update_user_settings::<AllLanguageSettings>(cx, |_| {});
            });
        });
    }

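    /// A deterministic [`EmbeddingProvider`] for tests: embeddings (and
    /// failures) are produced by a caller-supplied closure rather than a
    /// network call.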
    pub struct TestEmbeddingProvider {
        batch_size: usize,
        compute_embedding: Box<dyn Fn(&str) -> Result<Embedding> + Send + Sync>,
    }

    impl TestEmbeddingProvider {
        pub fn new(
            batch_size: usize,
            compute_embedding: impl 'static + Fn(&str) -> Result<Embedding> + Send + Sync,
        ) -> Self {
            Self {
                batch_size,
                compute_embedding: Box::new(compute_embedding),
            }
        }
    }

    impl EmbeddingProvider for TestEmbeddingProvider {
        fn embed<'a>(
            &'a self,
            texts: &'a [TextToEmbed<'a>],
        ) -> BoxFuture<'a, Result<Vec<Embedding>>> {
            let embeddings = texts
                .iter()
                .map(|to_embed| (self.compute_embedding)(to_embed.text))
                .collect();
            future::ready(embeddings).boxed()
        }

        fn batch_size(&self) -> usize {
            self.batch_size
        }
    }

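    // End-to-end search over a fake project: the provider maps each chunk to
    // a 2-D embedding whose dimensions track the phrases "garbage in" and
    // "garbage out", so the fixture file containing both phrases should rank
    // highest.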
    #[gpui::test]
    async fn test_search(cx: &mut TestAppContext) {
        cx.executor().allow_parking();

        init_test(cx);

        cx.update(|cx| {
            // This functionality is staff-flagged.
            cx.update_flags(true, vec![]);
        });

        let temp_dir = tempfile::tempdir().unwrap();

        let mut semantic_index = SemanticDb::new(
            temp_dir.path().into(),
            Arc::new(TestEmbeddingProvider::new(16, |text| {
                let mut embedding = vec![0f32; 2];
                // If the text mentions "garbage in", set the first dimension
                // to 0.9; otherwise to -0.9. The second dimension tracks
                // "garbage out" the same way.
                if text.contains("garbage in") {
                    embedding[0] = 0.9;
                } else {
                    embedding[0] = -0.9;
                }

                if text.contains("garbage out") {
                    embedding[1] = 0.9;
                } else {
                    embedding[1] = -0.9;
                }

                Ok(Embedding::new(embedding))
            })),
            &mut cx.to_async(),
        )
        .await
        .unwrap();

        let fs = FakeFs::new(cx.executor());
        let project_path = Path::new("/fake_project");

        fs.insert_tree(
            project_path,
            json!({
                "fixture": {
                    "main.rs": include_str!("../fixture/main.rs"),
                    "needle.md": include_str!("../fixture/needle.md"),
                }
            }),
        )
        .await;

        let project = Project::test(fs, [project_path], cx).await;

        let project_index = cx.update(|cx| {
            let language_registry = project.read(cx).languages().clone();
            let node_runtime = project.read(cx).node_runtime().unwrap().clone();
            languages::init(language_registry, node_runtime, cx);
            semantic_index.create_project_index(project.clone(), cx)
        });

        cx.run_until_parked();
        while cx
            .update(|cx| semantic_index.remaining_summaries(&project.downgrade(), cx))
            .unwrap()
            > 0
        {
            cx.run_until_parked();
        }

        let results = cx
            .update(|cx| {
                let project_index = project_index.read(cx);
                let query = "garbage in, garbage out";
                project_index.search(vec![query.into()], 4, cx)
            })
            .await
            .unwrap();

        assert!(
            results.len() > 1,
            "should have found multiple results, but found {:?}",
            results
        );

        for result in &results {
            println!("result: {:?}", result.path);
            println!("score: {:?}", result.score);
        }

        // Find a result whose score exceeds 0.9, i.e. the needle file.
        let search_result = results.iter().find(|result| result.score > 0.9).unwrap();

        assert_eq!(search_result.path.to_string_lossy(), "fixture/needle.md");

        let content = cx
            .update(|cx| {
                let worktree = search_result.worktree.read(cx);
                let entry_abs_path = worktree.abs_path().join(&search_result.path);
                let fs = project.read(cx).fs().clone();
                cx.background_executor()
                    .spawn(async move { fs.load(&entry_abs_path).await.unwrap() })
            })
            .await;

        let range = search_result.range.clone();
        let content = content[range].to_owned();

        assert!(content.contains("garbage in, garbage out"));
    }

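    // The provider rejects any text containing the letter 'g', so the first
    // file ("abcdefghijklmnop") fails to embed while the second succeeds;
    // only successfully embedded files should come out of the pipeline.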
    #[gpui::test]
    async fn test_embed_files(cx: &mut TestAppContext) {
        cx.executor().allow_parking();

        let provider = Arc::new(TestEmbeddingProvider::new(3, |text| {
            if text.contains('g') {
                Err(anyhow!("cannot embed text containing a 'g' character"))
            } else {
                Ok(Embedding::new(
                    ('a'..='z')
                        .map(|char| text.chars().filter(|c| *c == char).count() as f32)
                        .collect(),
                ))
            }
        }));

        let (indexing_progress_tx, _) = channel::unbounded();
        let indexing_entries = Arc::new(IndexingEntrySet::new(indexing_progress_tx));

        let (chunked_files_tx, chunked_files_rx) = channel::unbounded::<ChunkedFile>();
        chunked_files_tx
            .send_blocking(ChunkedFile {
                path: Path::new("test1.md").into(),
                mtime: None,
                handle: indexing_entries.insert(ProjectEntryId::from_proto(0)),
                text: "abcdefghijklmnop".to_string(),
                chunks: [0..4, 4..8, 8..12, 12..16]
                    .into_iter()
                    .map(|range| Chunk {
                        range,
                        digest: Default::default(),
                    })
                    .collect(),
            })
            .unwrap();
        chunked_files_tx
            .send_blocking(ChunkedFile {
                path: Path::new("test2.md").into(),
                mtime: None,
                handle: indexing_entries.insert(ProjectEntryId::from_proto(1)),
                text: "qrstuvwxyz".to_string(),
                chunks: [0..4, 4..8, 8..10]
                    .into_iter()
                    .map(|range| Chunk {
                        range,
                        digest: Default::default(),
                    })
                    .collect(),
            })
            .unwrap();
        chunked_files_tx.close();

        let embed_files_task =
            cx.update(|cx| EmbeddingIndex::embed_files(provider.clone(), chunked_files_rx, cx));
        embed_files_task.task.await.unwrap();

        let mut embedded_files_rx = embed_files_task.files;
        let mut embedded_files = Vec::new();
        while let Some((embedded_file, _)) = embedded_files_rx.next().await {
            embedded_files.push(embedded_file);
        }

        assert_eq!(embedded_files.len(), 1);
        assert_eq!(embedded_files[0].path.as_ref(), Path::new("test2.md"));
        assert_eq!(
            embedded_files[0]
                .chunks
                .iter()
                .map(|embedded_chunk| embedded_chunk.embedding.clone())
                .collect::<Vec<Embedding>>(),
            vec![
                (provider.compute_embedding)("qrst").unwrap(),
                (provider.compute_embedding)("uvwx").unwrap(),
                (provider.compute_embedding)("yz").unwrap(),
            ],
        );
    }

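    // Exercises `load_results` directly: a newline-aligned chunk loads as-is,
    // an unaligned chunk is expanded to whole lines, and adjacent chunks in
    // the same file are merged, with files ordered by their best score.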
    #[gpui::test]
    async fn test_load_search_results(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project_path = Path::new("/fake_project");

        let file1_content = "one\ntwo\nthree\nfour\nfive\n";
        let file2_content = "aaa\nbbb\nccc\nddd\neee\n";

        fs.insert_tree(
            project_path,
            json!({
                "file1.txt": file1_content,
                "file2.txt": file2_content,
            }),
        )
        .await;

        let fs = fs as Arc<dyn Fs>;
        let project = Project::test(fs.clone(), [project_path], cx).await;
        let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());

        // Chunk that is already newline-aligned.
        let search_results = vec![SearchResult {
            worktree: worktree.clone(),
            path: Path::new("file1.txt").into(),
            range: 0..file1_content.find("four").unwrap(),
            score: 0.5,
            query_index: 0,
        }];
        assert_eq!(
            SemanticDb::load_results(search_results, &fs, &cx.to_async())
                .await
                .unwrap(),
            &[LoadedSearchResult {
                path: Path::new("file1.txt").into(),
                full_path: "fake_project/file1.txt".into(),
                excerpt_content: "one\ntwo\nthree\n".into(),
                row_range: 0..=2,
                query_index: 0,
            }]
        );

        // Chunk that is *not* newline-aligned.
        let search_results = vec![SearchResult {
            worktree: worktree.clone(),
            path: Path::new("file1.txt").into(),
            range: file1_content.find("two").unwrap() + 1..file1_content.find("four").unwrap() + 2,
            score: 0.5,
            query_index: 0,
        }];
        assert_eq!(
            SemanticDb::load_results(search_results, &fs, &cx.to_async())
                .await
                .unwrap(),
            &[LoadedSearchResult {
                path: Path::new("file1.txt").into(),
                full_path: "fake_project/file1.txt".into(),
                excerpt_content: "two\nthree\nfour\n".into(),
                row_range: 1..=3,
                query_index: 0,
            }]
        );

        // Chunks that are adjacent.
        let search_results = vec![
            SearchResult {
                worktree: worktree.clone(),
                path: Path::new("file1.txt").into(),
                range: file1_content.find("two").unwrap()..file1_content.len(),
                score: 0.6,
                query_index: 0,
            },
            SearchResult {
                worktree: worktree.clone(),
                path: Path::new("file1.txt").into(),
                range: 0..file1_content.find("two").unwrap(),
                score: 0.5,
                query_index: 1,
            },
            SearchResult {
                worktree: worktree.clone(),
                path: Path::new("file2.txt").into(),
                range: 0..file2_content.len(),
                score: 0.8,
                query_index: 1,
            },
        ];
        assert_eq!(
            SemanticDb::load_results(search_results, &fs, &cx.to_async())
                .await
                .unwrap(),
            &[
                LoadedSearchResult {
                    path: Path::new("file2.txt").into(),
                    full_path: "fake_project/file2.txt".into(),
                    excerpt_content: file2_content.into(),
                    row_range: 0..=4,
                    query_index: 1,
                },
                LoadedSearchResult {
                    path: Path::new("file1.txt").into(),
                    full_path: "fake_project/file1.txt".into(),
                    excerpt_content: file1_content.into(),
                    row_range: 0..=4,
                    query_index: 0,
                }
            ]
        );
    }
}