use client::Client;
use futures::channel::oneshot;
use gpui::App;
use http_client::HttpClientWithUrl;
use language::language_settings::AllLanguageSettings;
use project::Project;
use semantic_index::{OpenAiEmbeddingModel, OpenAiEmbeddingProvider, SemanticIndex};
use settings::SettingsStore;
use std::{
    path::{Path, PathBuf},
    sync::Arc,
};

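// This example builds a semantic index for the project directory passed on the
// command line, waits for indexing to finish, then runs a single search against
// the index and prints the matching excerpts.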
fn main() {
    env_logger::init();

    use clock::FakeSystemClock;

    App::new().run(|cx| {
        let store = SettingsStore::test(cx);
        cx.set_global(store);
        language::init(cx);
        Project::init_settings(cx);
        SettingsStore::update(cx, |store, cx| {
            store.update_user_settings::<AllLanguageSettings>(cx, |_| {});
        });

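        // Construct a client with a fake system clock and register it as a global;
        // the HTTP client is reused below for the embedding provider.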
        let clock = Arc::new(FakeSystemClock::default());
        let http = Arc::new(HttpClientWithUrl::new("http://localhost:11434", None, None));

        let client = client::Client::new(clock, http.clone(), cx);
        Client::set_global(client.clone(), cx);

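        // The path of the project to index is expected as the only CLI argument.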
        let args: Vec<String> = std::env::args().collect();
        if args.len() < 2 {
            eprintln!("Usage: cargo run --example index -p semantic_index -- <project_path>");
            cx.quit();
            return;
        }

        // let embedding_provider = semantic_index::FakeEmbeddingProvider;

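        // Read the OpenAI API key from the environment and build an embedding
        // provider backed by the text-embedding-3-small model.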
        let api_key = std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY not set");

        let embedding_provider = Arc::new(OpenAiEmbeddingProvider::new(
            http.clone(),
            OpenAiEmbeddingModel::TextEmbedding3Small,
            open_ai::OPEN_AI_API_URL.to_string(),
            api_key,
        ));

        cx.spawn(|mut cx| async move {
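            // Open (or create) the on-disk index database at a fixed path in /tmp,
            // then load the project passed on the command line.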
            let semantic_index = SemanticIndex::new(
                PathBuf::from("/tmp/semantic-index-db.mdb"),
                embedding_provider,
                &mut cx,
            );

            let mut semantic_index = semantic_index.await.unwrap();

            let project_path = Path::new(&args[1]);

            let project = Project::example([project_path], &mut cx).await;

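            // Register the built-in languages with the project's language registry,
            // then ask the semantic index for an index of this project.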
            cx.update(|cx| {
                let language_registry = project.read(cx).languages().clone();
                let node_runtime = project.read(cx).node_runtime().unwrap().clone();
                languages::init(language_registry, node_runtime, cx);
            })
            .unwrap();

            let project_index = cx
                .update(|cx| semantic_index.project_index(project.clone(), cx))
                .unwrap();

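            // Use a oneshot channel to wait for the first event emitted by the
            // project index, treating it as the signal that indexing has finished.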
            let (tx, rx) = oneshot::channel();
            let mut tx = Some(tx);
            let subscription = cx.update(|cx| {
                cx.subscribe(&project_index, move |_, event, _| {
                    if let Some(tx) = tx.take() {
                        _ = tx.send(*event);
                    }
                })
            });

            let index_start = std::time::Instant::now();
            rx.await.expect("no event emitted");
            drop(subscription);
            println!("Index time: {:?}", index_start.elapsed());

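            // Run a semantic search for a fixed query, requesting the top 4 results.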
            let results = cx
                .update(|cx| {
                    let project_index = project_index.read(cx);
                    let query = "converting an anchor to a point";
                    project_index.search(query.into(), 4, cx)
                })
                .unwrap()
                .await
                .unwrap();

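            // For each result, load the file from disk and print the matched excerpt
            // along with its path, byte range, and score.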
            for search_result in results {
                let path = search_result.path.clone();

                let content = cx
                    .update(|cx| {
                        let worktree = search_result.worktree.read(cx);
                        let entry_abs_path = worktree.abs_path().join(search_result.path.clone());
                        let fs = project.read(cx).fs().clone();
                        cx.spawn(|_| async move { fs.load(&entry_abs_path).await.unwrap() })
                    })
                    .unwrap()
                    .await;

                let range = search_result.range.clone();
                let content = content[search_result.range].to_owned();

                println!(
                    "✄✄✄✄✄✄✄✄✄✄✄✄✄✄ {:?} @ {} ✄✄✄✄✄✄✄✄✄✄✄✄✄✄",
                    path, search_result.score
                );
                println!("{:?}:{:?}:{:?}", path, range.start, range.end);
                println!("{}", content);
            }

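            // Keep the app alive for a long time; the quit below only runs once this
            // timer elapses.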
            cx.background_executor()
                .timer(std::time::Duration::from_secs(100000))
                .await;

            cx.update(|cx| cx.quit()).unwrap();
        })
        .detach();
    });
}