use client::Client;
use futures::channel::oneshot;
use gpui::App;
use http_client::HttpClientWithUrl;
use language::language_settings::AllLanguageSettings;
use project::Project;
use semantic_index::{OpenAiEmbeddingModel, OpenAiEmbeddingProvider, SemanticDb};
use settings::SettingsStore;
use std::{
    path::{Path, PathBuf},
    sync::Arc,
};

fn main() {
    env_logger::init();

    use clock::FakeSystemClock;

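    // Run a headless GPUI app and install the globals (settings, language
    // support, project settings) that the semantic index depends on.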
    App::new().run(|cx| {
        let store = SettingsStore::test(cx);
        cx.set_global(store);
        language::init(cx);
        Project::init_settings(cx);
        SettingsStore::update(cx, |store, cx| {
            store.update_user_settings::<AllLanguageSettings>(cx, |_| {});
        });

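        // The client needs a clock and an HTTP client; a fake system clock is
        // enough for this example, and the HTTP client is shared with the
        // embedding provider below.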
        let clock = Arc::new(FakeSystemClock::default());

        let http = Arc::new(HttpClientWithUrl::new(
            Arc::new(ureq_client::UreqClient::new(
                None,
                "Zed semantic index example".to_string(),
                cx.background_executor().clone(),
            )),
            "http://localhost:11434",
            None,
        ));
        let client = client::Client::new(clock, http.clone(), cx);
        Client::set_global(client.clone(), cx);

        let args: Vec<String> = std::env::args().collect();
        if args.len() < 2 {
            eprintln!("Usage: cargo run --example index -p semantic_index -- <project_path>");
            cx.quit();
            return;
        }

        // let embedding_provider = semantic_index::FakeEmbeddingProvider;

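        // Embeddings come from OpenAI's text-embedding-3-small model, so
        // OPENAI_API_KEY must be set in the environment.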
        let api_key = std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY not set");

        let embedding_provider = Arc::new(OpenAiEmbeddingProvider::new(
            http.clone(),
            OpenAiEmbeddingModel::TextEmbedding3Small,
            open_ai::OPEN_AI_API_URL.to_string(),
            api_key,
        ));

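        // Build the index for the given project and run an example search on
        // a background task.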
        cx.spawn(|mut cx| async move {
            let semantic_index = SemanticDb::new(
                PathBuf::from("/tmp/semantic-index-db.mdb"),
                embedding_provider,
                &mut cx,
            );

            let mut semantic_index = semantic_index.await.unwrap();

            let project_path = Path::new(&args[1]);

            let project = Project::example([project_path], &mut cx).await;

            cx.update(|cx| {
                let language_registry = project.read(cx).languages().clone();
                let node_runtime = project.read(cx).node_runtime().unwrap().clone();
                languages::init(language_registry, node_runtime, cx);
            })
            .unwrap();

            let project_index = cx
                .update(|cx| semantic_index.project_index(project.clone(), cx))
                .unwrap()
                .unwrap();

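            // Subscribe to the project index and wait for it to emit its
            // first event before searching.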
            let (tx, rx) = oneshot::channel();
            let mut tx = Some(tx);
            let subscription = cx.update(|cx| {
                cx.subscribe(&project_index, move |_, event, _| {
                    if let Some(tx) = tx.take() {
                        _ = tx.send(*event);
                    }
                })
            });

            let index_start = std::time::Instant::now();
            rx.await.expect("no event emitted");
            drop(subscription);
            println!("Index time: {:?}", index_start.elapsed());

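            // Run a semantic search against the index and collect the top
            // four results.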
            let results = cx
                .update(|cx| {
                    let project_index = project_index.read(cx);
                    let query = "converting an anchor to a point";
                    project_index.search(vec![query.into()], 4, cx)
                })
                .unwrap()
                .await
                .unwrap();

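            // Print each result's path, score, and the matching slice of the
            // file's contents.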
            for search_result in results {
                let path = search_result.path.clone();

                let content = cx
                    .update(|cx| {
                        let worktree = search_result.worktree.read(cx);
                        let entry_abs_path = worktree.abs_path().join(search_result.path.clone());
                        let fs = project.read(cx).fs().clone();
                        cx.spawn(|_| async move { fs.load(&entry_abs_path).await.unwrap() })
                    })
                    .unwrap()
                    .await;

                let range = search_result.range.clone();
                let content = content[search_result.range].to_owned();

                println!(
                    "✄✄✄✄✄✄✄✄✄✄✄✄✄✄ {:?} @ {} ✄✄✄✄✄✄✄✄✄✄✄✄✄✄",
                    path, search_result.score
                );
                println!("{:?}:{:?}:{:?}", path, range.start, range.end);
                println!("{}", content);
            }

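            // Keep the app alive; it only quits after the (very long) timer
            // elapses.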
            cx.background_executor()
                .timer(std::time::Duration::from_secs(100000))
                .await;

            cx.update(|cx| cx.quit()).unwrap();
        })
        .detach();
    });
}