1mod headless;
2
3use anyhow::{Result, anyhow};
4use clap::{Args, Parser, Subcommand};
5use cloud_llm_client::predict_edits_v3;
6use edit_prediction_context::{
7 Declaration, EditPredictionContext, EditPredictionExcerptOptions, Identifier, ReferenceRegion,
8 SyntaxIndex, references_in_range,
9};
10use futures::channel::mpsc;
11use futures::{FutureExt as _, StreamExt as _};
12use gpui::{AppContext, Application, AsyncApp};
13use gpui::{Entity, Task};
14use language::Bias;
15use language::Point;
16use language::{Buffer, OffsetRangeExt};
17use language_model::LlmApiToken;
18use ordered_float::OrderedFloat;
19use project::{Project, ProjectPath, Worktree};
20use release_channel::AppVersion;
21use reqwest_client::ReqwestClient;
22use serde_json::json;
23use std::cmp::Reverse;
24use std::collections::HashMap;
25use std::io::Write as _;
26use std::ops::Range;
27use std::path::{Path, PathBuf};
28use std::process::exit;
29use std::str::FromStr;
30use std::sync::Arc;
31use std::time::Duration;
32use util::paths::PathStyle;
33use util::rel_path::RelPath;
34use util::{RangeExt, ResultExt as _};
35use zeta::{PerformPredictEditsParams, Zeta};
36
37use crate::headless::ZetaCliAppState;
38
// Top-level CLI argument container for the `zeta` binary.
// NOTE: plain `//` comments are used throughout the clap-derive types because
// `///` doc comments would be picked up by clap as user-visible help text.
#[derive(Parser, Debug)]
#[command(name = "zeta")]
struct ZetaCliArgs {
    // The subcommand to execute; see `Commands`.
    #[command(subcommand)]
    command: Commands,
}
45
// Subcommands of the `zeta` CLI. Dispatched in `main`.
#[derive(Subcommand, Debug)]
enum Commands {
    // Gather zeta1 context for a cursor position and print it as JSON.
    Context(ContextArgs),
    // Gather zeta2 context; output controlled by `Zeta2Args::output_format`.
    Zeta2Context {
        #[clap(flatten)]
        zeta2_args: Zeta2Args,
        #[clap(flatten)]
        context_args: ContextArgs,
    },
    // Run an actual edit prediction, either from a pre-built request body or
    // by gathering context first (exactly one of the two must be provided).
    Predict {
        #[arg(long)]
        predict_edits_body: Option<FileOrStdin>,
        #[clap(flatten)]
        context_args: Option<ContextArgs>,
    },
    // Compare syntax-index definition retrieval against language-server
    // results across a whole worktree; see `retrieval_stats`.
    RetrievalStats {
        #[arg(long)]
        worktree: PathBuf,
        #[arg(long, default_value_t = 42)]
        file_indexing_parallelism: usize,
    },
}
68
// Arguments shared by the context-gathering subcommands. The clap group makes
// `--worktree` required whenever any argument of this group is supplied.
#[derive(Debug, Args)]
#[group(requires = "worktree")]
struct ContextArgs {
    // Root directory of the worktree to open.
    #[arg(long)]
    worktree: PathBuf,
    // Cursor location, parsed from `file.rs:line:column` (1-based).
    #[arg(long)]
    cursor: CursorPosition,
    // When set, start language servers for the buffer and wait for them.
    #[arg(long)]
    use_language_server: bool,
    // Optional edit-history events, read from a file or stdin (`-`).
    #[arg(long)]
    events: Option<FileOrStdin>,
}
81
// Tuning knobs for the zeta2 context pipeline; mapped onto
// `zeta2::ZetaOptions` in `get_context`.
#[derive(Debug, Args)]
struct Zeta2Args {
    // Upper bound on the total prompt size, in bytes.
    #[arg(long, default_value_t = 8192)]
    max_prompt_bytes: usize,
    // Upper bound on the cursor excerpt size, in bytes.
    #[arg(long, default_value_t = 2048)]
    max_excerpt_bytes: usize,
    // Lower bound on the cursor excerpt size, in bytes.
    #[arg(long, default_value_t = 1024)]
    min_excerpt_bytes: usize,
    // Desired fraction of excerpt bytes placed before the cursor.
    #[arg(long, default_value_t = 0.66)]
    target_before_cursor_over_total_bytes: f32,
    // Upper bound on included diagnostics, in bytes.
    #[arg(long, default_value_t = 1024)]
    max_diagnostic_bytes: usize,
    // How the prompt is rendered; see `PromptFormat`.
    #[arg(long, value_enum, default_value_t = PromptFormat::default())]
    prompt_format: PromptFormat,
    // What `zeta2-context` prints; see `OutputFormat`.
    #[arg(long, value_enum, default_value_t = Default::default())]
    output_format: OutputFormat,
    // Number of files indexed concurrently.
    #[arg(long, default_value_t = 42)]
    file_indexing_parallelism: usize,
}
101
// CLI-side mirror of `predict_edits_v3::PromptFormat` (see the conversion
// impl below). Kept separate so clap derive stays local to this crate.
#[derive(clap::ValueEnum, Default, Debug, Clone)]
enum PromptFormat {
    #[default]
    MarkedExcerpt,
    LabeledSections,
    OnlySnippets,
}
109
110impl Into<predict_edits_v3::PromptFormat> for PromptFormat {
111 fn into(self) -> predict_edits_v3::PromptFormat {
112 match self {
113 Self::MarkedExcerpt => predict_edits_v3::PromptFormat::MarkedExcerpt,
114 Self::LabeledSections => predict_edits_v3::PromptFormat::LabeledSections,
115 Self::OnlySnippets => predict_edits_v3::PromptFormat::OnlySnippets,
116 }
117 }
118}
119
// What the `zeta2-context` subcommand prints: the rendered prompt text, the
// JSON request body, or a JSON object containing both.
#[derive(clap::ValueEnum, Default, Debug, Clone)]
enum OutputFormat {
    #[default]
    Prompt,
    Request,
    Both,
}
127
/// A CLI input source: a file path, or stdin when the argument is `-`.
#[derive(Debug, Clone)]
enum FileOrStdin {
    File(PathBuf),
    Stdin,
}
133
134impl FileOrStdin {
135 async fn read_to_string(&self) -> Result<String, std::io::Error> {
136 match self {
137 FileOrStdin::File(path) => smol::fs::read_to_string(path).await,
138 FileOrStdin::Stdin => smol::unblock(|| std::io::read_to_string(std::io::stdin())).await,
139 }
140 }
141}
142
143impl FromStr for FileOrStdin {
144 type Err = <PathBuf as FromStr>::Err;
145
146 fn from_str(s: &str) -> Result<Self, Self::Err> {
147 match s {
148 "-" => Ok(Self::Stdin),
149 _ => Ok(Self::File(PathBuf::from_str(s)?)),
150 }
151 }
152}
153
/// A cursor location parsed from the CLI as `path:line:column` (1-based).
#[derive(Debug, Clone)]
struct CursorPosition {
    // Worktree-relative path of the file containing the cursor.
    path: Arc<RelPath>,
    // Zero-based position; converted from 1-based input during parsing.
    point: Point,
}
159
160impl FromStr for CursorPosition {
161 type Err = anyhow::Error;
162
163 fn from_str(s: &str) -> Result<Self> {
164 let parts: Vec<&str> = s.split(':').collect();
165 if parts.len() != 3 {
166 return Err(anyhow!(
167 "Invalid cursor format. Expected 'file.rs:line:column', got '{}'",
168 s
169 ));
170 }
171
172 let path = RelPath::new(Path::new(&parts[0]), PathStyle::local())?.into_arc();
173 let line: u32 = parts[1]
174 .parse()
175 .map_err(|_| anyhow!("Invalid line number: '{}'", parts[1]))?;
176 let column: u32 = parts[2]
177 .parse()
178 .map_err(|_| anyhow!("Invalid column number: '{}'", parts[2]))?;
179
180 // Convert from 1-based to 0-based indexing
181 let point = Point::new(line.saturating_sub(1), column.saturating_sub(1));
182
183 Ok(CursorPosition { path, point })
184 }
185}
186
/// Result of [`get_context`]: either the structured zeta1 gathering output or
/// the already-rendered zeta2 output string.
enum GetContextOutput {
    /// Structured context produced by `zeta::gather_context`.
    Zeta1(zeta::GatherContextOutput),
    /// Rendered text, selected by `Zeta2Args::output_format`.
    Zeta2(String),
}
191
192async fn get_context(
193 zeta2_args: Option<Zeta2Args>,
194 args: ContextArgs,
195 app_state: &Arc<ZetaCliAppState>,
196 cx: &mut AsyncApp,
197) -> Result<GetContextOutput> {
198 let ContextArgs {
199 worktree: worktree_path,
200 cursor,
201 use_language_server,
202 events,
203 } = args;
204
205 let worktree_path = worktree_path.canonicalize()?;
206
207 let project = cx.update(|cx| {
208 Project::local(
209 app_state.client.clone(),
210 app_state.node_runtime.clone(),
211 app_state.user_store.clone(),
212 app_state.languages.clone(),
213 app_state.fs.clone(),
214 None,
215 cx,
216 )
217 })?;
218
219 let worktree = project
220 .update(cx, |project, cx| {
221 project.create_worktree(&worktree_path, true, cx)
222 })?
223 .await?;
224
225 let (_lsp_open_handle, buffer) = if use_language_server {
226 let (lsp_open_handle, buffer) =
227 open_buffer_with_language_server(&project, &worktree, &cursor.path, cx).await?;
228 (Some(lsp_open_handle), buffer)
229 } else {
230 let buffer = open_buffer(&project, &worktree, &cursor.path, cx).await?;
231 (None, buffer)
232 };
233
234 let full_path_str = worktree
235 .read_with(cx, |worktree, _| worktree.root_name().join(&cursor.path))?
236 .display(PathStyle::local())
237 .to_string();
238
239 let snapshot = cx.update(|cx| buffer.read(cx).snapshot())?;
240 let clipped_cursor = snapshot.clip_point(cursor.point, Bias::Left);
241 if clipped_cursor != cursor.point {
242 let max_row = snapshot.max_point().row;
243 if cursor.point.row < max_row {
244 return Err(anyhow!(
245 "Cursor position {:?} is out of bounds (line length is {})",
246 cursor.point,
247 snapshot.line_len(cursor.point.row)
248 ));
249 } else {
250 return Err(anyhow!(
251 "Cursor position {:?} is out of bounds (max row is {})",
252 cursor.point,
253 max_row
254 ));
255 }
256 }
257
258 let events = match events {
259 Some(events) => events.read_to_string().await?,
260 None => String::new(),
261 };
262
263 if let Some(zeta2_args) = zeta2_args {
264 // wait for worktree scan before starting zeta2 so that wait_for_initial_indexing waits for
265 // the whole worktree.
266 worktree
267 .read_with(cx, |worktree, _cx| {
268 worktree.as_local().unwrap().scan_complete()
269 })?
270 .await;
271 let output = cx
272 .update(|cx| {
273 let zeta = cx.new(|cx| {
274 zeta2::Zeta::new(app_state.client.clone(), app_state.user_store.clone(), cx)
275 });
276 let indexing_done_task = zeta.update(cx, |zeta, cx| {
277 zeta.set_options(zeta2::ZetaOptions {
278 excerpt: EditPredictionExcerptOptions {
279 max_bytes: zeta2_args.max_excerpt_bytes,
280 min_bytes: zeta2_args.min_excerpt_bytes,
281 target_before_cursor_over_total_bytes: zeta2_args
282 .target_before_cursor_over_total_bytes,
283 },
284 max_diagnostic_bytes: zeta2_args.max_diagnostic_bytes,
285 max_prompt_bytes: zeta2_args.max_prompt_bytes,
286 prompt_format: zeta2_args.prompt_format.into(),
287 file_indexing_parallelism: zeta2_args.file_indexing_parallelism,
288 });
289 zeta.register_buffer(&buffer, &project, cx);
290 zeta.wait_for_initial_indexing(&project, cx)
291 });
292 cx.spawn(async move |cx| {
293 indexing_done_task.await?;
294 let request = zeta
295 .update(cx, |zeta, cx| {
296 let cursor = buffer.read(cx).snapshot().anchor_before(clipped_cursor);
297 zeta.cloud_request_for_zeta_cli(&project, &buffer, cursor, cx)
298 })?
299 .await?;
300
301 let planned_prompt = cloud_zeta2_prompt::PlannedPrompt::populate(&request)?;
302 let prompt_string = planned_prompt.to_prompt_string()?.0;
303 match zeta2_args.output_format {
304 OutputFormat::Prompt => anyhow::Ok(prompt_string),
305 OutputFormat::Request => {
306 anyhow::Ok(serde_json::to_string_pretty(&request)?)
307 }
308 OutputFormat::Both => anyhow::Ok(serde_json::to_string_pretty(&json!({
309 "request": request,
310 "prompt": prompt_string,
311 }))?),
312 }
313 })
314 })?
315 .await?;
316 Ok(GetContextOutput::Zeta2(output))
317 } else {
318 let prompt_for_events = move || (events, 0);
319 Ok(GetContextOutput::Zeta1(
320 cx.update(|cx| {
321 zeta::gather_context(
322 full_path_str,
323 &snapshot,
324 clipped_cursor,
325 prompt_for_events,
326 cx,
327 )
328 })?
329 .await?,
330 ))
331 }
332}
333
/// Measures how well syntax-index-based definition retrieval agrees with
/// language-server "go to definition" across every indexed file in `worktree`.
///
/// For each identifier reference in each file, gathers edit-prediction context
/// restricted to that single reference, then asks the language server for the
/// ground-truth definitions and records whether (and at what rank) the
/// retrieved definitions contain them. Per-reference results are written to
/// `retrieval-stats.txt` in the working directory; aggregate counts go to
/// stdout. Returns an empty string (the CLI prints the returned value).
pub async fn retrieval_stats(
    worktree: PathBuf,
    file_indexing_parallelism: usize,
    app_state: Arc<ZetaCliAppState>,
    cx: &mut AsyncApp,
) -> Result<String> {
    let worktree_path = worktree.canonicalize()?;

    let project = cx.update(|cx| {
        Project::local(
            app_state.client.clone(),
            app_state.node_runtime.clone(),
            app_state.user_store.clone(),
            app_state.languages.clone(),
            app_state.fs.clone(),
            None,
            cx,
        )
    })?;

    let worktree = project
        .update(cx, |project, cx| {
            project.create_worktree(&worktree_path, true, cx)
        })?
        .await?;
    let worktree_id = worktree.read_with(cx, |worktree, _cx| worktree.id())?;

    // wait for worktree scan so that wait_for_initial_file_indexing waits for the whole worktree.
    worktree
        .read_with(cx, |worktree, _cx| {
            worktree.as_local().unwrap().scan_complete()
        })?
        .await;

    let index = cx.new(|cx| SyntaxIndex::new(&project, file_indexing_parallelism, cx))?;
    index
        .read_with(cx, |index, cx| index.wait_for_initial_file_indexing(cx))?
        .await?;
    let files = index
        .read_with(cx, |index, cx| index.indexed_file_paths(cx))?
        .await;

    // Handles are accumulated (not dropped per-iteration) so every buffer
    // stays registered with its language servers for the whole run.
    let mut lsp_open_handles = Vec::new();
    let mut output = std::fs::File::create("retrieval-stats.txt")?;
    let mut results = Vec::new();
    for (file_index, project_path) in files.iter().enumerate() {
        println!(
            "Processing file {} of {}: {}",
            file_index + 1,
            files.len(),
            project_path.path.display(PathStyle::Posix)
        );
        // Files that fail to open are logged and skipped rather than aborting
        // the whole run.
        let Some((lsp_open_handle, buffer)) =
            open_buffer_with_language_server(&project, &worktree, &project_path.path, cx)
                .await
                .log_err()
        else {
            continue;
        };
        lsp_open_handles.push(lsp_open_handle);

        let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
        let full_range = 0..snapshot.len();
        let references = references_in_range(
            full_range,
            &snapshot.text(),
            ReferenceRegion::Nearby,
            &snapshot,
        );

        // Shadowing: `index` here is the locked index state, scoped to this
        // file's processing.
        let index = index.read_with(cx, |index, _cx| index.state().clone())?;
        let index = index.lock().await;
        for reference in references {
            let query_point = snapshot.offset_to_point(reference.range.start);
            // Restrict context gathering to this one reference so each result
            // scores retrieval for exactly one identifier occurrence.
            let mut single_reference_map = HashMap::default();
            single_reference_map.insert(reference.identifier.clone(), vec![reference.clone()]);
            let edit_prediction_context = EditPredictionContext::gather_context_with_references_fn(
                query_point,
                &snapshot,
                &zeta2::DEFAULT_EXCERPT_OPTIONS,
                Some(&index),
                |_, _, _| single_reference_map,
            );

            let Some(edit_prediction_context) = edit_prediction_context else {
                let result = RetrievalStatsResult {
                    identifier: reference.identifier,
                    point: query_point,
                    outcome: RetrievalStatsOutcome::NoExcerpt,
                };
                write!(output, "{:?}\n\n", result)?;
                results.push(result);
                continue;
            };

            // Resolve each scored declaration to a (path, point-range) pair so
            // it can be compared against LSP definition targets.
            let mut retrieved_definitions = Vec::new();
            for scored_declaration in edit_prediction_context.declarations {
                match &scored_declaration.declaration {
                    Declaration::File {
                        project_entry_id,
                        declaration,
                    } => {
                        let Some(path) = worktree.read_with(cx, |worktree, _cx| {
                            worktree
                                .entry_for_id(*project_entry_id)
                                .map(|entry| entry.path.clone())
                        })?
                        else {
                            log::error!("bug: file project entry not found");
                            continue;
                        };
                        let project_path = ProjectPath {
                            worktree_id,
                            path: path.clone(),
                        };
                        // File-backed declarations store byte offsets; open the
                        // buffer to convert them into points.
                        let buffer = project
                            .update(cx, |project, cx| project.open_buffer(project_path, cx))?
                            .await?;
                        let rope = buffer.read_with(cx, |buffer, _cx| buffer.as_rope().clone())?;
                        retrieved_definitions.push((
                            path,
                            rope.offset_to_point(declaration.item_range.start)
                                ..rope.offset_to_point(declaration.item_range.end),
                            scored_declaration.scores.declaration,
                            scored_declaration.scores.retrieval,
                        ));
                    }
                    Declaration::Buffer {
                        project_entry_id,
                        rope,
                        declaration,
                        ..
                    } => {
                        let Some(path) = worktree.read_with(cx, |worktree, _cx| {
                            worktree
                                .entry_for_id(*project_entry_id)
                                .map(|entry| entry.path.clone())
                        })?
                        else {
                            log::error!("bug: buffer project entry not found");
                            continue;
                        };
                        retrieved_definitions.push((
                            path,
                            rope.offset_to_point(declaration.item_range.start)
                                ..rope.offset_to_point(declaration.item_range.end),
                            scored_declaration.scores.declaration,
                            scored_declaration.scores.retrieval,
                        ));
                    }
                }
            }
            // Rank by retrieval score, best first.
            retrieved_definitions
                .sort_by_key(|(_, _, _, retrieval_score)| Reverse(OrderedFloat(*retrieval_score)));

            // TODO: Consider still checking language server in this case, or having a mode for
            // this. For now assuming that the purpose of this is to refine the ranking rather than
            // refining whether the definition is present at all.
            if retrieved_definitions.is_empty() {
                continue;
            }

            // TODO: Rename declaration to definition in edit_prediction_context?
            let lsp_result = project
                .update(cx, |project, cx| {
                    project.definitions(&buffer, reference.range.start, cx)
                })?
                .await;
            match lsp_result {
                Ok(lsp_definitions) => {
                    let lsp_definitions = lsp_definitions
                        .unwrap_or_default()
                        .into_iter()
                        .filter_map(|definition| {
                            definition
                                .target
                                .buffer
                                .read_with(cx, |buffer, _cx| {
                                    Some((
                                        buffer.file()?.path().clone(),
                                        definition.target.range.to_point(&buffer),
                                    ))
                                })
                                .ok()?
                        })
                        .collect::<Vec<_>>();

                    let result = RetrievalStatsResult {
                        identifier: reference.identifier,
                        point: query_point,
                        outcome: RetrievalStatsOutcome::Success {
                            // For each LSP definition, the rank of the first
                            // retrieved definition that contains it (None if
                            // no retrieved definition matches).
                            matches: lsp_definitions
                                .iter()
                                .map(|(path, range)| {
                                    retrieved_definitions.iter().position(
                                        |(retrieved_path, retrieved_range, _, _)| {
                                            path == retrieved_path
                                                && retrieved_range.contains_inclusive(&range)
                                        },
                                    )
                                })
                                .collect(),
                            lsp_definitions,
                            retrieved_definitions,
                        },
                    };
                    write!(output, "{:?}\n\n", result)?;
                    results.push(result);
                }
                Err(err) => {
                    let result = RetrievalStatsResult {
                        identifier: reference.identifier,
                        point: query_point,
                        outcome: RetrievalStatsOutcome::LanguageServerError {
                            message: err.to_string(),
                        },
                    };
                    write!(output, "{:?}\n\n", result)?;
                    results.push(result);
                }
            }
        }
    }

    // Aggregate the per-reference outcomes into summary counters.
    let mut no_excerpt_count = 0;
    let mut error_count = 0;
    let mut definitions_count = 0;
    let mut top_match_count = 0;
    let mut non_top_match_count = 0;
    let mut ranking_involved_count = 0;
    let mut ranking_involved_top_match_count = 0;
    let mut ranking_involved_non_top_match_count = 0;
    for result in &results {
        match &result.outcome {
            RetrievalStatsOutcome::NoExcerpt => no_excerpt_count += 1,
            RetrievalStatsOutcome::LanguageServerError { .. } => error_count += 1,
            RetrievalStatsOutcome::Success {
                matches,
                retrieved_definitions,
                ..
            } => {
                definitions_count += 1;
                let top_matches = matches.contains(&Some(0));
                if top_matches {
                    top_match_count += 1;
                }
                // NOTE(review): `*index != Some(0)` is also true for `None`
                // entries, so references whose LSP definition was never
                // retrieved at all get counted as "non-top matches" — confirm
                // whether `index.is_some_and(|i| i != 0)` was intended.
                let non_top_matches = !top_matches && matches.iter().any(|index| *index != Some(0));
                if non_top_matches {
                    non_top_match_count += 1;
                }
                // Ranking only "matters" when more than one candidate competed.
                if retrieved_definitions.len() > 1 {
                    ranking_involved_count += 1;
                    if top_matches {
                        ranking_involved_top_match_count += 1;
                    }
                    if non_top_matches {
                        ranking_involved_non_top_match_count += 1;
                    }
                }
            }
        }
    }

    println!("\nStats:\n");
    println!("No Excerpt: {}", no_excerpt_count);
    println!("Language Server Error: {}", error_count);
    println!("Definitions: {}", definitions_count);
    println!("Top Match: {}", top_match_count);
    println!("Non-Top Match: {}", non_top_match_count);
    println!("Ranking Involved: {}", ranking_involved_count);
    println!(
        "Ranking Involved Top Match: {}",
        ranking_involved_top_match_count
    );
    println!(
        "Ranking Involved Non-Top Match: {}",
        ranking_involved_non_top_match_count
    );

    Ok("".to_string())
}
615
/// Outcome for a single identifier reference examined by [`retrieval_stats`].
/// Fields marked `dead_code` are only read via the `Debug` output written to
/// `retrieval-stats.txt`.
#[derive(Debug)]
struct RetrievalStatsResult {
    #[allow(dead_code)]
    identifier: Identifier,
    #[allow(dead_code)]
    point: Point,
    outcome: RetrievalStatsOutcome,
}
624
/// How a single reference fared in [`retrieval_stats`].
#[derive(Debug)]
enum RetrievalStatsOutcome {
    /// No excerpt could be gathered around the reference.
    NoExcerpt,
    /// The language server's definitions request failed.
    LanguageServerError {
        #[allow(dead_code)]
        message: String,
    },
    /// Retrieval produced candidates that could be compared against the LSP.
    Success {
        // For each LSP definition: the rank of the retrieved definition that
        // contains it, or `None` if no retrieved candidate matched.
        matches: Vec<Option<usize>>,
        #[allow(dead_code)]
        lsp_definitions: Vec<(Arc<RelPath>, Range<Point>)>,
        // (path, range, declaration score, retrieval score), best-ranked first.
        retrieved_definitions: Vec<(Arc<RelPath>, Range<Point>, f32, f32)>,
    },
}
639
640pub async fn open_buffer(
641 project: &Entity<Project>,
642 worktree: &Entity<Worktree>,
643 path: &RelPath,
644 cx: &mut AsyncApp,
645) -> Result<Entity<Buffer>> {
646 let project_path = worktree.read_with(cx, |worktree, _cx| ProjectPath {
647 worktree_id: worktree.id(),
648 path: path.into(),
649 })?;
650
651 project
652 .update(cx, |project, cx| project.open_buffer(project_path, cx))?
653 .await
654}
655
656pub async fn open_buffer_with_language_server(
657 project: &Entity<Project>,
658 worktree: &Entity<Worktree>,
659 path: &RelPath,
660 cx: &mut AsyncApp,
661) -> Result<(Entity<Entity<Buffer>>, Entity<Buffer>)> {
662 let buffer = open_buffer(project, worktree, path, cx).await?;
663
664 let (lsp_open_handle, path_style) = project.update(cx, |project, cx| {
665 (
666 project.register_buffer_with_language_servers(&buffer, cx),
667 project.path_style(cx),
668 )
669 })?;
670
671 let log_prefix = path.display(path_style);
672 wait_for_lang_server(&project, &buffer, log_prefix.into_owned(), cx).await?;
673
674 Ok((lsp_open_handle, buffer))
675}
676
// TODO: Dedupe with similar function in crates/eval/src/instance.rs
/// Returns a task that resolves once a language server attached to `buffer`
/// reports `DiskBasedDiagnosticsFinished`, or fails on timeout.
///
/// If no server is attached yet, waits up to 1 second for one to be added
/// (buffers with no associated server abort quickly that way); the overall
/// wait for diagnostics is capped at 5 minutes. `log_prefix` is prepended to
/// all progress output.
pub fn wait_for_lang_server(
    project: &Entity<Project>,
    buffer: &Entity<Buffer>,
    log_prefix: String,
    cx: &mut AsyncApp,
) -> Task<Result<()>> {
    println!("{}⏵ Waiting for language server", log_prefix);

    // Signalled when disk-based diagnostics finish.
    let (mut tx, mut rx) = mpsc::channel(1);

    let lsp_store = project
        .read_with(cx, |project, _| project.lsp_store())
        .unwrap();

    let has_lang_server = buffer
        .update(cx, |buffer, cx| {
            lsp_store.update(cx, |lsp_store, cx| {
                lsp_store
                    .language_servers_for_local_buffer(buffer, cx)
                    .next()
                    .is_some()
            })
        })
        .unwrap_or(false);

    if has_lang_server {
        // Saving the buffer appears to prod the server into a diagnostics
        // pass (mirrored in the LanguageServerAdded handler below) —
        // NOTE(review): confirm this is why the save is needed.
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .unwrap()
            .detach();
    }
    // Signalled when a language server is added after the fact.
    let (mut added_tx, mut added_rx) = mpsc::channel(1);

    // Subscriptions are held until the spawned task completes (dropped at the
    // end of the async block) so events keep flowing while we wait.
    let subscriptions = [
        cx.subscribe(&lsp_store, {
            let log_prefix = log_prefix.clone();
            move |_, event, _| {
                // Surface server work-progress messages for visibility.
                if let project::LspStoreEvent::LanguageServerUpdate {
                    message:
                        client::proto::update_language_server::Variant::WorkProgress(
                            client::proto::LspWorkProgress {
                                message: Some(message),
                                ..
                            },
                        ),
                    ..
                } = event
                {
                    println!("{}⟲ {message}", log_prefix)
                }
            }
        }),
        cx.subscribe(project, {
            let buffer = buffer.clone();
            move |project, event, cx| match event {
                project::Event::LanguageServerAdded(_, _, _) => {
                    let buffer = buffer.clone();
                    project
                        .update(cx, |project, cx| project.save_buffer(buffer, cx))
                        .detach();
                    added_tx.try_send(()).ok();
                }
                project::Event::DiskBasedDiagnosticsFinished { .. } => {
                    tx.try_send(()).ok();
                }
                _ => {}
            }
        }),
    ];

    cx.spawn(async move |cx| {
        if !has_lang_server {
            // some buffers never have a language server, so this aborts quickly in that case.
            let timeout = cx.background_executor().timer(Duration::from_secs(1));
            futures::select! {
                _ = added_rx.next() => {},
                _ = timeout.fuse() => {
                    anyhow::bail!("Waiting for language server add timed out after 1 second");
                }
            };
        }
        let timeout = cx.background_executor().timer(Duration::from_secs(60 * 5));
        let result = futures::select! {
            _ = rx.next() => {
                println!("{}⚑ Language server idle", log_prefix);
                anyhow::Ok(())
            },
            _ = timeout.fuse() => {
                anyhow::bail!("LSP wait timed out after 5 minutes");
            }
        };
        drop(subscriptions);
        result
    })
}
773
/// CLI entry point: parses arguments, boots a headless gpui application, and
/// dispatches the chosen subcommand. On success the command's output string is
/// printed to stdout and the app quits; on failure the error is printed to
/// stderr and the process exits with status 1.
fn main() {
    // Route logs to stderr so stdout carries only the command's output.
    zlog::init();
    zlog::init_output_stderr();
    let args = ZetaCliArgs::parse();
    let http_client = Arc::new(ReqwestClient::new());
    let app = Application::headless().with_http_client(http_client);

    app.run(move |cx| {
        let app_state = Arc::new(headless::init(cx));
        cx.spawn(async move |cx| {
            // Each arm yields the string to print on success.
            let result = match args.command {
                Commands::Zeta2Context {
                    zeta2_args,
                    context_args,
                } => match get_context(Some(zeta2_args), context_args, &app_state, cx).await {
                    // Passing Some(zeta2_args) guarantees a Zeta2 result.
                    Ok(GetContextOutput::Zeta1 { .. }) => unreachable!(),
                    Ok(GetContextOutput::Zeta2(output)) => Ok(output),
                    Err(err) => Err(err),
                },
                Commands::Context(context_args) => {
                    match get_context(None, context_args, &app_state, cx).await {
                        Ok(GetContextOutput::Zeta1(output)) => {
                            Ok(serde_json::to_string_pretty(&output.body).unwrap())
                        }
                        // Passing None guarantees a Zeta1 result.
                        Ok(GetContextOutput::Zeta2 { .. }) => unreachable!(),
                        Err(err) => Err(err),
                    }
                }
                Commands::Predict {
                    predict_edits_body,
                    context_args,
                } => {
                    cx.spawn(async move |cx| {
                        let app_version = cx.update(|cx| AppVersion::global(cx))?;
                        // Prediction hits the LLM service, so sign in and
                        // obtain an API token first.
                        app_state.client.sign_in(true, cx).await?;
                        let llm_token = LlmApiToken::default();
                        llm_token.refresh(&app_state.client).await?;

                        // The request body comes either pre-built from a
                        // file/stdin, or from gathering zeta1 context now.
                        let predict_edits_body =
                            if let Some(predict_edits_body) = predict_edits_body {
                                serde_json::from_str(&predict_edits_body.read_to_string().await?)?
                            } else if let Some(context_args) = context_args {
                                match get_context(None, context_args, &app_state, cx).await? {
                                    GetContextOutput::Zeta1(output) => output.body,
                                    GetContextOutput::Zeta2 { .. } => unreachable!(),
                                }
                            } else {
                                return Err(anyhow!(
                                    "Expected either --predict-edits-body-file \
                                    or the required args of the `context` command."
                                ));
                            };

                        let (response, _usage) =
                            Zeta::perform_predict_edits(PerformPredictEditsParams {
                                client: app_state.client.clone(),
                                llm_token,
                                app_version,
                                body: predict_edits_body,
                            })
                            .await?;

                        Ok(response.output_excerpt)
                    })
                    .await
                }
                Commands::RetrievalStats {
                    worktree,
                    file_indexing_parallelism,
                } => retrieval_stats(worktree, file_indexing_parallelism, app_state, cx).await,
            };
            match result {
                Ok(output) => {
                    println!("{}", output);
                    let _ = cx.update(|cx| cx.quit());
                }
                Err(e) => {
                    eprintln!("Failed: {:?}", e);
                    exit(1);
                }
            }
        })
        .detach();
    });
}