1use crate::headless_assistant::send_language_model_request;
2use anyhow::anyhow;
3use gpui::{App, Task};
4use language_model::{
5 LanguageModel, LanguageModelRequest, LanguageModelRequestMessage, MessageContent, Role,
6};
7use std::sync::Arc;
8
/// Inputs for asking a language model to evaluate an agent run:
/// the prompt to send and the model to send it to.
pub struct Judge {
    // Not read anywhere in this file; presumably retained for future
    // diff-based evaluation — TODO confirm before removing.
    #[allow(dead_code)]
    pub original_diff: Option<String>,
    /// Prompt text sent verbatim to the model by `run_with_prompt`;
    /// `None` makes `run_with_prompt` return an error.
    pub original_message: Option<String>,
    /// Model that produces the judgment response.
    pub model: Arc<dyn LanguageModel>,
}
15
16impl Judge {
17 pub fn run_with_prompt(&self, cx: &mut App) -> Task<anyhow::Result<String>> {
18 let Some(prompt) = self.original_message.as_ref() else {
19 return Task::ready(Err(anyhow!("No prompt provided in original_message")));
20 };
21
22 let request = LanguageModelRequest {
23 messages: vec![LanguageModelRequestMessage {
24 role: Role::User,
25 content: vec![MessageContent::Text(prompt.clone())],
26 cache: false,
27 }],
28 temperature: Some(0.0),
29 tools: Vec::new(),
30 stop: Vec::new(),
31 };
32
33 let model = self.model.clone();
34 let request = request.clone();
35 cx.spawn(async move |cx| send_language_model_request(model, request, cx).await)
36 }
37}