request.rs

  1use crate::role::Role;
  2use serde::{Deserialize, Serialize};
  3
/// A single message in a provider-agnostic language-model conversation.
///
/// This is the neutral wire format; the `into_*` conversions on
/// [`LanguageModelRequest`] map it onto each provider's own message type.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct LanguageModelRequestMessage {
    // Who authored this message (user, assistant, or system).
    pub role: Role,
    // Plain-text message body.
    pub content: String,
}
  9
/// A provider-agnostic completion request: the conversation so far plus
/// sampling parameters. Convert to a concrete provider request with
/// `into_open_ai`, `into_google`, or `into_anthropic`.
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct LanguageModelRequest {
    // Conversation history, oldest first.
    pub messages: Vec<LanguageModelRequestMessage>,
    // Sequences at which the model should stop generating.
    pub stop: Vec<String>,
    // Sampling temperature; forwarded to every provider as-is.
    pub temperature: f32,
}
 16
 17impl LanguageModelRequest {
 18    pub fn into_open_ai(self, model: String) -> open_ai::Request {
 19        open_ai::Request {
 20            model,
 21            messages: self
 22                .messages
 23                .into_iter()
 24                .map(|msg| match msg.role {
 25                    Role::User => open_ai::RequestMessage::User {
 26                        content: msg.content,
 27                    },
 28                    Role::Assistant => open_ai::RequestMessage::Assistant {
 29                        content: Some(msg.content),
 30                        tool_calls: Vec::new(),
 31                    },
 32                    Role::System => open_ai::RequestMessage::System {
 33                        content: msg.content,
 34                    },
 35                })
 36                .collect(),
 37            stream: true,
 38            stop: self.stop,
 39            temperature: self.temperature,
 40            tools: Vec::new(),
 41            tool_choice: None,
 42        }
 43    }
 44
 45    pub fn into_google(self, model: String) -> google_ai::GenerateContentRequest {
 46        google_ai::GenerateContentRequest {
 47            model,
 48            contents: self
 49                .messages
 50                .into_iter()
 51                .map(|msg| google_ai::Content {
 52                    parts: vec![google_ai::Part::TextPart(google_ai::TextPart {
 53                        text: msg.content,
 54                    })],
 55                    role: match msg.role {
 56                        Role::User => google_ai::Role::User,
 57                        Role::Assistant => google_ai::Role::Model,
 58                        Role::System => google_ai::Role::User, // Google AI doesn't have a system role
 59                    },
 60                })
 61                .collect(),
 62            generation_config: Some(google_ai::GenerationConfig {
 63                candidate_count: Some(1),
 64                stop_sequences: Some(self.stop),
 65                max_output_tokens: None,
 66                temperature: Some(self.temperature as f64),
 67                top_p: None,
 68                top_k: None,
 69            }),
 70            safety_settings: None,
 71        }
 72    }
 73
 74    pub fn into_anthropic(self, model: String) -> anthropic::Request {
 75        let mut new_messages: Vec<LanguageModelRequestMessage> = Vec::new();
 76        let mut system_message = String::new();
 77
 78        for message in self.messages {
 79            if message.content.is_empty() {
 80                continue;
 81            }
 82
 83            match message.role {
 84                Role::User | Role::Assistant => {
 85                    if let Some(last_message) = new_messages.last_mut() {
 86                        if last_message.role == message.role {
 87                            last_message.content.push_str("\n\n");
 88                            last_message.content.push_str(&message.content);
 89                            continue;
 90                        }
 91                    }
 92
 93                    new_messages.push(message);
 94                }
 95                Role::System => {
 96                    if !system_message.is_empty() {
 97                        system_message.push_str("\n\n");
 98                    }
 99                    system_message.push_str(&message.content);
100                }
101            }
102        }
103
104        anthropic::Request {
105            model,
106            messages: new_messages
107                .into_iter()
108                .filter_map(|message| {
109                    Some(anthropic::RequestMessage {
110                        role: match message.role {
111                            Role::User => anthropic::Role::User,
112                            Role::Assistant => anthropic::Role::Assistant,
113                            Role::System => return None,
114                        },
115                        content: message.content,
116                    })
117                })
118                .collect(),
119            stream: true,
120            max_tokens: 4092,
121            system: system_message,
122        }
123    }
124}
125
/// A single (possibly partial) message received back from a language model.
///
/// Both fields are optional because streamed response deltas may carry only
/// a role, only content, or neither.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct LanguageModelResponseMessage {
    // Author of the message, when the provider reports one.
    pub role: Option<Role>,
    // Text produced so far, when present in this chunk.
    pub content: Option<String>,
}