// request.rs

  1use crate::role::Role;
  2use serde::{Deserialize, Serialize};
  3
/// A single chat message in a provider-agnostic language-model request.
///
/// Converted to the provider-specific wire format by the
/// `LanguageModelRequest::into_*` methods.
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Hash)]
pub struct LanguageModelRequestMessage {
    // Author of the message: User, Assistant, or System.
    pub role: Role,
    // Plain-text body of the message.
    pub content: String,
}
  9
/// A provider-agnostic chat-completion request.
///
/// Holds the conversation plus common sampling parameters; the `into_*`
/// methods translate it into the OpenAI, Google, or Anthropic request shape.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub struct LanguageModelRequest {
    // Conversation history, in order.
    pub messages: Vec<LanguageModelRequestMessage>,
    // Stop sequences that should terminate generation.
    pub stop: Vec<String>,
    // Sampling temperature (widened to f64 for Google in `into_google`).
    pub temperature: f32,
}
 16
 17impl LanguageModelRequest {
 18    pub fn into_open_ai(self, model: String) -> open_ai::Request {
 19        open_ai::Request {
 20            model,
 21            messages: self
 22                .messages
 23                .into_iter()
 24                .map(|msg| match msg.role {
 25                    Role::User => open_ai::RequestMessage::User {
 26                        content: msg.content,
 27                    },
 28                    Role::Assistant => open_ai::RequestMessage::Assistant {
 29                        content: Some(msg.content),
 30                        tool_calls: Vec::new(),
 31                    },
 32                    Role::System => open_ai::RequestMessage::System {
 33                        content: msg.content,
 34                    },
 35                })
 36                .collect(),
 37            stream: true,
 38            stop: self.stop,
 39            temperature: self.temperature,
 40            max_tokens: None,
 41            tools: Vec::new(),
 42            tool_choice: None,
 43        }
 44    }
 45
 46    pub fn into_google(self, model: String) -> google_ai::GenerateContentRequest {
 47        google_ai::GenerateContentRequest {
 48            model,
 49            contents: self
 50                .messages
 51                .into_iter()
 52                .map(|msg| google_ai::Content {
 53                    parts: vec![google_ai::Part::TextPart(google_ai::TextPart {
 54                        text: msg.content,
 55                    })],
 56                    role: match msg.role {
 57                        Role::User => google_ai::Role::User,
 58                        Role::Assistant => google_ai::Role::Model,
 59                        Role::System => google_ai::Role::User, // Google AI doesn't have a system role
 60                    },
 61                })
 62                .collect(),
 63            generation_config: Some(google_ai::GenerationConfig {
 64                candidate_count: Some(1),
 65                stop_sequences: Some(self.stop),
 66                max_output_tokens: None,
 67                temperature: Some(self.temperature as f64),
 68                top_p: None,
 69                top_k: None,
 70            }),
 71            safety_settings: None,
 72        }
 73    }
 74
 75    pub fn into_anthropic(self, model: String) -> anthropic::Request {
 76        let mut new_messages: Vec<LanguageModelRequestMessage> = Vec::new();
 77        let mut system_message = String::new();
 78
 79        for message in self.messages {
 80            if message.content.is_empty() {
 81                continue;
 82            }
 83
 84            match message.role {
 85                Role::User | Role::Assistant => {
 86                    if let Some(last_message) = new_messages.last_mut() {
 87                        if last_message.role == message.role {
 88                            last_message.content.push_str("\n\n");
 89                            last_message.content.push_str(&message.content);
 90                            continue;
 91                        }
 92                    }
 93
 94                    new_messages.push(message);
 95                }
 96                Role::System => {
 97                    if !system_message.is_empty() {
 98                        system_message.push_str("\n\n");
 99                    }
100                    system_message.push_str(&message.content);
101                }
102            }
103        }
104
105        anthropic::Request {
106            model,
107            messages: new_messages
108                .into_iter()
109                .filter_map(|message| {
110                    Some(anthropic::Message {
111                        role: match message.role {
112                            Role::User => anthropic::Role::User,
113                            Role::Assistant => anthropic::Role::Assistant,
114                            Role::System => return None,
115                        },
116                        content: vec![anthropic::Content::Text {
117                            text: message.content,
118                        }],
119                    })
120                })
121                .collect(),
122            max_tokens: 4092,
123            system: Some(system_message),
124            tools: Vec::new(),
125            tool_choice: None,
126            metadata: None,
127            stop_sequences: Vec::new(),
128            temperature: None,
129            top_k: None,
130            top_p: None,
131        }
132    }
133}
134
/// A single message received back from a language-model provider.
///
/// Both fields are optional because streamed/partial responses may omit
/// either the role or the content.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct LanguageModelResponseMessage {
    // Author role, if the provider reported one.
    pub role: Option<Role>,
    // Text content, if any was produced.
    pub content: Option<String>,
}