Add support for GPT-4o in Copilot Chat (#16446)

Created by Ryan Hawkins

Release Notes:
- Added support for GPT-4o in Copilot Chat.

Change summary

crates/copilot/src/copilot_chat.rs                 | 6 ++++++
crates/language_model/src/provider/copilot_chat.rs | 1 +
2 files changed, 7 insertions(+)

Detailed changes

crates/copilot/src/copilot_chat.rs 🔗

@@ -31,6 +31,8 @@ pub enum Role {
 #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
 pub enum Model {
     #[default]
+    #[serde(alias = "gpt-4o", rename = "gpt-4o-2024-05-13")]
+    Gpt4o,
     #[serde(alias = "gpt-4", rename = "gpt-4")]
     Gpt4,
     #[serde(alias = "gpt-3.5-turbo", rename = "gpt-3.5-turbo")]
@@ -40,6 +42,7 @@ pub enum Model {
 impl Model {
     pub fn from_id(id: &str) -> Result<Self> {
         match id {
+            "gpt-4o" => Ok(Self::Gpt4o),
             "gpt-4" => Ok(Self::Gpt4),
             "gpt-3.5-turbo" => Ok(Self::Gpt3_5Turbo),
             _ => Err(anyhow!("Invalid model id: {}", id)),
@@ -50,6 +53,7 @@ impl Model {
         match self {
             Self::Gpt3_5Turbo => "gpt-3.5-turbo",
             Self::Gpt4 => "gpt-4",
+            Self::Gpt4o => "gpt-4o",
         }
     }
 
@@ -57,11 +61,13 @@ impl Model {
         match self {
             Self::Gpt3_5Turbo => "GPT-3.5",
             Self::Gpt4 => "GPT-4",
+            Self::Gpt4o => "GPT-4o",
         }
     }
 
     pub fn max_token_count(&self) -> usize {
         match self {
+            Self::Gpt4o => 128000,
             Self::Gpt4 => 8192,
             Self::Gpt3_5Turbo => 16385,
         }

crates/language_model/src/provider/copilot_chat.rs 🔗

@@ -180,6 +180,7 @@ impl LanguageModel for CopilotChatLanguageModel {
         cx: &AppContext,
     ) -> BoxFuture<'static, Result<usize>> {
         let model = match self.model {
+            CopilotChatModel::Gpt4o => open_ai::Model::FourOmni,
             CopilotChatModel::Gpt4 => open_ai::Model::Four,
             CopilotChatModel::Gpt3_5Turbo => open_ai::Model::ThreePointFiveTurbo,
         };