OpenAI: Fix GPT-4. Only include max_tokens when max_output_tokens provided (#17168)

Created by Peter Tripp

- Fixed GPT-4 request failures caused by incorrect `max_output_tokens` handling: the `max_tokens` field is now only included in the request when `max_output_tokens` is explicitly provided.

Change summary

crates/open_ai/src/open_ai.rs | 6 +-----
1 file changed, 1 insertion(+), 5 deletions(-)

Detailed changes

crates/open_ai/src/open_ai.rs 🔗

@@ -120,14 +120,10 @@ impl Model {
 
     pub fn max_output_tokens(&self) -> Option<u32> {
         match self {
-            Self::ThreePointFiveTurbo => Some(4096),
-            Self::Four => Some(8192),
-            Self::FourTurbo => Some(4096),
-            Self::FourOmni => Some(4096),
-            Self::FourOmniMini => Some(16384),
             Self::Custom {
                 max_output_tokens, ..
             } => *max_output_tokens,
+            _ => None,
         }
     }
 }