fix(openai): fix openai default max tokens settings

Created by Andrey Nering

Change summary

internal/providers/configs/openai.json | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)

Detailed changes

internal/providers/configs/openai.json 🔗

@@ -57,7 +57,7 @@
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0.5,
       "context_window": 1047576,
-      "default_max_tokens": 50000,
+      "default_max_tokens": 16384,
       "can_reason": false,
       "supports_attachments": true
     },
@@ -69,7 +69,7 @@
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0.09999999999999999,
       "context_window": 1047576,
-      "default_max_tokens": 50000,
+      "default_max_tokens": 16384,
       "can_reason": false,
       "supports_attachments": true
     },
@@ -81,7 +81,7 @@
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0.024999999999999998,
       "context_window": 1047576,
-      "default_max_tokens": 50000,
+      "default_max_tokens": 16384,
       "can_reason": false,
       "supports_attachments": true
     },
@@ -119,7 +119,7 @@
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 1.25,
       "context_window": 128000,
-      "default_max_tokens": 15000,
+      "default_max_tokens": 8192,
       "can_reason": false,
       "supports_attachments": true
     },
@@ -131,7 +131,7 @@
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0.075,
       "context_window": 128000,
-      "default_max_tokens": 14000,
+      "default_max_tokens": 8192,
       "can_reason": false,
       "reasoning_effort": "",
       "supports_attachments": true