lambda.json

{
  "name": "Lambda",
  "id": "lambda",
  "type": "openai",
  "api_key": "$LAMBDA_API_KEY",
  "api_endpoint": "https://api.lambda.ai/v1",
  "default_large_model_id": "qwen25-coder-32b-instruct",
  "default_small_model_id": "llama3.2-3b-instruct",
  "models": [
    {
      "id": "deepseek-r1-0528",
      "name": "DeepSeek R1 0528 FP8",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 2.18,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 164000,
      "default_max_tokens": 8192,
      "can_reason": true,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "deepseek-r1-671b",
      "name": "DeepSeek R1 671B",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 2.18,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 164000,
      "default_max_tokens": 8192,
      "can_reason": true,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "llama-4-maverick-17b-128e-instruct-fp8",
      "name": "Llama 4 Maverick 17B",
      "cost_per_1m_in": 0.18,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1000000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama3.1-405b-instruct-fp8",
      "name": "Llama 3.1 405B Instruct FP8",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 0.8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama3.3-70b-instruct-fp8",
      "name": "Llama 3.3 70B Instruct FP8",
      "cost_per_1m_in": 0.12,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama3.1-70b-instruct-fp8",
      "name": "Llama 3.1 70B Instruct FP8",
      "cost_per_1m_in": 0.12,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama3.1-8b-instruct",
      "name": "Llama 3.1 8B Instruct",
      "cost_per_1m_in": 0.025,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama3.2-3b-instruct",
      "name": "Llama 3.2 3B Instruct",
      "cost_per_1m_in": 0.025,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama3.2-11b-vision-instruct",
      "name": "Llama 3.2 11B Vision Instruct",
      "cost_per_1m_in": 0.025,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": true
    },
    {
      "id": "hermes3-8b",
      "name": "Hermes 3 8B",
      "cost_per_1m_in": 0.025,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "hermes3-70b",
      "name": "Hermes 3 70B",
      "cost_per_1m_in": 0.12,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "hermes3-405b",
      "name": "Hermes 3 405B",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 0.8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "lfm-40b",
      "name": "LFM 40B",
      "cost_per_1m_in": 0.18,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 65536,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "qwen25-coder-32b-instruct",
      "name": "Qwen 2.5 Coder 32B Instruct",
      "cost_per_1m_in": 0.12,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama3.1-nemotron-70b-instruct-fp8",
      "name": "Llama 3.1 Nemotron 70B Instruct FP8",
      "cost_per_1m_in": 0.12,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "deepseek-llama3.3-70b",
      "name": "DeepSeek Llama 3.3 70B",
      "cost_per_1m_in": 0.12,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama-4-scout-17b-16e-instruct",
      "name": "Llama 4 Scout 17B",
      "cost_per_1m_in": 0.18,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "deepseek-v3-0324",
      "name": "DeepSeek V3 0324",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 2.18,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 164000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "lfm-7b",
      "name": "LFM 7B",
      "cost_per_1m_in": 0.025,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 65536,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "qwen3-32b-fp8",
      "name": "Qwen 3 32B FP8",
      "cost_per_1m_in": 0.12,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    }
  ]
}
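
Because the catalog declares "type": "openai", the fields above map directly onto any OpenAI-compatible client: "api_endpoint" becomes the client's base URL, "api_key" is resolved from the LAMBDA_API_KEY environment variable, and the per-model "id" is what you pass as the model name. The Python sketch below illustrates that mapping using the official openai package and the catalog's default_small_model_id; it is an illustrative example only, not part of lambda.json, and assumes the catalog is consumed by such a client.

import os

from openai import OpenAI

# Values taken from the catalog above; only the prompt text is made up.
client = OpenAI(
    api_key=os.environ["LAMBDA_API_KEY"],   # "$LAMBDA_API_KEY" in the catalog
    base_url="https://api.lambda.ai/v1",    # "api_endpoint"
)

response = client.chat.completions.create(
    model="llama3.2-3b-instruct",           # "default_small_model_id"
    messages=[{"role": "user", "content": "Say hello."}],
    max_tokens=8192,                        # the model's "default_max_tokens"
)
print(response.choices[0].message.content)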