lambda.json

{
  "name": "Lambda",
  "id": "lambda",
  "type": "openai",
  "api_key": "$LAMBDA_API_KEY",
  "api_endpoint": "https://api.lambda.ai/v1",
  "default_large_model_id": "qwen25-coder-32b-instruct",
  "default_small_model_id": "llama3.1-8b-instruct",
  "default_headers": {
    "Authorization": "Bearer $LAMBDA_API_KEY"
  },
  "models": [
    {
      "id": "deepseek-r1-0528",
      "name": "DeepSeek R1 0528 FP8",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 2.18,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 164000,
      "default_max_tokens": 8192,
      "can_reason": true,
      "has_reasoning_effort": false,
      "supports_attachments": false
    },
    {
      "id": "deepseek-r1-671b",
      "name": "DeepSeek R1 671B",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 2.18,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 164000,
      "default_max_tokens": 8192,
      "can_reason": true,
      "has_reasoning_effort": false,
      "supports_attachments": false
    },
    {
      "id": "llama-4-maverick-17b-128e-instruct-fp8",
      "name": "Llama 4 Maverick 17B",
      "cost_per_1m_in": 0.18,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1000000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama3.1-405b-instruct-fp8",
      "name": "Llama 3.1 405B Instruct FP8",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 0.8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama3.3-70b-instruct-fp8",
      "name": "Llama 3.3 70B Instruct FP8",
      "cost_per_1m_in": 0.12,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama3.1-70b-instruct-fp8",
      "name": "Llama 3.1 70B Instruct FP8",
      "cost_per_1m_in": 0.12,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama3.1-8b-instruct",
      "name": "Llama 3.1 8B Instruct",
      "cost_per_1m_in": 0.025,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama3.2-3b-instruct",
      "name": "Llama 3.2 3B Instruct",
      "cost_per_1m_in": 0.025,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama3.2-11b-vision-instruct",
      "name": "Llama 3.2 11B Vision Instruct",
      "cost_per_1m_in": 0.025,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": true
    },
    {
      "id": "hermes3-8b",
      "name": "Hermes 3 8B",
      "cost_per_1m_in": 0.025,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "hermes3-70b",
      "name": "Hermes 3 70B",
      "cost_per_1m_in": 0.12,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "hermes3-405b",
      "name": "Hermes 3 405B",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 0.8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "lfm-40b",
      "name": "LFM 40B",
      "cost_per_1m_in": 0.18,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 65536,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "qwen25-coder-32b-instruct",
      "name": "Qwen 2.5 Coder 32B Instruct",
      "cost_per_1m_in": 0.12,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama3.1-nemotron-70b-instruct-fp8",
      "name": "Llama 3.1 Nemotron 70B Instruct FP8",
      "cost_per_1m_in": 0.12,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "deepseek-llama3.3-70b",
      "name": "DeepSeek Llama 3.3 70B",
      "cost_per_1m_in": 0.12,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama-4-scout-17b-16e-instruct",
      "name": "Llama 4 Scout 17B",
      "cost_per_1m_in": 0.18,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "deepseek-v3-0324",
      "name": "DeepSeek V3 0324",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 2.18,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 164000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "lfm-7b",
      "name": "LFM 7B",
      "cost_per_1m_in": 0.025,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 65536,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "qwen3-32b-fp8",
      "name": "Qwen 3 32B FP8",
      "cost_per_1m_in": 0.12,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false
    }
  ]
}
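
A minimal sketch of how a client might consume this file, assuming the endpoint is OpenAI-compatible (as "type": "openai" suggests), that "$LAMBDA_API_KEY" is resolved from the environment, and that a model id is picked from the catalog above; the consuming tool's actual loader may differ.

# example.py - hypothetical usage sketch, not the consuming tool's real loader
import json
import os

from openai import OpenAI

# Load the provider config shown above.
with open("lambda.json") as f:
    provider = json.load(f)

# Resolve the "$LAMBDA_API_KEY" placeholder from the environment.
client = OpenAI(
    base_url=provider["api_endpoint"],        # https://api.lambda.ai/v1
    api_key=os.environ["LAMBDA_API_KEY"],
)

# Look up the default small model's entry to reuse its token cap.
small_id = provider["default_small_model_id"]  # llama3.1-8b-instruct
model_cfg = next(m for m in provider["models"] if m["id"] == small_id)

resp = client.chat.completions.create(
    model=small_id,
    messages=[{"role": "user", "content": "Say hello."}],
    max_tokens=model_cfg["default_max_tokens"],  # 8192
)
print(resp.choices[0].message.content)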