removing Lambda inference due to product sunset (#79)

Created by Benson Schliesser

* removing Lambda inference due to product sunset

* deleted files

Change summary

internal/providers/configs/lambda.json | 253 ---------------------------
internal/providers/providers.go        |   8 
pkg/catwalk/provider.go                |   2 
3 files changed, 263 deletions(-)

Detailed changes

internal/providers/configs/lambda.json 🔗

@@ -1,253 +0,0 @@
-{
-  "name": "Lambda",
-  "id": "lambda",
-  "type": "openai",
-  "api_key": "$LAMBDA_API_KEY",
-  "api_endpoint": "https://api.lambda.ai/v1",
-  "default_large_model_id": "qwen25-coder-32b-instruct",
-  "default_small_model_id": "llama3.2-3b-instruct",
-  "models": [
-    {
-      "id": "deepseek-r1-0528",
-      "name": "DeepSeek R1 0528 FP8",
-      "cost_per_1m_in": 0.5,
-      "cost_per_1m_out": 2.18,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 164000,
-      "default_max_tokens": 8192,
-      "can_reason": true,
-      "has_reasoning_efforts": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "deepseek-r1-671b",
-      "name": "DeepSeek R1 671B",
-      "cost_per_1m_in": 0.5,
-      "cost_per_1m_out": 2.18,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 164000,
-      "default_max_tokens": 8192,
-      "can_reason": true,
-      "has_reasoning_efforts": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "llama-4-maverick-17b-128e-instruct-fp8",
-      "name": "Llama 4 Maverick 17B",
-      "cost_per_1m_in": 0.18,
-      "cost_per_1m_out": 0.6,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 1000000,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "llama3.1-405b-instruct-fp8",
-      "name": "Llama 3.1 405B Instruct FP8",
-      "cost_per_1m_in": 0.8,
-      "cost_per_1m_out": 0.8,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 131000,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "llama3.3-70b-instruct-fp8",
-      "name": "Llama 3.3 70B Instruct FP8",
-      "cost_per_1m_in": 0.12,
-      "cost_per_1m_out": 0.3,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 131000,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "llama3.1-70b-instruct-fp8",
-      "name": "Llama 3.1 70B Instruct FP8",
-      "cost_per_1m_in": 0.12,
-      "cost_per_1m_out": 0.3,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 131000,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "llama3.1-8b-instruct",
-      "name": "Llama 3.1 8B Instruct",
-      "cost_per_1m_in": 0.025,
-      "cost_per_1m_out": 0.04,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 131000,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "llama3.2-3b-instruct",
-      "name": "Llama 3.2 3B Instruct",
-      "cost_per_1m_in": 0.025,
-      "cost_per_1m_out": 0.04,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 131000,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "llama3.2-11b-vision-instruct",
-      "name": "Llama 3.2 11B Vision Instruct",
-      "cost_per_1m_in": 0.025,
-      "cost_per_1m_out": 0.04,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 131000,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": true
-    },
-    {
-      "id": "hermes3-8b",
-      "name": "Hermes 3 8B",
-      "cost_per_1m_in": 0.025,
-      "cost_per_1m_out": 0.04,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 131000,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "hermes3-70b",
-      "name": "Hermes 3 70B",
-      "cost_per_1m_in": 0.12,
-      "cost_per_1m_out": 0.3,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 131000,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "hermes3-405b",
-      "name": "Hermes 3 405B",
-      "cost_per_1m_in": 0.8,
-      "cost_per_1m_out": 0.8,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 131000,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "lfm-40b",
-      "name": "LFM 40B",
-      "cost_per_1m_in": 0.18,
-      "cost_per_1m_out": 0.6,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 65536,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "qwen25-coder-32b-instruct",
-      "name": "Qwen 2.5 Coder 32B Instruct",
-      "cost_per_1m_in": 0.12,
-      "cost_per_1m_out": 0.3,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 131000,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "llama3.1-nemotron-70b-instruct-fp8",
-      "name": "Llama 3.1 Nemotron 70B Instruct FP8",
-      "cost_per_1m_in": 0.12,
-      "cost_per_1m_out": 0.3,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 131000,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "deepseek-llama3.3-70b",
-      "name": "DeepSeek Llama 3.3 70B",
-      "cost_per_1m_in": 0.12,
-      "cost_per_1m_out": 0.3,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 131000,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "llama-4-scout-17b-16e-instruct",
-      "name": "Llama 4 Scout 17B",
-      "cost_per_1m_in": 0.18,
-      "cost_per_1m_out": 0.6,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 131000,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "deepseek-v3-0324",
-      "name": "DeepSeek V3 0324",
-      "cost_per_1m_in": 0.5,
-      "cost_per_1m_out": 2.18,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 164000,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "lfm-7b",
-      "name": "LFM 7B",
-      "cost_per_1m_in": 0.025,
-      "cost_per_1m_out": 0.04,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 65536,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "qwen3-32b-fp8",
-      "name": "Qwen 3 32B FP8",
-      "cost_per_1m_in": 0.12,
-      "cost_per_1m_out": 0.3,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 131000,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    }
-  ]
-}

internal/providers/providers.go 🔗

@@ -39,9 +39,6 @@ var bedrockConfig []byte
 //go:embed configs/groq.json
 var groqConfig []byte
 
-//go:embed configs/lambda.json
-var lambdaConfig []byte
-
 //go:embed configs/cerebras.json
 var cerebrasConfig []byte
 
@@ -71,7 +68,6 @@ var providerRegistry = []ProviderFunc{
 	zAIProvider,
 	groqProvider,
 	openRouterProvider,
-	lambdaProvider,
 	cerebrasProvider,
 	veniceProvider,
 	chutesProvider,
@@ -137,10 +133,6 @@ func groqProvider() catwalk.Provider {
 	return loadProviderFromConfig(groqConfig)
 }
 
-func lambdaProvider() catwalk.Provider {
-	return loadProviderFromConfig(lambdaConfig)
-}
-
 func cerebrasProvider() catwalk.Provider {
 	return loadProviderFromConfig(cerebrasConfig)
 }

pkg/catwalk/provider.go 🔗

@@ -28,7 +28,6 @@ const (
 	InferenceProviderZAI         InferenceProvider = "zai"
 	InferenceProviderGROQ        InferenceProvider = "groq"
 	InferenceProviderOpenRouter  InferenceProvider = "openrouter"
-	InferenceProviderLambda      InferenceProvider = "lambda"
 	InferenceProviderCerebras    InferenceProvider = "cerebras"
 	InferenceProviderVenice      InferenceProvider = "venice"
 	InferenceProviderChutes      InferenceProvider = "chutes"
@@ -77,7 +76,6 @@ func KnownProviders() []InferenceProvider {
 		InferenceProviderZAI,
 		InferenceProviderGROQ,
 		InferenceProviderOpenRouter,
-		InferenceProviderLambda,
 		InferenceProviderCerebras,
 		InferenceProviderVenice,
 		InferenceProviderChutes,