chore(crush): baaaack to previous method

Amolith created

Change summary

dot_config/crush/crush.json | 127 +++++++++++++++++++-------------------
1 file changed, 63 insertions(+), 64 deletions(-)

Detailed changes

dot_config/crush/crush.json 🔗

@@ -9,19 +9,23 @@
 		"memory_paths": ["~/.config/crush/rules", "~/.config/AGENTS.md"]
 	},
 	"providers": {
-		"proxy": {
-			"name": "wan",
-			"type": "anthropic",
-			"api_key": "dummy",
-			"base_url": "http://127.0.0.1:8317/",
+		"anthropic": {
+			"api_key": "Bearer $(bash ~/.config/crush/anthropic.sh)",
+			"extra_headers": {
+				"User-Agent": "claude-cli/2.0.31 (external, cli)",
+				"x-app": "cli",
+				"anthropic-version": "2023-06-01",
+				"anthropic-beta": "oauth-2025-04-20,interleaved-thinking-2025-05-14,fine-grained-tool-streaming-2025-05-14"
+			},
+			"system_prompt_prefix": "You are Claude Code, Anthropic's official CLI for Claude.",
 			"models": [
 				{
 					"id": "claude-sonnet-4-5-20250929",
 					"name": "Claude Sonnet 4.5",
-					"cost_per_1m_in": 0,
-					"cost_per_1m_out": 0,
-					"cost_per_1m_in_cached": 0,
-					"cost_per_1m_out_cached": 0,
+					"cost_per_1m_in": 3,
+					"cost_per_1m_out": 15,
+					"cost_per_1m_in_cached": 3.75,
+					"cost_per_1m_out_cached": 0.3,
 					"context_window": 200000,
 					"default_max_tokens": 50000,
 					"can_reason": true,
@@ -29,79 +33,74 @@
 					"supports_attachments": true
 				},
 				{
-					"id": "synthetic/glm-4.6",
-					"name": "GLM-4.6",
-					"cost_per_1m_in": 0,
-					"cost_per_1m_out": 0,
-					"cost_per_1m_in_cached": 0,
-					"cost_per_1m_out_cached": 0,
-					"context_window": 198000,
-					"default_max_tokens": 50000,
+					"id": "claude-haiku-4-5-20251001",
+					"name": "Claude 4.5 Haiku",
+					"cost_per_1m_in": 1,
+					"cost_per_1m_out": 5,
+					"cost_per_1m_in_cached": 1.25,
+					"cost_per_1m_out_cached": 0.1,
+					"context_window": 200000,
+					"default_max_tokens": 32000,
 					"can_reason": true,
 					"has_reasoning_efforts": false,
-					"supports_attachments": false
+					"supports_attachments": true
 				},
 				{
-					"id": "gpt-5-codex-high",
-					"name": "GPT-5 Codex High",
-					"cost_per_1m_in": 0,
-					"cost_per_1m_out": 0,
-					"cost_per_1m_in_cached": 0,
-					"cost_per_1m_out_cached": 0,
-					"context_window": 204800,
-					"default_max_tokens": 50000,
+					"id": "claude-opus-4-1-20250805",
+					"name": "Claude Opus 4.1",
+					"cost_per_1m_in": 15,
+					"cost_per_1m_out": 75,
+					"cost_per_1m_in_cached": 18.75,
+					"cost_per_1m_out_cached": 1.5,
+					"context_window": 200000,
+					"default_max_tokens": 32000,
 					"can_reason": true,
 					"has_reasoning_efforts": false,
-					"supports_attachments": false
-				},
+					"supports_attachments": true
+				}
+			]
+		},
+		"synthetic": {
+			"name": "Synthetic",
+			"type": "anthropic",
+			"api_key": "$SYNTHETIC_API_KEY",
+			"base_url": "https://api.synthetic.new/anthropic",
+			"models": [
 				{
-					"id": "gpt-5-high",
-					"name": "GPT-5 High",
-					"cost_per_1m_in": 0,
-					"cost_per_1m_out": 0,
-					"cost_per_1m_in_cached": 0,
-					"cost_per_1m_out_cached": 0,
-					"context_window": 204800,
-					"default_max_tokens": 50000,
+					"id": "hf:MiniMaxAI/MiniMax-M2",
+					"name": "MiniMax M2",
+					"cost_per_1m_in": 0.55,
+					"cost_per_1m_out": 2.19,
+					"cost_per_1m_in_cached": 0.55,
+					"cost_per_1m_out_cached": 2.19,
+					"context_window": 196608,
+					"default_max_tokens": 65536,
 					"can_reason": true,
 					"has_reasoning_efforts": false,
 					"supports_attachments": false
 				},
 				{
-					"id": "claude-haiku-4-5-20251001",
-					"name": "Claude 4.5 Haiku",
-					"cost_per_1m_in": 0,
-					"cost_per_1m_out": 0,
-					"cost_per_1m_in_cached": 0,
-					"cost_per_1m_out_cached": 0,
-					"context_window": 200000,
-					"default_max_tokens": 50000,
-					"can_reason": true,
-					"has_reasoning_efforts": false,
-					"supports_attachments": true
-				},
-				{
-					"id": "synthetic/minimax-m2",
-					"name": "MiniMax M2",
-					"cost_per_1m_in": 0,
-					"cost_per_1m_out": 0,
-					"cost_per_1m_in_cached": 0,
-					"cost_per_1m_out_cached": 0,
-					"context_window": 192000,
-					"default_max_tokens": 50000,
+					"id": "hf:zai-org/GLM-4.6",
+					"name": "GLM-4.6",
+					"cost_per_1m_in": 0.55,
+					"cost_per_1m_out": 2.19,
+					"cost_per_1m_in_cached": 0.55,
+					"cost_per_1m_out_cached": 2.19,
+					"context_window": 204800,
+					"default_max_tokens": 65536,
 					"can_reason": true,
 					"has_reasoning_efforts": false,
 					"supports_attachments": false
 				},
 				{
-					"id": "synthetic/kimi-k2-thinking",
+					"id": "hf:moonshotai/Kimi-K2-Thinking",
 					"name": "Kimi K2 Thinking",
-					"cost_per_1m_in": 0,
-					"cost_per_1m_out": 0,
-					"cost_per_1m_in_cached": 0,
-					"cost_per_1m_out_cached": 0,
-					"context_window": 192000,
-					"default_max_tokens": 50000,
+					"cost_per_1m_in": 0.55,
+					"cost_per_1m_out": 2.19,
+					"cost_per_1m_in_cached": 0.55,
+					"cost_per_1m_out_cached": 2.19,
+					"context_window": 204800,
+					"default_max_tokens": 65536,
 					"can_reason": true,
 					"has_reasoning_efforts": false,
 					"supports_attachments": false