Merge remote-tracking branch 'origin/main' into list-select

Created by Kujtim Hoxha

Change summary

.github/cla-signatures.json                                                       |  24 
CRUSH.md                                                                          |   2 
README.md                                                                         | 115 
go.mod                                                                            |   6 
go.sum                                                                            |  12 
internal/llm/agent/agent.go                                                       |  68 
internal/llm/agent/mcp-tools.go                                                   |  16 
internal/llm/provider/openai.go                                                   | 138 
internal/llm/tools/tools.go                                                       |  75 
internal/tui/components/chat/messages/messages.go                                 |   4 
internal/tui/components/chat/splash/splash.go                                     |  22 
internal/tui/components/core/core.go                                              |  39 
internal/tui/components/core/status_test.go                                       |   3 
internal/tui/components/core/testdata/TestStatus/AllFieldsWithExtraContent.golden |   2 
internal/tui/components/core/testdata/TestStatus/Default.golden                   |   2 
internal/tui/components/core/testdata/TestStatus/EmptyDescription.golden          |   2 
internal/tui/components/core/testdata/TestStatus/LongDescription.golden           |   2 
internal/tui/components/core/testdata/TestStatus/NarrowWidth.golden               |   2 
internal/tui/components/core/testdata/TestStatus/VeryNarrowWidth.golden           |   2 
internal/tui/components/core/testdata/TestStatus/WithColors.golden                |   2 
internal/tui/components/core/testdata/TestStatus/WithCustomIcon.golden            |   2 
internal/tui/components/core/testdata/TestStatus/WithExtraContent.golden          |   2 
internal/tui/components/core/testdata/TestStatusTruncation/Width20.golden         |   2 
internal/tui/components/core/testdata/TestStatusTruncation/Width30.golden         |   2 
internal/tui/components/core/testdata/TestStatusTruncation/Width40.golden         |   2 
internal/tui/components/core/testdata/TestStatusTruncation/Width50.golden         |   2 
internal/tui/components/core/testdata/TestStatusTruncation/Width60.golden         |   2 
internal/tui/components/dialogs/commands/commands.go                              |   6 
internal/tui/components/dialogs/models/list.go                                    |  15 
internal/tui/components/dialogs/models/models.go                                  |  19 
internal/tui/components/files/files.go                                            |   5 
internal/tui/components/lsp/lsp.go                                                |  11 
internal/tui/components/mcp/mcp.go                                                |  12 
internal/tui/exp/list/list.go                                                     |  11 
internal/tui/styles/crush.go                                                      |  11 
internal/tui/styles/theme.go                                                      |   6 
36 files changed, 416 insertions(+), 232 deletions(-)

Detailed changes

.github/cla-signatures.json πŸ”—

@@ -223,6 +223,30 @@
       "created_at": "2025-08-04T14:01:30Z",
       "repoId": 987670088,
       "pullRequestNo": 546
+    },
+    {
+      "name": "zloeber",
+      "id": 4702624,
+      "comment_id": 3152513500,
+      "created_at": "2025-08-04T21:55:42Z",
+      "repoId": 987670088,
+      "pullRequestNo": 564
+    },
+    {
+      "name": "nelsenm2",
+      "id": 197524521,
+      "comment_id": 3152872109,
+      "created_at": "2025-08-05T00:24:50Z",
+      "repoId": 987670088,
+      "pullRequestNo": 569
+    },
+    {
+      "name": "mohseenrm",
+      "id": 10768371,
+      "comment_id": 3153159347,
+      "created_at": "2025-08-05T03:39:12Z",
+      "repoId": 987670088,
+      "pullRequestNo": 574
     }
   ]
 }

CRUSH.md πŸ”—

@@ -4,6 +4,8 @@
 
 - **Build**: `go build .` or `go run .`
 - **Test**: `task test` or `go test ./...` (run single test: `go test ./internal/llm/prompt -run TestGetContextFromPaths`)
+- **Update Golden Files**: `go test ./... -update` (regenerates .golden files when test output changes)
+  - Update specific package: `go test ./internal/tui/components/core -update` (in this case, we're updating "core")
 - **Lint**: `task lint-fix`
 - **Format**: `task fmt` (gofumpt -w .)
 - **Dev**: `task dev` (runs with profiling enabled)

README.md πŸ”—

@@ -39,7 +39,7 @@ nix run github:numtide/nix-ai-tools#crush
 
 <details>
 <summary><strong>Nix (NUR)</strong></summary>
-    
+
 Crush is available via [NUR](https://github.com/nix-community/NUR) in `nur.repos.charmbracelet.crush`.
 
 You can also try out Crush via `nix-shell`:
@@ -138,19 +138,29 @@ Crush runs great with no configuration. That said, if you do need or want to
 customize Crush, configuration can be added either local to the project itself,
 or globally, with the following priority:
 
-1. `./.crush.json`
-2. `./crush.json`
-3. `$HOME/.config/crush/crush.json`
+1. `.crush.json`
+2. `crush.json`
+3. `$HOME/.config/crush/crush.json` (Windows: `%USERPROFILE%\AppData\Local\crush\crush.json`)
 
 Configuration itself is stored as a JSON object:
 
 ```json
 {
-   "this-setting": { }
-   "that-setting": { }
+   "this-setting": {"this": "that"},
+   "that-setting": ["ceci", "cela"]
 }
 ```
 
+As an additional note, Crush also stores ephemeral data, such as application state, in one additional location:
+
+```bash
+# Unix
+$HOME/.local/share/crush/crush.json
+
+# Windows
+%LOCALAPPDATA%\crush\crush.json
+```
+
 ### LSPs
 
 Crush can use LSPs for additional context to help inform its decisions, just
@@ -221,10 +231,10 @@ control but don't want Crush to consider when providing context.
 The `.crushignore` file uses the same syntax as `.gitignore` and can be placed
 in the root of your project or in subdirectories.
 
-### Whitelisting Tools
+### Allowing Tools
 
 By default, Crush will ask you for permission before running tool calls. If
-you'd like, you can whitelist tools to be executed without prompting you for
+you'd like, you can allow tools to be executed without prompting you for
 permissions. Use this with care.
 
 ```json
@@ -245,6 +255,53 @@ permissions. Use this with care.
 You can also skip all permission prompts entirely by running Crush with the
 `--yolo` flag. Be very, very careful with this feature.
 
+### Local Models
+
+Local models can also be configured via OpenAI-compatible API. Here are two common examples:
+
+#### Ollama
+
+```json
+{
+  "providers": {
+    "ollama": {
+      "name": "Ollama",
+      "base_url": "http://localhost:11434/v1/",
+      "type": "openai",
+      "models": [
+        {
+          "name": "Qwen 3 30B",
+          "id": "qwen3:30b",
+          "context_window": 256000,
+          "default_max_tokens": 20000
+        }
+      ]
+    }
+  }
+}
+```
+
+#### LM Studio
+
+```json
+{
+  "providers": {
+    "lmstudio": {
+      "name": "LM Studio",
+      "base_url": "http://localhost:1234/v1/",
+      "type": "openai",
+      "models": [
+        {
+          "name": "Qwen 3 30B",
+          "id": "qwen/qwen3-30b-a3b-2507",
+          "context_window": 256000,
+          "default_max_tokens": 20000
+        }
+      ]
+    }
+  }
+}
+```
+
 ### Custom Providers
 
 Crush supports custom provider configurations for both OpenAI-compatible and
@@ -314,6 +371,48 @@ Custom Anthropic-compatible providers follow this format:
 }
 ```
 
+### Amazon Bedrock
+
+Crush currently supports running Anthropic models through Bedrock, with caching disabled.
+
+* A Bedrock provider will appear once you have AWS configured, i.e. `aws configure`
+* Crush also expects the `AWS_REGION` or `AWS_DEFAULT_REGION` to be set
+* To use a specific AWS profile set `AWS_PROFILE` in your environment, i.e. `AWS_PROFILE=myprofile crush`
+
+### Vertex AI Platform
+
+Vertex AI will appear in the list of available providers when `VERTEXAI_PROJECT` and `VERTEXAI_LOCATION` are set. You will also need to be authenticated:
+
+```bash
+gcloud auth application-default login
+```
+
+To add specific models to the configuration, configure as such:
+
+```json
+{
+  "$schema": "https://charm.land/crush.json",
+  "providers": {
+    "vertexai": {
+      "models": [
+        {
+          "id": "claude-sonnet-4@20250514",
+          "name": "VertexAI Sonnet 4",
+          "cost_per_1m_in": 3,
+          "cost_per_1m_out": 15,
+          "cost_per_1m_in_cached": 3.75,
+          "cost_per_1m_out_cached": 0.3,
+          "context_window": 200000,
+          "default_max_tokens": 50000,
+          "can_reason": true,
+          "supports_attachments": true
+        }
+      ]
+    }
+  }
+}
+```
+
 ## Logging
 
 Sometimes you need to look at logs. Luckily, Crush logs all sorts of

go.mod πŸ”—

@@ -14,7 +14,7 @@ require (
 	github.com/charlievieth/fastwalk v1.0.11
 	github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250716191546-1e2ffbbcf5c5
 	github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250730165737-56ff7146d52d
-	github.com/charmbracelet/catwalk v0.3.5
+	github.com/charmbracelet/catwalk v0.4.5
 	github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674
 	github.com/charmbracelet/glamour/v2 v2.0.0-20250516160903-6f1e2c8f9ebe
 	github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0
@@ -27,7 +27,7 @@ require (
 	github.com/google/uuid v1.6.0
 	github.com/invopop/jsonschema v0.13.0
 	github.com/joho/godotenv v1.5.1
-	github.com/mark3labs/mcp-go v0.36.0
+	github.com/mark3labs/mcp-go v0.37.0
 	github.com/muesli/termenv v0.16.0
 	github.com/ncruces/go-sqlite3 v0.25.0
 	github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646
@@ -135,7 +135,7 @@ require (
 	golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 // indirect
 	golang.org/x/image v0.26.0 // indirect
 	golang.org/x/net v0.40.0 // indirect
-	golang.org/x/oauth2 v0.25.0 // indirect
+	golang.org/x/oauth2 v0.30.0 // indirect
 	golang.org/x/sync v0.16.0 // indirect
 	golang.org/x/sys v0.34.0
 	golang.org/x/term v0.32.0 // indirect

go.sum πŸ”—

@@ -78,8 +78,8 @@ github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250716191546-1e2ffbbcf5c5
 github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250716191546-1e2ffbbcf5c5/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw=
 github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250730165737-56ff7146d52d h1:YMXLZHSo8DjytVY/b5dK8LDuyQsVUmBK3ydQMpu2Ui4=
 github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250730165737-56ff7146d52d/go.mod h1:XIQ1qQfRph6Z5o2EikCydjumo0oDInQySRHuPATzbZc=
-github.com/charmbracelet/catwalk v0.3.5 h1:ChMvA5ooTNZhDKFagmGNQgIZvZp8XjpdaJ+cDmhgCgA=
-github.com/charmbracelet/catwalk v0.3.5/go.mod h1:gUUCqqZ8bk4D7ZzGTu3I77k7cC2x4exRuJBN1H2u2pc=
+github.com/charmbracelet/catwalk v0.4.5 h1:Kv3PadDe8IF8gpcYTfAJdCee5Bv4HufvtNT61FXtq5g=
+github.com/charmbracelet/catwalk v0.4.5/go.mod h1:WnKgNPmQHuMyk7GtwAQwl+ezHusfH40IvzML2qwUGwc=
 github.com/charmbracelet/colorprofile v0.3.1 h1:k8dTHMd7fgw4bnFd7jXTLZrSU/CQrKnL3m+AxCzDz40=
 github.com/charmbracelet/colorprofile v0.3.1/go.mod h1:/GkGusxNs8VB/RSOh3fu0TJmQ4ICMMPApIIVn0KszZ0=
 github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674 h1:+Cz+VfxD5DO+JT1LlswXWhre0HYLj6l2HW8HVGfMuC0=
@@ -180,8 +180,8 @@ github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69
 github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
 github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
 github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
-github.com/mark3labs/mcp-go v0.36.0 h1:rIZaijrRYPeSbJG8/qNDe0hWlGrCJ7FWHNMz2SQpTis=
-github.com/mark3labs/mcp-go v0.36.0/go.mod h1:T7tUa2jO6MavG+3P25Oy/jR7iCeJPHImCZHRymCn39g=
+github.com/mark3labs/mcp-go v0.37.0 h1:BywvZLPRT6Zx6mMG/MJfxLSZQkTGIcJSEGKsvr4DsoQ=
+github.com/mark3labs/mcp-go v0.37.0/go.mod h1:T7tUa2jO6MavG+3P25Oy/jR7iCeJPHImCZHRymCn39g=
 github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
 github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
 github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
@@ -332,8 +332,8 @@ golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
 golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
 golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY=
 golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds=
-golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
-golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
+golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
+golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=

internal/llm/agent/agent.go πŸ”—

@@ -159,11 +159,12 @@ func NewAgent(
 	if err != nil {
 		return nil, err
 	}
+
 	summarizeOpts := []provider.ProviderClientOption{
-		provider.WithModel(config.SelectedModelTypeSmall),
-		provider.WithSystemMessage(prompt.GetPrompt(prompt.PromptSummarizer, smallModelProviderCfg.ID)),
+		provider.WithModel(config.SelectedModelTypeLarge),
+		provider.WithSystemMessage(prompt.GetPrompt(prompt.PromptSummarizer, providerCfg.ID)),
 	}
-	summarizeProvider, err := provider.NewProvider(*smallModelProviderCfg, summarizeOpts...)
+	summarizeProvider, err := provider.NewProvider(*providerCfg, summarizeOpts...)
 	if err != nil {
 		return nil, err
 	}
@@ -224,7 +225,7 @@ func NewAgent(
 		sessions:            sessions,
 		titleProvider:       titleProvider,
 		summarizeProvider:   summarizeProvider,
-		summarizeProviderID: string(smallModelProviderCfg.ID),
+		summarizeProviderID: string(providerCfg.ID),
 		activeRequests:      csync.NewMap[string, context.CancelFunc](),
 		tools:               csync.NewLazySlice(toolFn),
 	}, nil
@@ -904,54 +905,59 @@ func (a *agent) UpdateModel() error {
 		a.providerID = string(currentProviderCfg.ID)
 	}
 
-	// Check if small model provider has changed (affects title and summarize providers)
+	// Check if providers have changed for title (small) and summarize (large)
 	smallModelCfg := cfg.Models[config.SelectedModelTypeSmall]
 	var smallModelProviderCfg config.ProviderConfig
-
 	for p := range cfg.Providers.Seq() {
 		if p.ID == smallModelCfg.Provider {
 			smallModelProviderCfg = p
 			break
 		}
 	}
-
 	if smallModelProviderCfg.ID == "" {
 		return fmt.Errorf("provider %s not found in config", smallModelCfg.Provider)
 	}
 
-	// Check if summarize provider has changed
-	if string(smallModelProviderCfg.ID) != a.summarizeProviderID {
-		smallModel := cfg.GetModelByType(config.SelectedModelTypeSmall)
-		if smallModel == nil {
-			return fmt.Errorf("model %s not found in provider %s", smallModelCfg.Model, smallModelProviderCfg.ID)
+	largeModelCfg := cfg.Models[config.SelectedModelTypeLarge]
+	var largeModelProviderCfg config.ProviderConfig
+	for p := range cfg.Providers.Seq() {
+		if p.ID == largeModelCfg.Provider {
+			largeModelProviderCfg = p
+			break
 		}
+	}
+	if largeModelProviderCfg.ID == "" {
+		return fmt.Errorf("provider %s not found in config", largeModelCfg.Provider)
+	}
 
-		// Recreate title provider
-		titleOpts := []provider.ProviderClientOption{
-			provider.WithModel(config.SelectedModelTypeSmall),
-			provider.WithSystemMessage(prompt.GetPrompt(prompt.PromptTitle, smallModelProviderCfg.ID)),
-			// We want the title to be short, so we limit the max tokens
-			provider.WithMaxTokens(40),
-		}
-		newTitleProvider, err := provider.NewProvider(smallModelProviderCfg, titleOpts...)
-		if err != nil {
-			return fmt.Errorf("failed to create new title provider: %w", err)
-		}
+	// Recreate title provider
+	titleOpts := []provider.ProviderClientOption{
+		provider.WithModel(config.SelectedModelTypeSmall),
+		provider.WithSystemMessage(prompt.GetPrompt(prompt.PromptTitle, smallModelProviderCfg.ID)),
+		provider.WithMaxTokens(40),
+	}
+	newTitleProvider, err := provider.NewProvider(smallModelProviderCfg, titleOpts...)
+	if err != nil {
+		return fmt.Errorf("failed to create new title provider: %w", err)
+	}
+	a.titleProvider = newTitleProvider
 
-		// Recreate summarize provider
+	// Recreate summarize provider if provider changed (now large model)
+	if string(largeModelProviderCfg.ID) != a.summarizeProviderID {
+		largeModel := cfg.GetModelByType(config.SelectedModelTypeLarge)
+		if largeModel == nil {
+			return fmt.Errorf("model %s not found in provider %s", largeModelCfg.Model, largeModelProviderCfg.ID)
+		}
 		summarizeOpts := []provider.ProviderClientOption{
-			provider.WithModel(config.SelectedModelTypeSmall),
-			provider.WithSystemMessage(prompt.GetPrompt(prompt.PromptSummarizer, smallModelProviderCfg.ID)),
+			provider.WithModel(config.SelectedModelTypeLarge),
+			provider.WithSystemMessage(prompt.GetPrompt(prompt.PromptSummarizer, largeModelProviderCfg.ID)),
 		}
-		newSummarizeProvider, err := provider.NewProvider(smallModelProviderCfg, summarizeOpts...)
+		newSummarizeProvider, err := provider.NewProvider(largeModelProviderCfg, summarizeOpts...)
 		if err != nil {
 			return fmt.Errorf("failed to create new summarize provider: %w", err)
 		}
-
-		// Update the providers and provider ID
-		a.titleProvider = newTitleProvider
 		a.summarizeProvider = newSummarizeProvider
-		a.summarizeProviderID = string(smallModelProviderCfg.ID)
+		a.summarizeProviderID = string(largeModelProviderCfg.ID)
 	}
 
 	return nil
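
The `UpdateModel` hunk above now performs the same provider lookup twice, once for the small-model config (title provider) and once for the large-model config (summarize provider). A minimal sketch of how that lookup could be factored into a helper, reusing the `config.Config`, `config.ProviderConfig`, and `Providers.Seq()` identifiers visible in the diff; the helper name and exact parameter types are assumptions, not part of this change:

```go
// findProviderCfg is a hypothetical helper mirroring the inline loops in
// UpdateModel above: scan the configured providers and return the one whose
// ID matches the model's provider, or an error if it is missing.
func findProviderCfg(cfg *config.Config, providerID string) (config.ProviderConfig, error) {
	for p := range cfg.Providers.Seq() {
		if string(p.ID) == providerID {
			return p, nil
		}
	}
	return config.ProviderConfig{}, fmt.Errorf("provider %s not found in config", providerID)
}
```

With such a helper both lookups collapse to one call each; the change keeps the loops inline, which also keeps the error messages identical to the previous code.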

internal/llm/agent/mcp-tools.go πŸ”—

@@ -308,29 +308,31 @@ func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *con
 func createMcpClient(m config.MCPConfig) (*client.Client, error) {
 	switch m.Type {
 	case config.MCPStdio:
-		return client.NewStdioMCPClient(
+		return client.NewStdioMCPClientWithOptions(
 			m.Command,
 			m.ResolvedEnv(),
-			m.Args...,
+			m.Args,
+			transport.WithCommandLogger(mcpLogger{}),
 		)
 	case config.MCPHttp:
 		return client.NewStreamableHttpClient(
 			m.URL,
 			transport.WithHTTPHeaders(m.ResolvedHeaders()),
-			transport.WithLogger(mcpHTTPLogger{}),
+			transport.WithHTTPLogger(mcpLogger{}),
 		)
 	case config.MCPSse:
 		return client.NewSSEMCPClient(
 			m.URL,
 			client.WithHeaders(m.ResolvedHeaders()),
+			transport.WithSSELogger(mcpLogger{}),
 		)
 	default:
 		return nil, fmt.Errorf("unsupported mcp type: %s", m.Type)
 	}
 }
 
-// for MCP's HTTP client.
-type mcpHTTPLogger struct{}
+// for MCP's clients.
+type mcpLogger struct{}
 
-func (l mcpHTTPLogger) Errorf(format string, v ...any) { slog.Error(fmt.Sprintf(format, v...)) }
-func (l mcpHTTPLogger) Infof(format string, v ...any)  { slog.Info(fmt.Sprintf(format, v...)) }
+func (l mcpLogger) Errorf(format string, v ...any) { slog.Error(fmt.Sprintf(format, v...)) }
+func (l mcpLogger) Infof(format string, v ...any)  { slog.Info(fmt.Sprintf(format, v...)) }

internal/llm/provider/openai.go πŸ”—

@@ -2,10 +2,12 @@ package provider
 
 import (
 	"context"
+	"encoding/json"
 	"errors"
 	"fmt"
 	"io"
 	"log/slog"
+	"slices"
 	"strings"
 	"time"
 
@@ -14,6 +16,7 @@ import (
 	"github.com/charmbracelet/crush/internal/llm/tools"
 	"github.com/charmbracelet/crush/internal/log"
 	"github.com/charmbracelet/crush/internal/message"
+	"github.com/google/uuid"
 	"github.com/openai/openai-go"
 	"github.com/openai/openai-go/option"
 	"github.com/openai/openai-go/packages/param"
@@ -70,8 +73,9 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
 		systemMessage = o.providerOptions.systemPromptPrefix + "\n" + systemMessage
 	}
 
-	systemTextBlock := openai.ChatCompletionContentPartTextParam{Text: systemMessage}
+	system := openai.SystemMessage(systemMessage)
 	if isAnthropicModel && !o.providerOptions.disableCache {
+		systemTextBlock := openai.ChatCompletionContentPartTextParam{Text: systemMessage}
 		systemTextBlock.SetExtraFields(
 			map[string]any{
 				"cache_control": map[string]string{
@@ -79,10 +83,10 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
 				},
 			},
 		)
+		var content []openai.ChatCompletionContentPartTextParam
+		content = append(content, systemTextBlock)
+		system = openai.SystemMessage(content)
 	}
-	var content []openai.ChatCompletionContentPartTextParam
-	content = append(content, systemTextBlock)
-	system := openai.SystemMessage(content)
 	openaiMessages = append(openaiMessages, system)
 
 	for i, msg := range messages {
@@ -93,9 +97,12 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
 		switch msg.Role {
 		case message.User:
 			var content []openai.ChatCompletionContentPartUnionParam
+
 			textBlock := openai.ChatCompletionContentPartTextParam{Text: msg.Content().String()}
 			content = append(content, openai.ChatCompletionContentPartUnionParam{OfText: &textBlock})
+			hasBinaryContent := false
 			for _, binaryContent := range msg.BinaryContent() {
+				hasBinaryContent = true
 				imageURL := openai.ChatCompletionContentPartImageImageURLParam{URL: binaryContent.String(catwalk.InferenceProviderOpenAI)}
 				imageBlock := openai.ChatCompletionContentPartImageParam{ImageURL: imageURL}
 
@@ -108,8 +115,11 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
 					},
 				})
 			}
-
-			openaiMessages = append(openaiMessages, openai.UserMessage(content))
+			if hasBinaryContent || (isAnthropicModel && !o.providerOptions.disableCache) {
+				openaiMessages = append(openaiMessages, openai.UserMessage(content))
+			} else {
+				openaiMessages = append(openaiMessages, openai.UserMessage(msg.Content().String()))
+			}
 
 		case message.Assistant:
 			assistantMsg := openai.ChatCompletionAssistantMessageParam{
@@ -134,13 +144,15 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
 						},
 					},
 				}
+				if !isAnthropicModel {
+					assistantMsg.Content = openai.ChatCompletionAssistantMessageParamContentUnion{
+						OfString: param.NewOpt(msg.Content().String()),
+					}
+				}
 			}
 
 			if len(msg.ToolCalls()) > 0 {
 				hasContent = true
-				assistantMsg.Content = openai.ChatCompletionAssistantMessageParamContentUnion{
-					OfString: param.NewOpt(msg.Content().String()),
-				}
 				assistantMsg.ToolCalls = make([]openai.ChatCompletionMessageToolCallParam, len(msg.ToolCalls()))
 				for i, call := range msg.ToolCalls() {
 					assistantMsg.ToolCalls[i] = openai.ChatCompletionMessageToolCallParam{
@@ -329,21 +341,26 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
 			acc := openai.ChatCompletionAccumulator{}
 			currentContent := ""
 			toolCalls := make([]message.ToolCall, 0)
-
-			var currentToolCallID string
-			var currentToolCall openai.ChatCompletionMessageToolCall
 			var msgToolCalls []openai.ChatCompletionMessageToolCall
-			currentToolIndex := 0
 			for openaiStream.Next() {
 				chunk := openaiStream.Current()
 				// Kujtim: this is an issue with openrouter qwen, its sending -1 for the tool index
 				if len(chunk.Choices) > 0 && len(chunk.Choices[0].Delta.ToolCalls) > 0 && chunk.Choices[0].Delta.ToolCalls[0].Index == -1 {
-					chunk.Choices[0].Delta.ToolCalls[0].Index = int64(currentToolIndex)
-					currentToolIndex++
+					chunk.Choices[0].Delta.ToolCalls[0].Index = 0
 				}
 				acc.AddChunk(chunk)
-				// This fixes multiple tool calls for some providers
-				for _, choice := range chunk.Choices {
+				for i, choice := range chunk.Choices {
+					reasoning, ok := choice.Delta.JSON.ExtraFields["reasoning"]
+					if ok && reasoning.Raw() != "" {
+						reasoningStr := ""
+						json.Unmarshal([]byte(reasoning.Raw()), &reasoningStr)
+						if reasoningStr != "" {
+							eventChan <- ProviderEvent{
+								Type:     EventThinkingDelta,
+								Thinking: reasoningStr,
+							}
+						}
+					}
 					if choice.Delta.Content != "" {
 						eventChan <- ProviderEvent{
 							Type:    EventContentDelta,
@@ -352,63 +369,50 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
 						currentContent += choice.Delta.Content
 					} else if len(choice.Delta.ToolCalls) > 0 {
 						toolCall := choice.Delta.ToolCalls[0]
-						// Detect tool use start
-						if currentToolCallID == "" {
-							if toolCall.ID != "" {
-								currentToolCallID = toolCall.ID
-								eventChan <- ProviderEvent{
-									Type: EventToolUseStart,
-									ToolCall: &message.ToolCall{
-										ID:       toolCall.ID,
-										Name:     toolCall.Function.Name,
-										Finished: false,
-									},
+						newToolCall := false
+						if len(msgToolCalls)-1 >= int(toolCall.Index) { // tool call exists
+							existingToolCall := msgToolCalls[toolCall.Index]
+							if toolCall.ID != "" && toolCall.ID != existingToolCall.ID {
+								found := false
+								// try to find the tool based on the ID
+								for i, tool := range msgToolCalls {
+									if tool.ID == toolCall.ID {
+										msgToolCalls[i].Function.Arguments += toolCall.Function.Arguments
+										found = true
+									}
 								}
-								currentToolCall = openai.ChatCompletionMessageToolCall{
-									ID:   toolCall.ID,
-									Type: "function",
-									Function: openai.ChatCompletionMessageToolCallFunction{
-										Name:      toolCall.Function.Name,
-										Arguments: toolCall.Function.Arguments,
-									},
+								if !found {
+									newToolCall = true
 								}
-							}
-						} else {
-							// Delta tool use
-							if toolCall.ID == "" || toolCall.ID == currentToolCallID {
-								currentToolCall.Function.Arguments += toolCall.Function.Arguments
 							} else {
-								// Detect new tool use
-								if toolCall.ID != currentToolCallID {
-									msgToolCalls = append(msgToolCalls, currentToolCall)
-									currentToolCallID = toolCall.ID
-									eventChan <- ProviderEvent{
-										Type: EventToolUseStart,
-										ToolCall: &message.ToolCall{
-											ID:       toolCall.ID,
-											Name:     toolCall.Function.Name,
-											Finished: false,
-										},
-									}
-									currentToolCall = openai.ChatCompletionMessageToolCall{
-										ID:   toolCall.ID,
-										Type: "function",
-										Function: openai.ChatCompletionMessageToolCallFunction{
-											Name:      toolCall.Function.Name,
-											Arguments: toolCall.Function.Arguments,
-										},
-									}
-								}
+								msgToolCalls[toolCall.Index].Function.Arguments += toolCall.Function.Arguments
 							}
+						} else {
+							newToolCall = true
 						}
-					}
-					// Kujtim: some models send finish stop even for tool calls
-					if choice.FinishReason == "tool_calls" || (choice.FinishReason == "stop" && currentToolCallID != "") {
-						msgToolCalls = append(msgToolCalls, currentToolCall)
-						if len(acc.Choices) > 0 {
-							acc.Choices[0].Message.ToolCalls = msgToolCalls
+						if newToolCall { // new tool call
+							if toolCall.ID == "" {
+								toolCall.ID = uuid.NewString()
+							}
+							eventChan <- ProviderEvent{
+								Type: EventToolUseStart,
+								ToolCall: &message.ToolCall{
+									ID:       toolCall.ID,
+									Name:     toolCall.Function.Name,
+									Finished: false,
+								},
+							}
+							msgToolCalls = append(msgToolCalls, openai.ChatCompletionMessageToolCall{
+								ID:   toolCall.ID,
+								Type: "function",
+								Function: openai.ChatCompletionMessageToolCallFunction{
+									Name:      toolCall.Function.Name,
+									Arguments: toolCall.Function.Arguments,
+								},
+							})
 						}
 					}
+					acc.Choices[i].Message.ToolCalls = slices.Clone(msgToolCalls)
 				}
 			}
 

internal/llm/tools/tools.go πŸ”—

@@ -3,6 +3,8 @@ package tools
 import (
 	"context"
 	"encoding/json"
+	"fmt"
+	"strings"
 )
 
 type ToolInfo struct {
@@ -25,6 +27,10 @@ const (
 
 	SessionIDContextKey sessionIDContextKey = "session_id"
 	MessageIDContextKey messageIDContextKey = "message_id"
+
+	maxResponseWidth  = 3000
+	maxResponseHeight = 5000
+	maxResponseChars  = 50000
 )
 
 type ToolResponse struct {
@@ -37,10 +43,77 @@ type ToolResponse struct {
 func NewTextResponse(content string) ToolResponse {
 	return ToolResponse{
 		Type:    ToolResponseTypeText,
-		Content: content,
+		Content: truncateContent(content),
 	}
 }
 
+func truncateContent(content string) string {
+	if len(content) <= maxResponseChars {
+		return truncateWidthAndHeight(content)
+	}
+
+	truncated := content[:maxResponseChars]
+
+	if lastNewline := strings.LastIndex(truncated, "\n"); lastNewline > maxResponseChars/2 {
+		truncated = truncated[:lastNewline]
+	}
+
+	truncated += "\n\n... [Content truncated due to length] ..."
+
+	return truncateWidthAndHeight(truncated)
+}
+
+func truncateWidthAndHeight(content string) string {
+	lines := strings.Split(content, "\n")
+
+	heightTruncated := false
+	if len(lines) > maxResponseHeight {
+		keepLines := maxResponseHeight - 3
+		firstHalf := keepLines / 2
+		secondHalf := keepLines - firstHalf
+
+		truncatedLines := make([]string, 0, maxResponseHeight)
+		truncatedLines = append(truncatedLines, lines[:firstHalf]...)
+		truncatedLines = append(truncatedLines, "")
+		truncatedLines = append(truncatedLines, fmt.Sprintf("... [%d lines truncated] ...", len(lines)-keepLines))
+		truncatedLines = append(truncatedLines, "")
+		truncatedLines = append(truncatedLines, lines[len(lines)-secondHalf:]...)
+
+		lines = truncatedLines
+		heightTruncated = true
+	}
+
+	widthTruncated := false
+	for i, line := range lines {
+		if len(line) > maxResponseWidth {
+			if maxResponseWidth > 20 {
+				keepChars := maxResponseWidth - 10
+				firstHalf := keepChars / 2
+				secondHalf := keepChars - firstHalf
+				lines[i] = line[:firstHalf] + " ... " + line[len(line)-secondHalf:]
+			} else {
+				lines[i] = line[:maxResponseWidth]
+			}
+			widthTruncated = true
+		}
+	}
+
+	result := strings.Join(lines, "\n")
+
+	if heightTruncated || widthTruncated {
+		notices := make([]string, 0, 2)
+		if heightTruncated {
+			notices = append(notices, "height")
+		}
+		if widthTruncated {
+			notices = append(notices, "width")
+		}
+		result += fmt.Sprintf("\n\n[Note: Content truncated by %s to fit response limits]", strings.Join(notices, " and "))
+	}
+
+	return result
+}
+
 func WithResponseMetadata(response ToolResponse, metadata any) ToolResponse {
 	if metadata != nil {
 		metadataBytes, err := json.Marshal(metadata)
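
Because `NewTextResponse` now runs `truncateContent`, long tool output is capped before it ever reaches the model. A test-style sketch of the new behavior, using only the constants and notice strings visible in the diff; the test itself is illustrative and not part of this change:

```go
package tools

import (
	"strings"
	"testing"
)

// Illustrative only: a 60 000-character response exceeds maxResponseChars
// (50 000), so it should come back shortened and carry the truncation notice.
func TestNewTextResponseTruncatesLongContent(t *testing.T) {
	resp := NewTextResponse(strings.Repeat("a", 60_000))

	if !strings.Contains(resp.Content, "[Content truncated due to length]") {
		t.Fatal("expected the length-truncation notice")
	}
	if len(resp.Content) >= 60_000 {
		t.Fatal("expected the content to be shortened")
	}
}
```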

internal/tui/components/chat/messages/messages.go πŸ”—

@@ -274,11 +274,13 @@ func (m *messageCmp) renderThinkingContent() string {
 	if reasoningContent.StartedAt > 0 {
 		duration := m.message.ThinkingDuration()
 		if reasoningContent.FinishedAt > 0 {
+			if duration.String() == "0s" {
+				return ""
+			}
 			m.anim.SetLabel("")
 			opts := core.StatusOpts{
 				Title:       "Thought for",
 				Description: duration.String(),
-				NoIcon:      true,
 			}
 			return t.S().Base.PaddingLeft(1).Render(core.Status(opts, m.textWidth()-1))
 		} else if finishReason != nil && finishReason.Reason == message.FinishReasonCanceled {

internal/tui/components/chat/splash/splash.go πŸ”—

@@ -3,7 +3,6 @@ package splash
 import (
 	"fmt"
 	"os"
-	"slices"
 	"strings"
 	"time"
 
@@ -103,27 +102,6 @@ func New() Splash {
 
 func (s *splashCmp) SetOnboarding(onboarding bool) {
 	s.isOnboarding = onboarding
-	if onboarding {
-		providers, err := config.Providers()
-		if err != nil {
-			return
-		}
-		filteredProviders := []catwalk.Provider{}
-		simpleProviders := []string{
-			"anthropic",
-			"openai",
-			"gemini",
-			"xai",
-			"groq",
-			"openrouter",
-		}
-		for _, p := range providers {
-			if slices.Contains(simpleProviders, string(p.ID)) {
-				filteredProviders = append(filteredProviders, p)
-			}
-		}
-		s.modelList.SetProviders(filteredProviders)
-	}
 }
 
 func (s *splashCmp) SetProjectInit(needsInit bool) {

internal/tui/components/core/core.go πŸ”—

@@ -82,41 +82,30 @@ func Title(title string, width int) string {
 }
 
 type StatusOpts struct {
-	Icon             string
-	IconColor        color.Color
-	NoIcon           bool // If true, no icon will be displayed
+	Icon             string // if empty no icon will be shown
 	Title            string
 	TitleColor       color.Color
 	Description      string
 	DescriptionColor color.Color
-	ExtraContent     string // Additional content to append after the description
+	ExtraContent     string // additional content to append after the description
 }
 
-func Status(ops StatusOpts, width int) string {
+func Status(opts StatusOpts, width int) string {
 	t := styles.CurrentTheme()
-	icon := "●"
-	iconColor := t.Success
-	if ops.Icon != "" {
-		icon = ops.Icon
-	} else if ops.NoIcon {
-		icon = ""
-	}
-	if ops.IconColor != nil {
-		iconColor = ops.IconColor
-	}
-	title := ops.Title
+	icon := opts.Icon
+	title := opts.Title
 	titleColor := t.FgMuted
-	if ops.TitleColor != nil {
-		titleColor = ops.TitleColor
+	if opts.TitleColor != nil {
+		titleColor = opts.TitleColor
 	}
-	description := ops.Description
+	description := opts.Description
 	descriptionColor := t.FgSubtle
-	if ops.DescriptionColor != nil {
-		descriptionColor = ops.DescriptionColor
+	if opts.DescriptionColor != nil {
+		descriptionColor = opts.DescriptionColor
 	}
 	title = t.S().Base.Foreground(titleColor).Render(title)
 	if description != "" {
-		extraContentWidth := lipgloss.Width(ops.ExtraContent)
+		extraContentWidth := lipgloss.Width(opts.ExtraContent)
 		if extraContentWidth > 0 {
 			extraContentWidth += 1
 		}
@@ -126,11 +115,11 @@ func Status(ops StatusOpts, width int) string {
 
 	content := []string{}
 	if icon != "" {
-		content = append(content, t.S().Base.Foreground(iconColor).Render(icon))
+		content = append(content, icon)
 	}
 	content = append(content, title, description)
-	if ops.ExtraContent != "" {
-		content = append(content, ops.ExtraContent)
+	if opts.ExtraContent != "" {
+		content = append(content, opts.ExtraContent)
 	}
 
 	return strings.Join(content, " ")
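
With `NoIcon` and `IconColor` gone, callers style the icon themselves and pass it as a plain string; an empty `Icon` simply means no icon. A minimal usage sketch based on the call sites updated in this change (the theme accessor and icon values come from the lsp/mcp diffs below; the surrounding program is hypothetical):

```go
t := styles.CurrentTheme()

// Pre-styled icon: the caller picks glyph and color via the theme.
ready := core.Status(core.StatusOpts{
	Icon:        t.ItemOnlineIcon.String(),
	Title:       "gopls",
	Description: "ready",
}, 80)

// No icon at all: leave Icon empty (the old NoIcon flag no longer exists).
plain := core.Status(core.StatusOpts{
	Title:       "Info",
	Description: "This status has no icon",
}, 80)

_, _ = ready, plain
```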

internal/tui/components/core/status_test.go πŸ”—

@@ -37,7 +37,6 @@ func TestStatus(t *testing.T) {
 		{
 			name: "NoIcon",
 			opts: core.StatusOpts{
-				NoIcon:      true,
 				Title:       "Info",
 				Description: "This status has no icon",
 			},
@@ -47,7 +46,6 @@ func TestStatus(t *testing.T) {
 			name: "WithColors",
 			opts: core.StatusOpts{
 				Icon:             "⚠",
-				IconColor:        color.RGBA{255, 165, 0, 255}, // Orange
 				Title:            "Warning",
 				TitleColor:       color.RGBA{255, 255, 0, 255}, // Yellow
 				Description:      "This is a warning message",
@@ -102,7 +100,6 @@ func TestStatus(t *testing.T) {
 			name: "AllFieldsWithExtraContent",
 			opts: core.StatusOpts{
 				Icon:             "πŸš€",
-				IconColor:        color.RGBA{0, 255, 0, 255}, // Green
 				Title:            "Deployment",
 				TitleColor:       color.RGBA{0, 0, 255, 255}, // Blue
 				Description:      "Deploying to production environment",

internal/tui/components/dialogs/commands/commands.go πŸ”—

@@ -119,7 +119,10 @@ func (c *commandDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 	case tea.WindowSizeMsg:
 		c.wWidth = msg.Width
 		c.wHeight = msg.Height
-		return c, c.commandList.SetSize(c.listWidth(), c.listHeight())
+		return c, tea.Batch(
+			c.SetCommandType(c.commandType),
+			c.commandList.SetSize(c.listWidth(), c.listHeight()),
+		)
 	case tea.KeyPressMsg:
 		switch {
 		case key.Matches(msg, c.keyMap.Select):
@@ -318,7 +321,6 @@ func (c *commandDialogCmp) defaultCommands() []Command {
 			})
 		}
 	}
-
 	// Only show toggle compact mode command if window width is larger than compact breakpoint (90)
 	if c.wWidth > 120 && c.sessionID != "" {
 		commands = append(commands, Command{

internal/tui/components/dialogs/models/list.go πŸ”—

@@ -3,6 +3,7 @@ package models
 import (
 	"fmt"
 	"slices"
+	"strings"
 
 	tea "github.com/charmbracelet/bubbletea/v2"
 	"github.com/charmbracelet/catwalk/pkg/catwalk"
@@ -49,7 +50,15 @@ func (m *ModelListComponent) Init() tea.Cmd {
 	var cmds []tea.Cmd
 	if len(m.providers) == 0 {
 		providers, err := config.Providers()
-		m.providers = providers
+		filteredProviders := []catwalk.Provider{}
+		for _, p := range providers {
+			hasAPIKeyEnv := strings.HasPrefix(p.APIKey, "$")
+			if hasAPIKeyEnv && p.ID != catwalk.InferenceProviderAzure {
+				filteredProviders = append(filteredProviders, p)
+			}
+		}
+
+		m.providers = filteredProviders
 		if err != nil {
 			cmds = append(cmds, util.ReportError(err))
 		}
@@ -242,7 +251,3 @@ func (m *ModelListComponent) GetModelType() int {
 func (m *ModelListComponent) SetInputPlaceholder(placeholder string) {
 	m.list.SetInputPlaceholder(placeholder)
 }
-
-func (m *ModelListComponent) SetProviders(providers []catwalk.Provider) {
-	m.providers = providers
-}

internal/tui/components/dialogs/models/models.go πŸ”—

@@ -2,7 +2,6 @@ package models
 
 import (
 	"fmt"
-	"slices"
 	"time"
 
 	"github.com/charmbracelet/bubbles/v2/help"
@@ -96,24 +95,6 @@ func NewModelDialogCmp() ModelDialog {
 }
 
 func (m *modelDialogCmp) Init() tea.Cmd {
-	providers, err := config.Providers()
-	if err == nil {
-		filteredProviders := []catwalk.Provider{}
-		simpleProviders := []string{
-			"anthropic",
-			"openai",
-			"gemini",
-			"xai",
-			"groq",
-			"openrouter",
-		}
-		for _, p := range providers {
-			if slices.Contains(simpleProviders, string(p.ID)) {
-				filteredProviders = append(filteredProviders, p)
-			}
-		}
-		m.modelList.SetProviders(filteredProviders)
-	}
 	return tea.Batch(m.modelList.Init(), m.apiKeyInput.Init())
 }
 

internal/tui/components/files/files.go πŸ”—

@@ -59,6 +59,9 @@ func RenderFileList(fileSlice []SessionFile, opts RenderOptions) []string {
 
 	// Sort files by the latest version's created time
 	sort.Slice(fileSlice, func(i, j int) bool {
+		if fileSlice[i].History.LatestVersion.CreatedAt == fileSlice[j].History.LatestVersion.CreatedAt {
+			return strings.Compare(fileSlice[i].FilePath, fileSlice[j].FilePath) < 0
+		}
 		return fileSlice[i].History.LatestVersion.CreatedAt > fileSlice[j].History.LatestVersion.CreatedAt
 	})
 
@@ -95,8 +98,6 @@ func RenderFileList(fileSlice []SessionFile, opts RenderOptions) []string {
 		fileList = append(fileList,
 			core.Status(
 				core.StatusOpts{
-					IconColor:    t.FgMuted,
-					NoIcon:       true,
 					Title:        filePath,
 					ExtraContent: extraContent,
 				},

internal/tui/components/lsp/lsp.go πŸ”—

@@ -57,22 +57,21 @@ func RenderLSPList(lspClients map[string]*lsp.Client, opts RenderOptions) []stri
 		}
 
 		// Determine icon color and description based on state
-		iconColor := t.FgMuted
+		icon := t.ItemOfflineIcon
 		description := l.LSP.Command
 
 		if l.LSP.Disabled {
-			iconColor = t.FgMuted
 			description = t.S().Subtle.Render("disabled")
 		} else if state, exists := lspStates[l.Name]; exists {
 			switch state.State {
 			case lsp.StateStarting:
-				iconColor = t.Yellow
+				icon = t.ItemBusyIcon
 				description = t.S().Subtle.Render("starting...")
 			case lsp.StateReady:
-				iconColor = t.Success
+				icon = t.ItemOnlineIcon
 				description = l.LSP.Command
 			case lsp.StateError:
-				iconColor = t.Red
+				icon = t.ItemErrorIcon
 				if state.Error != nil {
 					description = t.S().Subtle.Render(fmt.Sprintf("error: %s", state.Error.Error()))
 				} else {
@@ -119,7 +118,7 @@ func RenderLSPList(lspClients map[string]*lsp.Client, opts RenderOptions) []stri
 		lspList = append(lspList,
 			core.Status(
 				core.StatusOpts{
-					IconColor:    iconColor,
+					Icon:         icon.String(),
 					Title:        l.Name,
 					Description:  description,
 					ExtraContent: extraContent,

internal/tui/components/mcp/mcp.go πŸ”—

@@ -54,25 +54,24 @@ func RenderMCPList(opts RenderOptions) []string {
 		}
 
 		// Determine icon and color based on state
-		iconColor := t.FgMuted
+		icon := t.ItemOfflineIcon
 		description := l.MCP.Command
 		extraContent := ""
 
 		if state, exists := mcpStates[l.Name]; exists {
 			switch state.State {
 			case agent.MCPStateDisabled:
-				iconColor = t.FgMuted
 				description = t.S().Subtle.Render("disabled")
 			case agent.MCPStateStarting:
-				iconColor = t.Yellow
+				icon = t.ItemBusyIcon
 				description = t.S().Subtle.Render("starting...")
 			case agent.MCPStateConnected:
-				iconColor = t.Success
+				icon = t.ItemOnlineIcon
 				if state.ToolCount > 0 {
 					extraContent = t.S().Subtle.Render(fmt.Sprintf("(%d tools)", state.ToolCount))
 				}
 			case agent.MCPStateError:
-				iconColor = t.Red
+				icon = t.ItemErrorIcon
 				if state.Error != nil {
 					description = t.S().Subtle.Render(fmt.Sprintf("error: %s", state.Error.Error()))
 				} else {
@@ -80,14 +79,13 @@ func RenderMCPList(opts RenderOptions) []string {
 				}
 			}
 		} else if l.MCP.Disabled {
-			iconColor = t.FgMuted
 			description = t.S().Subtle.Render("disabled")
 		}
 
 		mcpList = append(mcpList,
 			core.Status(
 				core.StatusOpts{
-					IconColor:    iconColor,
+					Icon:         icon.String(),
 					Title:        l.Name,
 					Description:  description,
 					ExtraContent: extraContent,

internal/tui/exp/list/list.go πŸ”—

@@ -4,6 +4,7 @@ import (
 	"log/slog"
 	"slices"
 	"strings"
+	"sync"
 
 	"github.com/charmbracelet/bubbles/v2/key"
 	tea "github.com/charmbracelet/bubbletea/v2"
@@ -94,6 +95,7 @@ type list[T Item] struct {
 
 	renderedItems *csync.Map[string, renderedItem]
 
+	renderMu sync.Mutex
 	rendered string
 
 	movingByItem       bool
@@ -462,7 +464,9 @@ func (l *list[T]) render() tea.Cmd {
 	// we are not rendering the first time
 	if l.rendered != "" {
 		// rerender everything will mostly hit cache
+		l.renderMu.Lock()
 		l.rendered, _ = l.renderIterator(0, false, "")
+		l.renderMu.Unlock()
 		if l.direction == DirectionBackward {
 			l.recalculateItemPositions()
 		}
@@ -472,9 +476,10 @@ func (l *list[T]) render() tea.Cmd {
 		}
 		return focusChangeCmd
 	}
+	l.renderMu.Lock()
 	rendered, finishIndex := l.renderIterator(0, true, "")
 	l.rendered = rendered
-
+	l.renderMu.Unlock()
 	// recalculate for the initial items
 	if l.direction == DirectionBackward {
 		l.recalculateItemPositions()
@@ -482,7 +487,10 @@ func (l *list[T]) render() tea.Cmd {
 	renderCmd := func() tea.Msg {
 		l.offset = 0
 		// render the rest
+
+		l.renderMu.Lock()
 		l.rendered, _ = l.renderIterator(finishIndex, false, l.rendered)
+		l.renderMu.Unlock()
 		// needed for backwards
 		if l.direction == DirectionBackward {
 			l.recalculateItemPositions()
@@ -491,7 +499,6 @@ func (l *list[T]) render() tea.Cmd {
 		if l.focused {
 			l.scrollToSelection()
 		}
-
 		return nil
 	}
 	return tea.Batch(focusChangeCmd, renderCmd)
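
The new `renderMu` exists because `renderCmd` is returned as a Bubble Tea command: commands run outside `Update`, so the closure's write to `l.rendered` can race with the next synchronous `render()` call. A small self-contained sketch of the same guarded-write pattern, with hypothetical names, assuming only that the returned command may execute on another goroutine:

```go
package main

import (
	"fmt"
	"sync"
)

type renderer struct {
	mu       sync.Mutex
	rendered string
}

// update mirrors list.render(): it writes the first pass immediately and
// returns a command that finishes rendering later, possibly concurrently.
func (r *renderer) update() func() {
	r.mu.Lock()
	r.rendered = "first pass"
	r.mu.Unlock()

	return func() {
		r.mu.Lock()
		defer r.mu.Unlock()
		r.rendered += " + remaining items"
	}
}

func main() {
	r := &renderer{}
	cmd := r.update()
	go cmd() // the Bubble Tea runtime would run this off the update path

	r.mu.Lock()
	fmt.Println(r.rendered) // safe: every writer and reader holds the lock
	r.mu.Unlock()
}
```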

internal/tui/styles/crush.go πŸ”—

@@ -1,11 +1,12 @@
 package styles
 
 import (
+	"github.com/charmbracelet/lipgloss/v2"
 	"github.com/charmbracelet/x/exp/charmtone"
 )
 
 func NewCrushTheme() *Theme {
-	return &Theme{
+	t := &Theme{
 		Name:   "crush",
 		IsDark: true,
 
@@ -54,4 +55,12 @@ func NewCrushTheme() *Theme {
 		RedLight: charmtone.Salmon,
 		Cherry:   charmtone.Cherry,
 	}
+
+	// LSP and MCP status.
+	t.ItemOfflineIcon = lipgloss.NewStyle().Foreground(charmtone.Squid).SetString("●")
+	t.ItemBusyIcon = t.ItemOfflineIcon.Foreground(charmtone.Citron)
+	t.ItemErrorIcon = t.ItemOfflineIcon.Foreground(charmtone.Coral)
+	t.ItemOnlineIcon = t.ItemOfflineIcon.Foreground(charmtone.Guac)
+
+	return t
 }
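
The status icons are `lipgloss.Style` values with `SetString`, so each state variant is just a copy of `ItemOfflineIcon` with a different foreground, and `.String()` (as used in the lsp/mcp call sites above) renders the colored glyph. A minimal standalone sketch of that pattern, reusing the same charmtone colors as the diff:

```go
package main

import (
	"fmt"

	"github.com/charmbracelet/lipgloss/v2"
	"github.com/charmbracelet/x/exp/charmtone"
)

func main() {
	// Base icon: glyph plus the "offline" color, as in NewCrushTheme above.
	offline := lipgloss.NewStyle().Foreground(charmtone.Squid).SetString("●")

	// Styles are values, so Foreground on a copy yields a new variant that
	// keeps the "●" set above.
	online := offline.Foreground(charmtone.Guac)

	fmt.Println(offline.String(), online.String())
}
```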

internal/tui/styles/theme.go πŸ”—

@@ -74,6 +74,12 @@ type Theme struct {
 	RedLight color.Color
 	Cherry   color.Color
 
+	// LSP and MCP status indicators.
+	ItemOfflineIcon lipgloss.Style
+	ItemBusyIcon    lipgloss.Style
+	ItemErrorIcon   lipgloss.Style
+	ItemOnlineIcon  lipgloss.Style
+
 	styles *Styles
 }