Merge remote-tracking branch 'origin/main' into crush-fantasy

kujtimiihoxha created

Change summary

.github/cla-signatures.json                           |  16 
.goreleaser.yml                                       |   1 
CRUSH.md                                              |   2 
README.md                                             |   7 
Taskfile.yaml                                         |  16 
go.mod                                                |   5 
go.sum                                                |   6 
internal/agent/coordinator.go                         |   8 
internal/agent/tools/diagnostics.go                   |   2 
internal/agent/tools/grep.go                          | 177 ++++-----
internal/agent/tools/grep_test.go                     | 221 +++++++++++++
internal/agent/tools/ls.go                            |   2 
internal/agent/tools/mcp-tools.go                     |  71 +++
internal/agent/tools/references.go                    | 190 +++++++++++
internal/agent/tools/references.md                    |  36 ++
internal/agent/tools/rg.go                            |   2 
internal/agent/tools/testdata/grep.txt                |   3 
internal/cmd/dirs_test.go                             |  46 ++
internal/config/config.go                             |  10 
internal/config/load.go                               |   9 
internal/config/provider.go                           |  34 -
internal/config/provider_test.go                      |   2 
internal/log/http.go                                  |  38 +-
internal/lsp/client.go                                |  10 
internal/tui/components/chat/editor/editor.go         |   2 
internal/tui/components/chat/messages/messages.go     |   5 
internal/tui/components/completions/completions.go    |   2 
internal/tui/components/dialogs/commands/arguments.go |   7 
internal/tui/components/dialogs/commands/keys.go      |   7 
internal/tui/components/dialogs/permissions/keys.go   |   2 
internal/tui/exp/list/filterable.go                   |  47 +-
internal/tui/exp/list/list.go                         |   5 
internal/tui/util/util.go                             |   7 
33 files changed, 787 insertions(+), 211 deletions(-)

Detailed changes

.github/cla-signatures.json 🔗

@@ -711,6 +711,22 @@
       "created_at": "2025-10-13T05:56:20Z",
       "repoId": 987670088,
       "pullRequestNo": 1223
+    },
+    {
+      "name": "BrunoKrugel",
+      "id": 30608179,
+      "comment_id": 3411978929,
+      "created_at": "2025-10-16T17:30:07Z",
+      "repoId": 987670088,
+      "pullRequestNo": 1245
+    },
+    {
+      "name": "dpolishuk",
+      "id": 466424,
+      "comment_id": 3418756045,
+      "created_at": "2025-10-18T19:24:00Z",
+      "repoId": 987670088,
+      "pullRequestNo": 1254
     }
   ]
 }

.goreleaser.yml 🔗

@@ -303,6 +303,7 @@ changelog:
       - "^docs: update$"
       - "^test:"
       - "^test\\("
+      - "^v\\d.*"
       - "merge conflict"
       - "merge conflict"
       - Merge branch

CRUSH.md 🔗

@@ -54,7 +54,7 @@ func TestYourFunction(t *testing.T) {
 ## Formatting
 
 - ALWAYS format any Go code you write.
-  - First, try `goftumpt -w .`.
+  - First, try `gofumpt -w .`.
   - If `gofumpt` is not available, use `goimports`.
   - If `goimports` is not available, use `gofmt`.
   - You can also use `task fmt` to run `gofumpt -w .` on the entire project,

README.md 🔗

@@ -189,8 +189,8 @@ That said, you can also set environment variables for preferred providers.
 | `AWS_ACCESS_KEY_ID`         | AWS Bedrock (Claude)                               |
 | `AWS_SECRET_ACCESS_KEY`     | AWS Bedrock (Claude)                               |
 | `AWS_REGION`                | AWS Bedrock (Claude)                               |
-| `AWS_PROFILE`               | Custom AWS Profile                                 |
-| `AWS_REGION`                | AWS Region                                         |
+| `AWS_PROFILE`               | AWS Bedrock (Custom Profile)                       |
+| `AWS_BEARER_TOKEN_BEDROCK`  | AWS Bedrock                                        |
 | `AZURE_OPENAI_API_ENDPOINT` | Azure OpenAI models                                |
 | `AZURE_OPENAI_API_KEY`      | Azure OpenAI models (optional when using Entra ID) |
 | `AZURE_OPENAI_API_VERSION`  | Azure OpenAI models                                |
@@ -479,6 +479,7 @@ Crush currently supports running Anthropic models through Bedrock, with caching
 - A Bedrock provider will appear once you have AWS configured, i.e. `aws configure`
 - Crush also expects the `AWS_REGION` or `AWS_DEFAULT_REGION` to be set
 - To use a specific AWS profile set `AWS_PROFILE` in your environment, i.e. `AWS_PROFILE=myprofile crush`
+- Alternatively to `aws configure`, you can also just set `AWS_BEARER_TOKEN_BEDROCK`
 
 ### Vertex AI Platform
 
@@ -649,8 +650,8 @@ See the [contributing guide](https://github.com/charmbracelet/crush?tab=contribu
 Weโ€™d love to hear your thoughts on this project. Need help? We gotchu. You can find us on:
 
 - [Twitter](https://twitter.com/charmcli)
-- [Discord][discord]
 - [Slack](https://charm.land/slack)
+- [Discord][discord]
 - [The Fediverse](https://mastodon.social/@charmcli)
 - [Bluesky](https://bsky.app/profile/charm.land)
 

Taskfile.yaml 🔗

@@ -2,6 +2,10 @@
 
 version: "3"
 
+vars:
+  VERSION:
+    sh: git describe --long 2>/dev/null || echo ""
+
 env:
   CGO_ENABLED: 0
   GOEXPERIMENT: greenteagc
@@ -30,8 +34,10 @@ tasks:
 
   build:
     desc: Run build
+    vars:
+      LDFLAGS: '{{if .VERSION}}-ldflags="-X github.com/charmbracelet/crush/internal/version.Version={{.VERSION}}"{{end}}'
     cmds:
-      - go build .
+      - go build {{.LDFLAGS}} .
     generates:
       - crush
 
@@ -59,8 +65,10 @@ tasks:
 
   install:
     desc: Install the application
+    vars:
+      LDFLAGS: '{{if .VERSION}}-ldflags="-X github.com/charmbracelet/crush/internal/version.Version={{.VERSION}}"{{end}}'
     cmds:
-      - go install -v .
+      - go install {{.LDFLAGS}} -v .
 
   profile:cpu:
     desc: 10s CPU profile
@@ -99,9 +107,9 @@ tasks:
     cmds:
       - task: fetch-tags
       - git commit --allow-empty -m "{{.NEXT}}"
-      - git tag --annotate --sign {{.NEXT}} {{.CLI_ARGS}}
+      - git tag --annotate --sign -m "{{.NEXT}}" {{.NEXT}} {{.CLI_ARGS}}
       - echo "Pushing {{.NEXT}}..."
-      - git push origin --tags
+      - git push origin main --follow-tags
 
   fetch-tags:
     cmds:
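
The new `VERSION` var feeds `git describe --long` into the binary through `-ldflags "-X ..."`. For `-X` to take effect, the target must be a package-level string variable; a minimal sketch of what `internal/version` would need to look like (that package is not shown in this diff, so treat the layout as an assumption):

```go
// Package version holds the build-time version string.
package version

// Version is meant to be overridden at link time, e.g.:
//
//	go build -ldflags "-X github.com/charmbracelet/crush/internal/version.Version=$(git describe --long)" .
//
// A plain `go build .` leaves the default in place.
var Version = "unknown"
```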

go.mod 🔗

@@ -22,6 +22,9 @@ require (
 	github.com/charmbracelet/x/ansi v0.10.2
 	github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3
 	github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a
+	github.com/charmbracelet/x/exp/ordered v0.1.0
+	github.com/charmbracelet/x/powernap v0.0.0-20251015113943-25f979b54ad4
+	github.com/charmbracelet/x/term v0.2.1
 	github.com/disintegration/imageorient v0.0.0-20180920195336-8147d86e83ec
 	github.com/google/uuid v1.6.0
 	github.com/invopop/jsonschema v0.13.0
@@ -82,8 +85,6 @@ require (
 	github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a // indirect
 	github.com/charmbracelet/x/exp/slice v0.0.0-20250904123553-b4e2667e5ad5
 	github.com/charmbracelet/x/json v0.2.0 // indirect
-	github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4
-	github.com/charmbracelet/x/term v0.2.1
 	github.com/charmbracelet/x/termios v0.1.1 // indirect
 	github.com/charmbracelet/x/windows v0.2.2 // indirect
 	github.com/davecgh/go-spew v1.1.1 // indirect

go.sum 🔗

@@ -104,12 +104,14 @@ github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3 h1:1
 github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3/go.mod h1:T9jr8CzFpjhFVHjNjKwbAD7KwBNyFnj2pntAO7F2zw0=
 github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a h1:FsHEJ52OC4VuTzU8t+n5frMjLvpYWEznSr/u8tnkCYw=
 github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
+github.com/charmbracelet/x/exp/ordered v0.1.0 h1:55/qLwjIh0gL0Vni+QAWk7T/qRVP6sBf+2agPBgnOFE=
+github.com/charmbracelet/x/exp/ordered v0.1.0/go.mod h1:5UHwmG+is5THxMyCJHNPCn2/ecI07aKNrW+LcResjJ8=
 github.com/charmbracelet/x/exp/slice v0.0.0-20250904123553-b4e2667e5ad5 h1:DTSZxdV9qQagD4iGcAt9RgaRBZtJl01bfKgdLzUzUPI=
 github.com/charmbracelet/x/exp/slice v0.0.0-20250904123553-b4e2667e5ad5/go.mod h1:vI5nDVMWi6veaYH+0Fmvpbe/+cv/iJfMntdh+N0+Tms=
 github.com/charmbracelet/x/json v0.2.0 h1:DqB+ZGx2h+Z+1s98HOuOyli+i97wsFQIxP2ZQANTPrQ=
 github.com/charmbracelet/x/json v0.2.0/go.mod h1:opFIflx2YgXgi49xVUu8gEQ21teFAxyMwvOiZhIvWNM=
-github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4 h1:ZhDGU688EHQXslD9KphRpXwK0pKP03egUoZAATUDlV0=
-github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4/go.mod h1:cmdl5zlP5mR8TF2Y68UKc7hdGUDiSJ2+4hk0h04Hsx4=
+github.com/charmbracelet/x/powernap v0.0.0-20251015113943-25f979b54ad4 h1:i/XilBPYK4L1Yo/mc9FPx0SyJzIsN0y4sj1MWq9Sscc=
+github.com/charmbracelet/x/powernap v0.0.0-20251015113943-25f979b54ad4/go.mod h1:cmdl5zlP5mR8TF2Y68UKc7hdGUDiSJ2+4hk0h04Hsx4=
 github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
 github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
 github.com/charmbracelet/x/termios v0.1.1 h1:o3Q2bT8eqzGnGPOYheoYS8eEleT5ZVNYNy8JawjaNZY=

internal/agent/coordinator.go 🔗

@@ -305,6 +305,10 @@ func (c *coordinator) buildTools(ctx context.Context, agent config.Agent) ([]fan
 		tools.NewWriteTool(c.lspClients, c.permissions, c.history, c.cfg.WorkingDir()),
 	)
 
+	if len(c.cfg.LSP) > 0 {
+		allTools = append(allTools, tools.NewDiagnosticsTool(c.lspClients), tools.NewReferencesTool(c.lspClients))
+	}
+
 	var filteredTools []fantasy.AgentTool
 	for _, tool := range allTools {
 		if slices.Contains(agent.AllowedTools, tool.Info().Name) {
@@ -337,7 +341,9 @@ func (c *coordinator) buildTools(ctx context.Context, agent config.Agent) ([]fan
 			}
 		}
 	}
-
+	slices.SortFunc(filteredTools, func(a, b fantasy.AgentTool) int {
+		return strings.Compare(a.Info().Name, b.Info().Name)
+	})
 	return filteredTools, nil
 }
 

internal/agent/tools/diagnostics.go 🔗

@@ -19,7 +19,7 @@ type DiagnosticsParams struct {
 	FilePath string `json:"file_path,omitempty" description:"The path to the file to get diagnostics for (leave w empty for project diagnostics)"`
 }
 
-const DiagnosticsToolName = "diagnostics"
+const DiagnosticsToolName = "lsp_diagnostics"
 
 //go:embed diagnostics.md
 var diagnosticsDescription []byte

internal/agent/tools/grep.go 🔗

@@ -2,16 +2,18 @@ package tools
 
 import (
 	"bufio"
+	"bytes"
 	"context"
 	_ "embed"
+	"encoding/json"
 	"fmt"
 	"io"
+	"net/http"
 	"os"
 	"os/exec"
 	"path/filepath"
 	"regexp"
 	"sort"
-	"strconv"
 	"strings"
 	"sync"
 	"time"
@@ -81,6 +83,7 @@ type grepMatch struct {
 	path     string
 	modTime  time.Time
 	lineNum  int
+	charNum  int
 	lineText string
 }
 
@@ -97,6 +100,18 @@ const (
 //go:embed grep.md
 var grepDescription []byte
 
+// escapeRegexPattern escapes special regex characters so they're treated as literal characters
+func escapeRegexPattern(pattern string) string {
+	specialChars := []string{"\\", ".", "+", "*", "?", "(", ")", "[", "]", "{", "}", "^", "$", "|"}
+	escaped := pattern
+
+	for _, char := range specialChars {
+		escaped = strings.ReplaceAll(escaped, char, "\\"+char)
+	}
+
+	return escaped
+}
+
 func NewGrepTool(workingDir string) fantasy.AgentTool {
 	return fantasy.NewAgentTool(
 		GrepToolName,
@@ -142,7 +157,11 @@ func NewGrepTool(workingDir string) fantasy.AgentTool {
 						if len(lineText) > maxGrepContentWidth {
 							lineText = lineText[:maxGrepContentWidth] + "..."
 						}
-						fmt.Fprintf(&output, "  Line %d: %s\n", match.lineNum, lineText)
+						if match.charNum > 0 {
+							fmt.Fprintf(&output, "  Line %d, Char %d: %s\n", match.lineNum, match.charNum, lineText)
+						} else {
+							fmt.Fprintf(&output, "  Line %d: %s\n", match.lineNum, lineText)
+						}
 					} else {
 						fmt.Fprintf(&output, "  %s\n", match.path)
 					}
@@ -163,18 +182,6 @@ func NewGrepTool(workingDir string) fantasy.AgentTool {
 		})
 }
 
-// escapeRegexPattern escapes special regex characters so they're treated as literal characters
-func escapeRegexPattern(pattern string) string {
-	specialChars := []string{"\\", ".", "+", "*", "?", "(", ")", "[", "]", "{", "}", "^", "$", "|"}
-	escaped := pattern
-
-	for _, char := range specialChars {
-		escaped = strings.ReplaceAll(escaped, char, "\\"+char)
-	}
-
-	return escaped
-}
-
 func searchFiles(ctx context.Context, pattern, rootPath, include string, limit int) ([]grepMatch, bool, error) {
 	matches, err := searchWithRipgrep(ctx, pattern, rootPath, include)
 	if err != nil {
@@ -218,66 +225,51 @@ func searchWithRipgrep(ctx context.Context, pattern, path, include string) ([]gr
 		return nil, err
 	}
 
-	lines := strings.Split(strings.TrimSpace(string(output)), "\n")
-	matches := make([]grepMatch, 0, len(lines))
-
-	for _, line := range lines {
-		if line == "" {
+	var matches []grepMatch
+	for line := range bytes.SplitSeq(bytes.TrimSpace(output), []byte{'\n'}) {
+		if len(line) == 0 {
 			continue
 		}
-
-		// Parse ripgrep output using null separation
-		filePath, lineNumStr, lineText, ok := parseRipgrepLine(line)
-		if !ok {
+		var match ripgrepMatch
+		if err := json.Unmarshal(line, &match); err != nil {
 			continue
 		}
-
-		lineNum, err := strconv.Atoi(lineNumStr)
-		if err != nil {
+		if match.Type != "match" {
 			continue
 		}
-
-		fileInfo, err := os.Stat(filePath)
-		if err != nil {
-			continue // Skip files we can't access
+		for _, m := range match.Data.Submatches {
+			fi, err := os.Stat(match.Data.Path.Text)
+			if err != nil {
+				continue // Skip files we can't access
+			}
+			matches = append(matches, grepMatch{
+				path:     match.Data.Path.Text,
+				modTime:  fi.ModTime(),
+				lineNum:  match.Data.LineNumber,
+				charNum:  m.Start + 1, // ensure 1-based
+				lineText: strings.TrimSpace(match.Data.Lines.Text),
+			})
+			// only get the first match of each line
+			break
 		}
-
-		matches = append(matches, grepMatch{
-			path:     filePath,
-			modTime:  fileInfo.ModTime(),
-			lineNum:  lineNum,
-			lineText: lineText,
-		})
 	}
-
 	return matches, nil
 }
 
-// parseRipgrepLine parses ripgrep output with null separation to handle Windows paths
-func parseRipgrepLine(line string) (filePath, lineNum, lineText string, ok bool) {
-	// Split on null byte first to separate filename from rest
-	parts := strings.SplitN(line, "\x00", 2)
-	if len(parts) != 2 {
-		return "", "", "", false
-	}
-
-	filePath = parts[0]
-	remainder := parts[1]
-
-	// Now split the remainder on first colon: "linenum:content"
-	colonIndex := strings.Index(remainder, ":")
-	if colonIndex == -1 {
-		return "", "", "", false
-	}
-
-	lineNumStr := remainder[:colonIndex]
-	lineText = remainder[colonIndex+1:]
-
-	if _, err := strconv.Atoi(lineNumStr); err != nil {
-		return "", "", "", false
-	}
-
-	return filePath, lineNumStr, lineText, true
+type ripgrepMatch struct {
+	Type string `json:"type"`
+	Data struct {
+		Path struct {
+			Text string `json:"text"`
+		} `json:"path"`
+		Lines struct {
+			Text string `json:"text"`
+		} `json:"lines"`
+		LineNumber int `json:"line_number"`
+		Submatches []struct {
+			Start int `json:"start"`
+		} `json:"submatches"`
+	} `json:"data"`
 }
 
 func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error) {
@@ -329,7 +321,7 @@ func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error
 			return nil
 		}
 
-		match, lineNum, lineText, err := fileContainsPattern(path, regex)
+		match, lineNum, charNum, lineText, err := fileContainsPattern(path, regex)
 		if err != nil {
 			return nil // Skip files we can't read
 		}
@@ -339,6 +331,7 @@ func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error
 				path:     path,
 				modTime:  info.ModTime(),
 				lineNum:  lineNum,
+				charNum:  charNum,
 				lineText: lineText,
 			})
 
@@ -356,15 +349,15 @@ func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error
 	return matches, nil
 }
 
-func fileContainsPattern(filePath string, pattern *regexp.Regexp) (bool, int, string, error) {
-	// Quick binary file detection
-	if isBinaryFile(filePath) {
-		return false, 0, "", nil
+func fileContainsPattern(filePath string, pattern *regexp.Regexp) (bool, int, int, string, error) {
+	// Only search text files.
+	if !isTextFile(filePath) {
+		return false, 0, 0, "", nil
 	}
 
 	file, err := os.Open(filePath)
 	if err != nil {
-		return false, 0, "", err
+		return false, 0, 0, "", err
 	}
 	defer file.Close()
 
@@ -373,53 +366,39 @@ func fileContainsPattern(filePath string, pattern *regexp.Regexp) (bool, int, st
 	for scanner.Scan() {
 		lineNum++
 		line := scanner.Text()
-		if pattern.MatchString(line) {
-			return true, lineNum, line, nil
+		if loc := pattern.FindStringIndex(line); loc != nil {
+			charNum := loc[0] + 1
+			return true, lineNum, charNum, line, nil
 		}
 	}
 
-	return false, 0, "", scanner.Err()
+	return false, 0, 0, "", scanner.Err()
 }
 
-var binaryExts = map[string]struct{}{
-	".exe": {}, ".dll": {}, ".so": {}, ".dylib": {},
-	".bin": {}, ".obj": {}, ".o": {}, ".a": {},
-	".zip": {}, ".tar": {}, ".gz": {}, ".bz2": {},
-	".jpg": {}, ".jpeg": {}, ".png": {}, ".gif": {},
-	".pdf": {}, ".doc": {}, ".docx": {}, ".xls": {},
-	".mp3": {}, ".mp4": {}, ".avi": {}, ".mov": {},
-}
-
-// isBinaryFile performs a quick check to determine if a file is binary
-func isBinaryFile(filePath string) bool {
-	// Check file extension first (fastest)
-	ext := strings.ToLower(filepath.Ext(filePath))
-	if _, isBinary := binaryExts[ext]; isBinary {
-		return true
-	}
-
-	// Quick content check for files without clear extensions
+// isTextFile checks if a file is a text file by examining its MIME type.
+func isTextFile(filePath string) bool {
 	file, err := os.Open(filePath)
 	if err != nil {
-		return false // If we can't open it, let the caller handle the error
+		return false
 	}
 	defer file.Close()
 
-	// Read first 512 bytes to check for null bytes
+	// Read first 512 bytes for MIME type detection.
 	buffer := make([]byte, 512)
 	n, err := file.Read(buffer)
 	if err != nil && err != io.EOF {
 		return false
 	}
 
-	// Check for null bytes (common in binary files)
-	for i := range n {
-		if buffer[i] == 0 {
-			return true
-		}
-	}
+	// Detect content type.
+	contentType := http.DetectContentType(buffer[:n])
 
-	return false
+	// Check if it's a text MIME type.
+	return strings.HasPrefix(contentType, "text/") ||
+		contentType == "application/json" ||
+		contentType == "application/xml" ||
+		contentType == "application/javascript" ||
+		contentType == "application/x-sh"
 }
 
 func globToRegex(glob string) string {
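
With `--json`, every line ripgrep prints is a standalone JSON event, and only events with `"type": "match"` carry the submatch byte offsets that populate `charNum`. A self-contained sketch of decoding one such line with the same struct shape used above (the sample event is illustrative, not captured output):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// ripgrepMatch mirrors the struct added in grep.go, trimmed for illustration.
type ripgrepMatch struct {
	Type string `json:"type"`
	Data struct {
		Path struct {
			Text string `json:"text"`
		} `json:"path"`
		Lines struct {
			Text string `json:"text"`
		} `json:"lines"`
		LineNumber int `json:"line_number"`
		Submatches []struct {
			Start int `json:"start"`
		} `json:"submatches"`
	} `json:"data"`
}

func main() {
	// A representative `rg --json` match event for the testdata file.
	line := `{"type":"match","data":{"path":{"text":"testdata/grep.txt"},` +
		`"lines":{"text":"I wanna grep THIS particular word\n"},"line_number":2,` +
		`"submatches":[{"match":{"text":"THIS"},"start":13,"end":17}]}}`

	var m ripgrepMatch
	if err := json.Unmarshal([]byte(line), &m); err != nil {
		panic(err)
	}
	// Submatch offsets are 0-based byte offsets, hence the +1 in grep.go
	// when reporting a 1-based character position.
	fmt.Println(m.Data.LineNumber, m.Data.Submatches[0].Start+1) // 2 14
}
```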

internal/agent/tools/grep_test.go 🔗

@@ -198,3 +198,224 @@ func BenchmarkRegexCacheVsCompile(b *testing.B) {
 		}
 	})
 }
+
+func TestIsTextFile(t *testing.T) {
+	t.Parallel()
+	tempDir := t.TempDir()
+
+	tests := []struct {
+		name     string
+		filename string
+		content  []byte
+		wantText bool
+	}{
+		{
+			name:     "go file",
+			filename: "test.go",
+			content:  []byte("package main\n\nfunc main() {}\n"),
+			wantText: true,
+		},
+		{
+			name:     "yaml file",
+			filename: "config.yaml",
+			content:  []byte("key: value\nlist:\n  - item1\n  - item2\n"),
+			wantText: true,
+		},
+		{
+			name:     "yml file",
+			filename: "config.yml",
+			content:  []byte("key: value\n"),
+			wantText: true,
+		},
+		{
+			name:     "json file",
+			filename: "data.json",
+			content:  []byte(`{"key": "value"}`),
+			wantText: true,
+		},
+		{
+			name:     "javascript file",
+			filename: "script.js",
+			content:  []byte("console.log('hello');\n"),
+			wantText: true,
+		},
+		{
+			name:     "typescript file",
+			filename: "script.ts",
+			content:  []byte("const x: string = 'hello';\n"),
+			wantText: true,
+		},
+		{
+			name:     "markdown file",
+			filename: "README.md",
+			content:  []byte("# Title\n\nSome content\n"),
+			wantText: true,
+		},
+		{
+			name:     "shell script",
+			filename: "script.sh",
+			content:  []byte("#!/bin/bash\necho 'hello'\n"),
+			wantText: true,
+		},
+		{
+			name:     "python file",
+			filename: "script.py",
+			content:  []byte("print('hello')\n"),
+			wantText: true,
+		},
+		{
+			name:     "xml file",
+			filename: "data.xml",
+			content:  []byte("<?xml version=\"1.0\"?>\n<root></root>\n"),
+			wantText: true,
+		},
+		{
+			name:     "plain text",
+			filename: "file.txt",
+			content:  []byte("plain text content\n"),
+			wantText: true,
+		},
+		{
+			name:     "css file",
+			filename: "style.css",
+			content:  []byte("body { color: red; }\n"),
+			wantText: true,
+		},
+		{
+			name:     "scss file",
+			filename: "style.scss",
+			content:  []byte("$primary: blue;\nbody { color: $primary; }\n"),
+			wantText: true,
+		},
+		{
+			name:     "sass file",
+			filename: "style.sass",
+			content:  []byte("$primary: blue\nbody\n  color: $primary\n"),
+			wantText: true,
+		},
+		{
+			name:     "rust file",
+			filename: "main.rs",
+			content:  []byte("fn main() {\n    println!(\"Hello, world!\");\n}\n"),
+			wantText: true,
+		},
+		{
+			name:     "zig file",
+			filename: "main.zig",
+			content:  []byte("const std = @import(\"std\");\npub fn main() void {}\n"),
+			wantText: true,
+		},
+		{
+			name:     "java file",
+			filename: "Main.java",
+			content:  []byte("public class Main {\n    public static void main(String[] args) {}\n}\n"),
+			wantText: true,
+		},
+		{
+			name:     "c file",
+			filename: "main.c",
+			content:  []byte("#include <stdio.h>\nint main() { return 0; }\n"),
+			wantText: true,
+		},
+		{
+			name:     "cpp file",
+			filename: "main.cpp",
+			content:  []byte("#include <iostream>\nint main() { return 0; }\n"),
+			wantText: true,
+		},
+		{
+			name:     "fish shell",
+			filename: "script.fish",
+			content:  []byte("#!/usr/bin/env fish\necho 'hello'\n"),
+			wantText: true,
+		},
+		{
+			name:     "powershell file",
+			filename: "script.ps1",
+			content:  []byte("Write-Host 'Hello, World!'\n"),
+			wantText: true,
+		},
+		{
+			name:     "cmd batch file",
+			filename: "script.bat",
+			content:  []byte("@echo off\necho Hello, World!\n"),
+			wantText: true,
+		},
+		{
+			name:     "cmd file",
+			filename: "script.cmd",
+			content:  []byte("@echo off\necho Hello, World!\n"),
+			wantText: true,
+		},
+		{
+			name:     "binary exe",
+			filename: "binary.exe",
+			content:  []byte{0x4D, 0x5A, 0x90, 0x00, 0x03, 0x00, 0x00, 0x00},
+			wantText: false,
+		},
+		{
+			name:     "png image",
+			filename: "image.png",
+			content:  []byte{0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A},
+			wantText: false,
+		},
+		{
+			name:     "jpeg image",
+			filename: "image.jpg",
+			content:  []byte{0xFF, 0xD8, 0xFF, 0xE0, 0x00, 0x10, 0x4A, 0x46},
+			wantText: false,
+		},
+		{
+			name:     "zip archive",
+			filename: "archive.zip",
+			content:  []byte{0x50, 0x4B, 0x03, 0x04, 0x14, 0x00, 0x00, 0x00},
+			wantText: false,
+		},
+		{
+			name:     "pdf file",
+			filename: "document.pdf",
+			content:  []byte("%PDF-1.4\n%âãÏÓ\n"),
+			wantText: false,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			t.Parallel()
+			filePath := filepath.Join(tempDir, tt.filename)
+			require.NoError(t, os.WriteFile(filePath, tt.content, 0o644))
+
+			got := isTextFile(filePath)
+			require.Equal(t, tt.wantText, got, "isTextFile(%s) = %v, want %v", tt.filename, got, tt.wantText)
+		})
+	}
+}
+
+func TestColumnMatch(t *testing.T) {
+	t.Parallel()
+
+	// Test both implementations
+	for name, fn := range map[string]func(pattern, path, include string) ([]grepMatch, error){
+		"regex": searchFilesWithRegex,
+		"rg": func(pattern, path, include string) ([]grepMatch, error) {
+			return searchWithRipgrep(t.Context(), pattern, path, include)
+		},
+	} {
+		t.Run(name, func(t *testing.T) {
+			t.Parallel()
+
+			if name == "rg" && getRg() == "" {
+				t.Skip("rg is not in $PATH")
+			}
+
+			matches, err := fn("THIS", "./testdata/", "")
+			require.NoError(t, err)
+			require.Len(t, matches, 1)
+			match := matches[0]
+			require.Equal(t, 2, match.lineNum)
+			require.Equal(t, 14, match.charNum)
+			require.Equal(t, "I wanna grep THIS particular word", match.lineText)
+			require.Equal(t, "testdata/grep.txt", filepath.ToSlash(filepath.Clean(match.path)))
+		})
+	}
+}

internal/agent/tools/ls.go 🔗

@@ -115,7 +115,7 @@ func ListDirectoryTree(searchPath string, params LSParams, lsConfig config.ToolL
 	}
 
 	depth, limit := lsConfig.Limits()
-	maxFiles := min(limit, maxLSFiles)
+	maxFiles := cmp.Or(limit, maxLSFiles)
 	files, truncated, err := fsext.ListDirectory(
 		searchPath,
 		params.Ignore,

internal/agent/tools/mcp-tools.go 🔗

@@ -10,6 +10,7 @@ import (
 	"log/slog"
 	"maps"
 	"net/http"
+	"os"
 	"os/exec"
 	"slices"
 	"strings"
@@ -116,15 +117,26 @@ func (m *McpTool) MCPToolName() string {
 }
 
 func (b *McpTool) Info() fantasy.ToolInfo {
-	input := b.tool.InputSchema.(map[string]any)
-	required, _ := input["required"].([]string)
-	if required == nil {
-		required = make([]string, 0)
-	}
-	parameters, _ := input["properties"].(map[string]any)
-	if parameters == nil {
-		parameters = make(map[string]any)
+	parameters := make(map[string]any)
+	required := make([]string, 0)
+
+	if input, ok := b.tool.InputSchema.(map[string]any); ok {
+		if props, ok := input["properties"].(map[string]any); ok {
+			parameters = props
+		}
+		if req, ok := input["required"].([]any); ok {
+			// Convert []any -> []string when elements are strings
+			for _, v := range req {
+				if s, ok := v.(string); ok {
+					required = append(required, s)
+				}
+			}
+		} else if reqStr, ok := input["required"].([]string); ok {
+			// Handle case where it's already []string
+			required = reqStr
+		}
 	}
+
 	return fantasy.ToolInfo{
 		Name:        fmt.Sprintf("mcp_%s_%s", b.mcpName, b.tool.Name),
 		Description: b.tool.Description,
@@ -322,6 +334,10 @@ func GetMCPTools(ctx context.Context, permissions permission.Service, cfg *confi
 						slog.Error("panic in mcp client initialization", "error", err, "name", name)
 					}
 				}()
+
+				ctx, cancel := context.WithTimeout(ctx, mcpTimeout(m))
+				defer cancel()
+
 				c, err := createMCPSession(ctx, name, m, cfg.Resolver())
 				if err != nil {
 					return
@@ -370,6 +386,8 @@ func createMCPSession(ctx context.Context, name string, m config.MCPConfig, reso
 	if err != nil {
 		updateMCPState(name, MCPStateError, err, nil, 0)
 		slog.Error("error creating mcp client", "error", err, "name", name)
+		cancel()
+		cancelTimer.Stop()
 		return nil, err
 	}
 
@@ -392,9 +410,11 @@ func createMCPSession(ctx context.Context, name string, m config.MCPConfig, reso
 
 	session, err := client.Connect(mcpCtx, transport, nil)
 	if err != nil {
+		err = maybeStdioErr(err, transport)
 		updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
 		slog.Error("error starting mcp client", "error", err, "name", name)
 		cancel()
+		cancelTimer.Stop()
 		return nil, err
 	}
 
@@ -403,6 +423,27 @@ func createMCPSession(ctx context.Context, name string, m config.MCPConfig, reso
 	return session, nil
 }
 
+// maybeStdioErr if a stdio mcp prints an error in non-json format, it'll fail
+// to parse, and the cli will then close it, causing the EOF error.
+// so, if we got an EOF err, and the transport is STDIO, we try to exec it
+// again with a timeout and collect the output so we can add details to the
+// error.
+// this happens particularly when starting things with npx, e.g. if node can't
+// be found or some other error like that.
+func maybeStdioErr(err error, transport mcp.Transport) error {
+	if !errors.Is(err, io.EOF) {
+		return err
+	}
+	ct, ok := transport.(*mcp.CommandTransport)
+	if !ok {
+		return err
+	}
+	if err2 := stdioMCPCheck(ct.Command); err2 != nil {
+		err = errors.Join(err, err2)
+	}
+	return err
+}
+
 func maybeTimeoutErr(err error, timeout time.Duration) error {
 	if errors.Is(err, context.Canceled) {
 		return fmt.Errorf("timed out after %s", timeout)
@@ -421,7 +462,7 @@ func createMCPTransport(ctx context.Context, m config.MCPConfig, resolver config
 			return nil, fmt.Errorf("mcp stdio config requires a non-empty 'command' field")
 		}
 		cmd := exec.CommandContext(ctx, home.Long(command), m.Args...)
-		cmd.Env = m.ResolvedEnv()
+		cmd.Env = append(os.Environ(), m.ResolvedEnv()...)
 		return &mcp.CommandTransport{
 			Command: cmd,
 		}, nil
@@ -470,3 +511,15 @@ func (rt headerRoundTripper) RoundTrip(req *http.Request) (*http.Response, error
 func mcpTimeout(m config.MCPConfig) time.Duration {
 	return time.Duration(cmp.Or(m.Timeout, 15)) * time.Second
 }
+
+func stdioMCPCheck(old *exec.Cmd) error {
+	ctx, cancel := context.WithTimeout(context.Background(), time.Second*5)
+	defer cancel()
+	cmd := exec.CommandContext(ctx, old.Path, old.Args...)
+	cmd.Env = old.Env
+	out, err := cmd.CombinedOutput()
+	if err == nil || errors.Is(ctx.Err(), context.DeadlineExceeded) {
+		return nil
+	}
+	return fmt.Errorf("%w: %s", err, string(out))
+}
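
The reworked `Info()` accepts `required` both as `[]any` and `[]string` because a schema that has been round-tripped through `encoding/json` as generic data always surfaces string arrays as `[]any`. A standalone illustration of that behavior (the sample schema here is made up):

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// A tool input schema decoded as generic JSON.
	raw := `{"type":"object","properties":{"path":{"type":"string"}},"required":["path"]}`

	var schema map[string]any
	if err := json.Unmarshal([]byte(raw), &schema); err != nil {
		panic(err)
	}

	req := schema["required"]
	fmt.Printf("%T\n", req) // []interface {}, not []string
	_, isStrings := req.([]string)
	fmt.Println(isStrings) // false, so the element-by-element conversion is needed
}
```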

internal/agent/tools/references.go 🔗

@@ -0,0 +1,190 @@
+package tools
+
+import (
+	"cmp"
+	"context"
+	_ "embed"
+	"errors"
+	"fmt"
+	"log/slog"
+	"maps"
+	"path/filepath"
+	"regexp"
+	"slices"
+	"sort"
+	"strings"
+
+	"charm.land/fantasy"
+	"github.com/charmbracelet/crush/internal/csync"
+	"github.com/charmbracelet/crush/internal/lsp"
+	"github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
+)
+
+type ReferencesParams struct {
+	Symbol string `json:"symbol" description:"The symbol name to search for (e.g., function name, variable name, type name)"`
+	Path   string `json:"path,omitempty" description:"The directory to search in. Use a directory/file to narrow down the symbol search. Defaults to the current working directory."`
+}
+
+type referencesTool struct {
+	lspClients *csync.Map[string, *lsp.Client]
+}
+
+const ReferencesToolName = "lsp_references"
+
+//go:embed references.md
+var referencesDescription []byte
+
+func NewReferencesTool(lspClients *csync.Map[string, *lsp.Client]) fantasy.AgentTool {
+	return fantasy.NewAgentTool(
+		ReferencesToolName,
+		string(referencesDescription),
+		func(ctx context.Context, params ReferencesParams, call fantasy.ToolCall) (fantasy.ToolResponse, error) {
+			if params.Symbol == "" {
+				return fantasy.NewTextErrorResponse("symbol is required"), nil
+			}
+
+			if lspClients.Len() == 0 {
+				return fantasy.NewTextErrorResponse("no LSP clients available"), nil
+			}
+
+			workingDir := cmp.Or(params.Path, ".")
+
+			matches, _, err := searchFiles(ctx, regexp.QuoteMeta(params.Symbol), workingDir, "", 100)
+			if err != nil {
+				return fantasy.NewTextErrorResponse(fmt.Sprintf("failed to search for symbol: %s", err)), nil
+			}
+
+			if len(matches) == 0 {
+				return fantasy.NewTextResponse(fmt.Sprintf("Symbol '%s' not found", params.Symbol)), nil
+			}
+
+			var allLocations []protocol.Location
+			var allErrs error
+			for _, match := range matches {
+				locations, err := find(ctx, lspClients, params.Symbol, match)
+				if err != nil {
+					if strings.Contains(err.Error(), "no identifier found") {
+						// grep probably matched a comment, string value, or something else that's irrelevant
+						continue
+					}
+					slog.Error("Failed to find references", "error", err, "symbol", params.Symbol, "path", match.path, "line", match.lineNum, "char", match.charNum)
+					allErrs = errors.Join(allErrs, err)
+					continue
+				}
+				allLocations = append(allLocations, locations...)
+				// XXX: should we break here or look for all results?
+			}
+
+			if len(allLocations) > 0 {
+				output := formatReferences(cleanupLocations(allLocations))
+				return fantasy.NewTextResponse(output), nil
+			}
+
+			if allErrs != nil {
+				return fantasy.NewTextErrorResponse(allErrs.Error()), nil
+			}
+			return fantasy.NewTextResponse(fmt.Sprintf("No references found for symbol '%s'", params.Symbol)), nil
+		})
+}
+
+func (r *referencesTool) Name() string {
+	return ReferencesToolName
+}
+
+func find(ctx context.Context, lspClients *csync.Map[string, *lsp.Client], symbol string, match grepMatch) ([]protocol.Location, error) {
+	absPath, err := filepath.Abs(match.path)
+	if err != nil {
+		return nil, fmt.Errorf("failed to get absolute path: %s", err)
+	}
+
+	var client *lsp.Client
+	for c := range lspClients.Seq() {
+		if c.HandlesFile(absPath) {
+			client = c
+			break
+		}
+	}
+
+	if client == nil {
+		slog.Warn("No LSP clients to handle", "path", match.path)
+		return nil, nil
+	}
+
+	return client.FindReferences(
+		ctx,
+		absPath,
+		match.lineNum,
+		match.charNum+getSymbolOffset(symbol),
+		true,
+	)
+}
+
+// getSymbolOffset returns the character offset to the actual symbol name
+// in a qualified symbol (e.g., "Bar" in "foo.Bar" or "method" in "Class::method").
+func getSymbolOffset(symbol string) int {
+	// Check for :: separator (Rust, C++, Ruby modules/classes, PHP static).
+	if idx := strings.LastIndex(symbol, "::"); idx != -1 {
+		return idx + 2
+	}
+	// Check for . separator (Go, Python, JavaScript, Java, C#, Ruby methods).
+	if idx := strings.LastIndex(symbol, "."); idx != -1 {
+		return idx + 1
+	}
+	// Check for \ separator (PHP namespaces).
+	if idx := strings.LastIndex(symbol, "\\"); idx != -1 {
+		return idx + 1
+	}
+	return 0
+}
+
+func cleanupLocations(locations []protocol.Location) []protocol.Location {
+	slices.SortFunc(locations, func(a, b protocol.Location) int {
+		if a.URI != b.URI {
+			return strings.Compare(string(a.URI), string(b.URI))
+		}
+		if a.Range.Start.Line != b.Range.Start.Line {
+			return cmp.Compare(a.Range.Start.Line, b.Range.Start.Line)
+		}
+		return cmp.Compare(a.Range.Start.Character, b.Range.Start.Character)
+	})
+	return slices.CompactFunc(locations, func(a, b protocol.Location) bool {
+		return a.URI == b.URI &&
+			a.Range.Start.Line == b.Range.Start.Line &&
+			a.Range.Start.Character == b.Range.Start.Character
+	})
+}
+
+func groupByFilename(locations []protocol.Location) map[string][]protocol.Location {
+	files := make(map[string][]protocol.Location)
+	for _, loc := range locations {
+		path, err := loc.URI.Path()
+		if err != nil {
+			slog.Error("Failed to convert location URI to path", "uri", loc.URI, "error", err)
+			continue
+		}
+		files[path] = append(files[path], loc)
+	}
+	return files
+}
+
+func formatReferences(locations []protocol.Location) string {
+	fileRefs := groupByFilename(locations)
+	files := slices.Collect(maps.Keys(fileRefs))
+	sort.Strings(files)
+
+	var output strings.Builder
+	output.WriteString(fmt.Sprintf("Found %d reference(s) in %d file(s):\n\n", len(locations), len(files)))
+
+	for _, file := range files {
+		refs := fileRefs[file]
+		output.WriteString(fmt.Sprintf("%s (%d reference(s)):\n", file, len(refs)))
+		for _, ref := range refs {
+			line := ref.Range.Start.Line + 1
+			char := ref.Range.Start.Character + 1
+			output.WriteString(fmt.Sprintf("  Line %d, Column %d\n", line, char))
+		}
+		output.WriteString("\n")
+	}
+
+	return output.String()
+}
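
`find` hands the LSP server the grep match position plus `getSymbolOffset`, so that for a qualified name like `foo.Bar` the reference lookup lands on the identifier rather than the qualifier. A small self-contained check of the offsets (the function body is copied from the diff above):

```go
package main

import (
	"fmt"
	"strings"
)

// getSymbolOffset is reproduced from references.go for illustration.
func getSymbolOffset(symbol string) int {
	if idx := strings.LastIndex(symbol, "::"); idx != -1 {
		return idx + 2
	}
	if idx := strings.LastIndex(symbol, "."); idx != -1 {
		return idx + 1
	}
	if idx := strings.LastIndex(symbol, "\\"); idx != -1 {
		return idx + 1
	}
	return 0
}

func main() {
	fmt.Println(getSymbolOffset("foo.Bar"))       // 4: lookup lands on "Bar"
	fmt.Println(getSymbolOffset("Class::method")) // 7: lands on "method"
	fmt.Println(getSymbolOffset("plain"))         // 0: unqualified symbol
}
```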

internal/agent/tools/references.md 🔗

@@ -0,0 +1,36 @@
+Find all references to/usage of a symbol by name using the Language Server Protocol (LSP).
+
+WHEN TO USE THIS TOOL:
+
+- **ALWAYS USE THIS FIRST** when searching for where a function, method, variable, type, or constant is used
+- **DO NOT use grep/glob for symbol searches** - this tool is semantic-aware and much more accurate
+- Use when you need to find all usages of a specific symbol (function, variable, type, class, method, etc.)
+- More accurate than grep because it understands code semantics and scope
+- Finds only actual references, not string matches in comments or unrelated code
+- Helpful for understanding where a symbol is used throughout the codebase
+- Useful for refactoring or analyzing code dependencies
+- Good for finding all call sites of a function, method, type, package, constant, variable, etc.
+
+HOW TO USE:
+
+- Provide the symbol name (e.g., "MyFunction", "myVariable", "MyType")
+- Optionally specify a path to narrow the search to a specific directory
+- The tool will automatically find the symbol and locate all references
+
+FEATURES:
+
+- Returns all references grouped by file
+- Shows line and column numbers for each reference
+- Supports multiple programming languages through LSP
+- Automatically finds the symbol without needing exact position
+
+LIMITATIONS:
+
+- May not find references in files that haven't been opened or indexed
+- Results depend on the LSP server's capabilities
+
+TIPS:
+
+- **Use this tool instead of grep when looking for symbol references** - it's more accurate and semantic-aware
+- Simply provide the symbol name and let the tool find it for you
+- This tool understands code structure, so it won't match unrelated strings or comments

internal/agent/tools/rg.go 🔗

@@ -43,7 +43,7 @@ func getRgSearchCmd(ctx context.Context, pattern, path, include string) *exec.Cm
 		return nil
 	}
 	// Use -n to show line numbers, -0 for null separation to handle Windows paths
-	args := []string{"-H", "-n", "-0", pattern}
+	args := []string{"--json", "-H", "-n", "-0", pattern}
 	if include != "" {
 		args = append(args, "--glob", include)
 	}

internal/cmd/dirs_test.go 🔗

@@ -0,0 +1,46 @@
+package cmd
+
+import (
+	"bytes"
+	"os"
+	"path/filepath"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func init() {
+	os.Setenv("XDG_CONFIG_HOME", "/tmp/fakeconfig")
+	os.Setenv("XDG_DATA_HOME", "/tmp/fakedata")
+}
+
+func TestDirs(t *testing.T) {
+	var b bytes.Buffer
+	dirsCmd.SetOut(&b)
+	dirsCmd.SetErr(&b)
+	dirsCmd.SetIn(bytes.NewReader(nil))
+	dirsCmd.Run(dirsCmd, nil)
+	expected := filepath.FromSlash("/tmp/fakeconfig/crush") + "\n" +
+		filepath.FromSlash("/tmp/fakedata/crush") + "\n"
+	require.Equal(t, expected, b.String())
+}
+
+func TestConfigDir(t *testing.T) {
+	var b bytes.Buffer
+	configDirCmd.SetOut(&b)
+	configDirCmd.SetErr(&b)
+	configDirCmd.SetIn(bytes.NewReader(nil))
+	configDirCmd.Run(configDirCmd, nil)
+	expected := filepath.FromSlash("/tmp/fakeconfig/crush") + "\n"
+	require.Equal(t, expected, b.String())
+}
+
+func TestDataDir(t *testing.T) {
+	var b bytes.Buffer
+	dataDirCmd.SetOut(&b)
+	dataDirCmd.SetErr(&b)
+	dataDirCmd.SetIn(bytes.NewReader(nil))
+	dataDirCmd.Run(dataDirCmd, nil)
+	expected := filepath.FromSlash("/tmp/fakedata/crush") + "\n"
+	require.Equal(t, expected, b.String())
+}

internal/config/config.go 🔗

@@ -156,7 +156,7 @@ type Completions struct {
 }
 
 func (c Completions) Limits() (depth, items int) {
-	return ptrValOr(c.MaxDepth, -1), ptrValOr(c.MaxItems, -1)
+	return ptrValOr(c.MaxDepth, 0), ptrValOr(c.MaxItems, 0)
 }
 
 type Permissions struct {
@@ -278,7 +278,7 @@ type ToolLs struct {
 }
 
 func (t ToolLs) Limits() (depth, items int) {
-	return ptrValOr(t.MaxDepth, -1), ptrValOr(t.MaxItems, -1)
+	return ptrValOr(t.MaxDepth, 0), ptrValOr(t.MaxItems, 0)
 }
 
 // Config holds the configuration for crush.
@@ -301,10 +301,10 @@ type Config struct {
 
 	Tools Tools `json:"tools,omitzero" jsonschema:"description=Tool configurations"`
 
+	Agents map[string]Agent `json:"-"`
+
 	// Internal
 	workingDir string `json:"-"`
-	// TODO: most likely remove this concept when I come back to it
-	Agents map[string]Agent `json:"-"`
 	// TODO: find a better way to do this this should probably not be part of the config
 	resolver       VariableResolver
 	dataConfigDir  string             `json:"-"`
@@ -470,6 +470,8 @@ func allToolNames() []string {
 		"download",
 		"edit",
 		"multiedit",
+		"lsp_diagnostics",
+		"lsp_references",
 		"fetch",
 		"glob",
 		"grep",

internal/config/load.go 🔗

@@ -607,6 +607,10 @@ func hasVertexCredentials(env env.Env) bool {
 }
 
 func hasAWSCredentials(env env.Env) bool {
+	if env.Get("AWS_BEARER_TOKEN_BEDROCK") != "" {
+		return true
+	}
+
 	if env.Get("AWS_ACCESS_KEY_ID") != "" && env.Get("AWS_SECRET_ACCESS_KEY") != "" {
 		return true
 	}
@@ -623,6 +627,11 @@ func hasAWSCredentials(env env.Env) bool {
 		env.Get("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
 		return true
 	}
+
+	if _, err := os.Stat(filepath.Join(home.Dir(), ".aws/credentials")); err == nil {
+		return true
+	}
+
 	return false
 }
 

internal/config/provider.go 🔗

@@ -126,7 +126,7 @@ func Providers(cfg *Config) ([]catwalk.Provider, error) {
 }
 
 func loadProviders(autoUpdateDisabled bool, client ProviderClient, path string) ([]catwalk.Provider, error) {
-	cacheIsStale, cacheExists := isCacheStale(path)
+	_, cacheExists := isCacheStale(path)
 
 	catwalkGetAndSave := func() ([]catwalk.Provider, error) {
 		providers, err := client.GetProviders()
@@ -142,25 +142,6 @@ func loadProviders(autoUpdateDisabled bool, client ProviderClient, path string)
 		return providers, nil
 	}
 
-	backgroundCacheUpdate := func() {
-		go func() {
-			slog.Info("Updating providers cache in background", "path", path)
-
-			providers, err := client.GetProviders()
-			if err != nil {
-				slog.Error("Failed to fetch providers in background from Catwalk", "error", err)
-				return
-			}
-			if len(providers) == 0 {
-				slog.Error("Empty providers list from Catwalk")
-				return
-			}
-			if err := saveProvidersInCache(path, providers); err != nil {
-				slog.Error("Failed to update providers.json in background", "error", err)
-			}
-		}()
-	}
-
 	switch {
 	case autoUpdateDisabled:
 		slog.Warn("Providers auto-update is disabled")
@@ -177,19 +158,6 @@ func loadProviders(autoUpdateDisabled bool, client ProviderClient, path string)
 		}
 		return providers, nil
 
-	case cacheExists && !cacheIsStale:
-		slog.Info("Recent providers cache is available.", "path", path)
-
-		providers, err := loadProvidersFromCache(path)
-		if err != nil {
-			return nil, err
-		}
-		if len(providers) == 0 {
-			return catwalkGetAndSave()
-		}
-		backgroundCacheUpdate()
-		return providers, nil
-
 	default:
 		slog.Info("Cache is not available or is stale. Fetching providers from Catwalk.", "path", path)
 

internal/config/provider_test.go 🔗

@@ -57,7 +57,7 @@ func TestProvider_loadProvidersWithIssues(t *testing.T) {
 	if err != nil {
 		t.Fatalf("Failed to write old providers to file: %v", err)
 	}
-	providers, err := loadProviders(false, client, tmpPath)
+	providers, err := loadProviders(true, client, tmpPath)
 	require.NoError(t, err)
 	require.NotNil(t, providers)
 	require.Len(t, providers, 1)

internal/log/http.go 🔗

@@ -39,12 +39,14 @@ func (h *HTTPRoundTripLogger) RoundTrip(req *http.Request) (*http.Response, erro
 		return nil, err
 	}
 
-	slog.Debug(
-		"HTTP Request",
-		"method", req.Method,
-		"url", req.URL,
-		"body", bodyToString(save),
-	)
+	if slog.Default().Enabled(req.Context(), slog.LevelDebug) {
+		slog.Debug(
+			"HTTP Request",
+			"method", req.Method,
+			"url", req.URL,
+			"body", bodyToString(save),
+		)
+	}
 
 	start := time.Now()
 	resp, err := h.Transport.RoundTrip(req)
@@ -61,16 +63,18 @@ func (h *HTTPRoundTripLogger) RoundTrip(req *http.Request) (*http.Response, erro
 	}
 
 	save, resp.Body, err = drainBody(resp.Body)
-	slog.Debug(
-		"HTTP Response",
-		"status_code", resp.StatusCode,
-		"status", resp.Status,
-		"headers", formatHeaders(resp.Header),
-		"body", bodyToString(save),
-		"content_length", resp.ContentLength,
-		"duration_ms", duration.Milliseconds(),
-		"error", err,
-	)
+	if slog.Default().Enabled(req.Context(), slog.LevelDebug) {
+		slog.Debug(
+			"HTTP Response",
+			"status_code", resp.StatusCode,
+			"status", resp.Status,
+			"headers", formatHeaders(resp.Header),
+			"body", bodyToString(save),
+			"content_length", resp.ContentLength,
+			"duration_ms", duration.Milliseconds(),
+			"error", err,
+		)
+	}
 	return resp, err
 }
 
@@ -84,7 +88,7 @@ func bodyToString(body io.ReadCloser) string {
 		return ""
 	}
 	var b bytes.Buffer
-	if json.Compact(&b, bytes.TrimSpace(src)) != nil {
+	if json.Indent(&b, bytes.TrimSpace(src), "", "  ") != nil {
 		// not json probably
 		return string(src)
 	}

internal/lsp/client.go 🔗

@@ -445,6 +445,16 @@ func (c *Client) WaitForDiagnostics(ctx context.Context, d time.Duration) {
 	}
 }
 
+// FindReferences finds all references to the symbol at the given position.
+func (c *Client) FindReferences(ctx context.Context, filepath string, line, character int, includeDeclaration bool) ([]protocol.Location, error) {
+	if err := c.OpenFileOnDemand(ctx, filepath); err != nil {
+		return nil, err
+	}
+	// NOTE: line and character should be 0-based.
+	// See: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#position
+	return c.client.FindReferences(ctx, filepath, line-1, character-1, includeDeclaration)
+}
+
 // HasRootMarkers checks if any of the specified root marker patterns exist in the given directory.
 // Uses glob patterns to match files, allowing for more flexible matching.
 func HasRootMarkers(dir string, rootMarkers []string) bool {

internal/tui/components/chat/editor/editor.go 🔗

@@ -86,6 +86,7 @@ var DeleteKeyMaps = DeleteAttachmentKeyMaps{
 
 const (
 	maxAttachments = 5
+	maxFileResults = 25
 )
 
 type OpenEditorMsg struct {
@@ -500,6 +501,7 @@ func (m *editorCmp) startCompletions() tea.Msg {
 		Completions: completionItems,
 		X:           x,
 		Y:           y,
+		MaxResults:  maxFileResults,
 	}
 }
 

internal/tui/components/chat/messages/messages.go 🔗

@@ -12,6 +12,7 @@ import (
 	"github.com/charmbracelet/catwalk/pkg/catwalk"
 	"github.com/charmbracelet/lipgloss/v2"
 	"github.com/charmbracelet/x/ansi"
+	"github.com/charmbracelet/x/exp/ordered"
 	"github.com/google/uuid"
 
 	"github.com/atotto/clipboard"
@@ -274,7 +275,7 @@ func (m *messageCmp) renderThinkingContent() string {
 		}
 	}
 	fullContent := content.String()
-	height := util.Clamp(lipgloss.Height(fullContent), 1, 10)
+	height := ordered.Clamp(lipgloss.Height(fullContent), 1, 10)
 	m.thinkingViewport.SetHeight(height)
 	m.thinkingViewport.SetWidth(m.textWidth())
 	m.thinkingViewport.SetContent(fullContent)
@@ -347,7 +348,7 @@ func (m *messageCmp) GetSize() (int, int) {
 
 // SetSize updates the width of the message component for text wrapping
 func (m *messageCmp) SetSize(width int, height int) tea.Cmd {
-	m.width = util.Clamp(width, 1, 120)
+	m.width = ordered.Clamp(width, 1, 120)
 	m.thinkingViewport.SetWidth(m.width - 4)
 	return nil
 }

internal/tui/components/completions/completions.go 🔗

@@ -22,6 +22,7 @@ type OpenCompletionsMsg struct {
 	Completions []Completion
 	X           int // X position for the completions popup
 	Y           int // Y position for the completions popup
+	MaxResults  int // Maximum number of results to render, 0 for no limit
 }
 
 type FilterCompletionsMsg struct {
@@ -192,6 +193,7 @@ func (c *completionsCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 		}
 		c.width = width
 		c.height = max(min(maxCompletionsHeight, len(items)), 1) // Ensure at least 1 item height
+		c.list.SetResultsSize(msg.MaxResults)
 		return c, tea.Batch(
 			c.list.SetItems(items),
 			c.list.SetSize(c.width, c.height),

internal/tui/components/dialogs/commands/arguments.go 🔗

@@ -128,12 +128,17 @@ func (c *commandArgumentsDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 			c.inputs[c.focusIndex].Blur()
 			c.focusIndex = (c.focusIndex - 1 + len(c.inputs)) % len(c.inputs)
 			c.inputs[c.focusIndex].Focus()
-
+		case key.Matches(msg, c.keys.Close):
+			return c, util.CmdHandler(dialogs.CloseDialogMsg{})
 		default:
 			var cmd tea.Cmd
 			c.inputs[c.focusIndex], cmd = c.inputs[c.focusIndex].Update(msg)
 			return c, cmd
 		}
+	case tea.PasteMsg:
+		var cmd tea.Cmd
+		c.inputs[c.focusIndex], cmd = c.inputs[c.focusIndex].Update(msg)
+		return c, cmd
 	}
 	return c, nil
 }

internal/tui/components/dialogs/commands/keys.go 🔗

@@ -76,6 +76,7 @@ type ArgumentsDialogKeyMap struct {
 	Confirm  key.Binding
 	Next     key.Binding
 	Previous key.Binding
+	Close    key.Binding
 }
 
 func DefaultArgumentsDialogKeyMap() ArgumentsDialogKeyMap {
@@ -93,6 +94,10 @@ func DefaultArgumentsDialogKeyMap() ArgumentsDialogKeyMap {
 			key.WithKeys("shift+tab", "up"),
 			key.WithHelp("shift+tab/โ†‘", "previous"),
 		),
+		Close: key.NewBinding(
+			key.WithKeys("esc", "alt+esc"),
+			key.WithHelp("esc", "cancel"),
+		),
 	}
 }
 
@@ -102,6 +107,7 @@ func (k ArgumentsDialogKeyMap) KeyBindings() []key.Binding {
 		k.Confirm,
 		k.Next,
 		k.Previous,
+		k.Close,
 	}
 }
 
@@ -122,5 +128,6 @@ func (k ArgumentsDialogKeyMap) ShortHelp() []key.Binding {
 		k.Confirm,
 		k.Next,
 		k.Previous,
+		k.Close,
 	}
 }

internal/tui/components/dialogs/permissions/keys.go 🔗

@@ -42,7 +42,7 @@ func DefaultKeyMap() KeyMap {
 			key.WithHelp("s", "allow session"),
 		),
 		Deny: key.NewBinding(
-			key.WithKeys("d", "D", "ctrl+d", "esc"),
+			key.WithKeys("d", "D", "esc"),
 			key.WithHelp("d", "deny"),
 		),
 		Select: key.NewBinding(

internal/tui/exp/list/filterable.go 🔗

@@ -3,8 +3,6 @@ package list
 import (
 	"regexp"
 	"slices"
-	"sort"
-	"strings"
 
 	"github.com/charmbracelet/bubbles/v2/key"
 	"github.com/charmbracelet/bubbles/v2/textinput"
@@ -28,7 +26,9 @@ type FilterableList[T FilterableItem] interface {
 	Cursor() *tea.Cursor
 	SetInputWidth(int)
 	SetInputPlaceholder(string)
+	SetResultsSize(int)
 	Filter(q string) tea.Cmd
+	fuzzy.Source
 }
 
 type HasMatchIndexes interface {
@@ -47,10 +47,11 @@ type filterableList[T FilterableItem] struct {
 	*filterableOptions
 	width, height int
 	// stores all available items
-	items      []T
-	input      textinput.Model
-	inputWidth int
-	query      string
+	items       []T
+	resultsSize int
+	input       textinput.Model
+	inputWidth  int
+	query       string
 }
 
 type filterableListOption func(*filterableOptions)
@@ -246,22 +247,18 @@ func (f *filterableList[T]) Filter(query string) tea.Cmd {
 		return f.list.SetItems(f.items)
 	}
 
-	words := make([]string, len(f.items))
-	for i, item := range f.items {
-		words[i] = strings.ToLower(item.FilterValue())
-	}
-
-	matches := fuzzy.Find(query, words)
-
-	sort.SliceStable(matches, func(i, j int) bool {
-		return matches[i].Score > matches[j].Score
-	})
+	matches := fuzzy.FindFrom(query, f)
 
 	var matchedItems []T
-	for _, match := range matches {
+	resultSize := len(matches)
+	if f.resultsSize > 0 && resultSize > f.resultsSize {
+		resultSize = f.resultsSize
+	}
+	for i := range resultSize {
+		match := matches[i]
 		item := f.items[match.Index]
-		if i, ok := any(item).(HasMatchIndexes); ok {
-			i.MatchIndexes(match.MatchedIndexes)
+		if it, ok := any(item).(HasMatchIndexes); ok {
+			it.MatchIndexes(match.MatchedIndexes)
 		}
 		matchedItems = append(matchedItems, item)
 	}
@@ -307,3 +304,15 @@ func (f *filterableList[T]) SetInputWidth(w int) {
 func (f *filterableList[T]) SetInputPlaceholder(ph string) {
 	f.placeholder = ph
 }
+
+func (f *filterableList[T]) SetResultsSize(size int) {
+	f.resultsSize = size
+}
+
+func (f *filterableList[T]) String(i int) string {
+	return f.items[i].FilterValue()
+}
+
+func (f *filterableList[T]) Len() int {
+	return len(f.items)
+}
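
`filterableList` now satisfies `fuzzy.Source` directly via `String(i)`/`Len()`, so `Filter` can call `fuzzy.FindFrom` and drop the manual lowercasing and score sort. A minimal sketch of the same pattern, assuming the `github.com/sahilm/fuzzy` package (whose `FindFrom`, `Source`, `Match.Index`, and `MatchedIndexes` line up with the calls in this diff):

```go
package main

import (
	"fmt"

	"github.com/sahilm/fuzzy" // assumption: the fuzzy package providing FindFrom/Source
)

// names implements fuzzy.Source, just as filterableList now does.
type names []string

func (n names) String(i int) string { return n[i] }
func (n names) Len() int            { return len(n) }

func main() {
	src := names{"editor.go", "messages.go", "filterable.go"}
	// FindFrom returns matches ordered by score, which is why the explicit
	// sort in the old Filter implementation could go away.
	for _, m := range fuzzy.FindFrom("filt", src) {
		fmt.Println(src[m.Index], m.MatchedIndexes)
	}
}
```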

internal/tui/exp/list/list.go 🔗

@@ -15,6 +15,7 @@ import (
 	"github.com/charmbracelet/lipgloss/v2"
 	uv "github.com/charmbracelet/ultraviolet"
 	"github.com/charmbracelet/x/ansi"
+	"github.com/charmbracelet/x/exp/ordered"
 	"github.com/rivo/uniseg"
 )
 
@@ -1283,14 +1284,14 @@ func (l *list[T]) UpdateItem(id string, item T) tea.Cmd {
 				newItem, ok := l.renderedItems.Get(item.ID())
 				if ok {
 					newLines := newItem.height - oldItem.height
-					l.offset = util.Clamp(l.offset+newLines, 0, lipgloss.Height(l.rendered)-1)
+					l.offset = ordered.Clamp(l.offset+newLines, 0, lipgloss.Height(l.rendered)-1)
 				}
 			}
 		} else if hasOldItem && l.offset > oldItem.start {
 			newItem, ok := l.renderedItems.Get(item.ID())
 			if ok {
 				newLines := newItem.height - oldItem.height
-				l.offset = util.Clamp(l.offset+newLines, 0, lipgloss.Height(l.rendered)-1)
+				l.offset = ordered.Clamp(l.offset+newLines, 0, lipgloss.Height(l.rendered)-1)
 			}
 		}
 	}

internal/tui/util/util.go 🔗

@@ -60,10 +60,3 @@ type (
 	}
 	ClearStatusMsg struct{}
 )
-
-func Clamp(v, low, high int) int {
-	if high < low {
-		low, high = high, low
-	}
-	return min(high, max(low, v))
-}