package tools

import (
	"bufio"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"os"
	"os/exec"
	"path/filepath"
	"regexp"
	"sort"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/charmbracelet/crush/internal/fsext"
)

// regexCache provides thread-safe caching of compiled regex patterns
type regexCache struct {
	cache map[string]*regexp.Regexp
	mu    sync.RWMutex
}

// newRegexCache creates a new regex cache
func newRegexCache() *regexCache {
	return &regexCache{
		cache: make(map[string]*regexp.Regexp),
	}
}

// get retrieves a compiled regex from cache or compiles and caches it
func (rc *regexCache) get(pattern string) (*regexp.Regexp, error) {
	// Try to get from cache first (read lock)
	rc.mu.RLock()
	if regex, exists := rc.cache[pattern]; exists {
		rc.mu.RUnlock()
		return regex, nil
	}
	rc.mu.RUnlock()

	// Compile the regex (write lock)
	rc.mu.Lock()
	defer rc.mu.Unlock()

	// Double-check in case another goroutine compiled it while we waited
	if regex, exists := rc.cache[pattern]; exists {
		return regex, nil
	}

	// Compile and cache the regex
	regex, err := regexp.Compile(pattern)
	if err != nil {
		return nil, err
	}

	rc.cache[pattern] = regex
	return regex, nil
}

// Global regex cache instances
var (
	searchRegexCache = newRegexCache()
	globRegexCache   = newRegexCache()
	// Pre-compiled regex for glob conversion (used frequently)
	globBraceRegex = regexp.MustCompile(`\{([^}]+)\}`)
)

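// GrepParams holds the input parameters for the grep tool.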
type GrepParams struct {
	Pattern     string `json:"pattern"`
	Path        string `json:"path"`
	Include     string `json:"include"`
	LiteralText bool   `json:"literal_text"`
}

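// grepMatch records a single match: the file path, its modification time, and the matching line.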
type grepMatch struct {
	path     string
	modTime  time.Time
	lineNum  int
	lineText string
}

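// GrepResponseMetadata reports how many matches were found and whether the results were truncated.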
type GrepResponseMetadata struct {
	NumberOfMatches int  `json:"number_of_matches"`
	Truncated       bool `json:"truncated"`
}

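// grepTool implements the grep tool; searches default to workingDir when no path is given.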
type grepTool struct {
	workingDir string
}

const (
	GrepToolName    = "grep"
	grepDescription = `Fast content search tool that finds files containing specific text or patterns, returning matching file paths sorted by modification time (newest first).

WHEN TO USE THIS TOOL:
- Use when you need to find files containing specific text or patterns
- Great for searching code bases for function names, variable declarations, or error messages
- Useful for finding all files that use a particular API or pattern

HOW TO USE:
- Provide a regex pattern to search for within file contents
- Set literal_text=true if you want to search for the exact text with special characters (recommended for non-regex users)
- Optionally specify a starting directory (defaults to current working directory)
- Optionally provide an include pattern to filter which files to search
- Results are sorted with most recently modified files first

REGEX PATTERN SYNTAX (when literal_text=false):
- Supports standard regular expression syntax
- 'function' searches for the literal text "function"
- 'log\..*Error' finds text starting with "log." and ending with "Error"
- 'import\s+.*\s+from' finds import statements in JavaScript/TypeScript

COMMON INCLUDE PATTERN EXAMPLES:
- '*.js' - Only search JavaScript files
- '*.{ts,tsx}' - Only search TypeScript files
- '*.go' - Only search Go files

LIMITATIONS:
- Results are limited to 100 files (newest first)
- Performance depends on the number of files being searched
- Very large binary files may be skipped
- Hidden files (starting with '.') are skipped

IGNORE FILE SUPPORT:
- Respects .gitignore patterns to skip ignored files and directories
- Respects .crushignore patterns for additional ignore rules
- Both ignore files are automatically detected in the search root directory

CROSS-PLATFORM NOTES:
- Uses ripgrep (rg) command if available for better performance
- Falls back to built-in Go implementation if ripgrep is not available
- File paths are normalized automatically for cross-platform compatibility

TIPS:
- For faster, more targeted searches, first use Glob to find relevant files, then use Grep
- When doing iterative exploration that may require multiple rounds of searching, consider using the Agent tool instead
- Always check if results are truncated and refine your search pattern if needed
- Use literal_text=true when searching for exact text containing special characters like dots, parentheses, etc.`
)

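// NewGrepTool creates a grep tool rooted at the given working directory.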
func NewGrepTool(workingDir string) BaseTool {
	return &grepTool{
		workingDir: workingDir,
	}
}

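// Info returns the tool's name, description, and JSON parameter schema.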
func (g *grepTool) Info() ToolInfo {
	return ToolInfo{
		Name:        GrepToolName,
		Description: grepDescription,
		Parameters: map[string]any{
			"pattern": map[string]any{
				"type":        "string",
				"description": "The regex pattern to search for in file contents",
			},
			"path": map[string]any{
				"type":        "string",
				"description": "The directory to search in. Defaults to the current working directory.",
			},
			"include": map[string]any{
				"type":        "string",
				"description": "File pattern to include in the search (e.g. \"*.js\", \"*.{ts,tsx}\")",
			},
			"literal_text": map[string]any{
				"type":        "boolean",
				"description": "If true, the pattern will be treated as literal text with special regex characters escaped. Default is false.",
			},
		},
		Required: []string{"pattern"},
	}
}

// escapeRegexPattern escapes special regex characters so they're treated as literal characters
func escapeRegexPattern(pattern string) string {
	specialChars := []string{"\\", ".", "+", "*", "?", "(", ")", "[", "]", "{", "}", "^", "$", "|"}
	escaped := pattern

	for _, char := range specialChars {
		escaped = strings.ReplaceAll(escaped, char, "\\"+char)
	}

	return escaped
}

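// Run parses the call parameters, runs the search, and formats matches grouped by file.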
func (g *grepTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
	var params GrepParams
	if err := json.Unmarshal([]byte(call.Input), &params); err != nil {
		return NewTextErrorResponse(fmt.Sprintf("error parsing parameters: %s", err)), nil
	}

	if params.Pattern == "" {
		return NewTextErrorResponse("pattern is required"), nil
	}

	// If literal_text is true, escape the pattern
	searchPattern := params.Pattern
	if params.LiteralText {
		searchPattern = escapeRegexPattern(params.Pattern)
	}

	searchPath := params.Path
	if searchPath == "" {
		searchPath = g.workingDir
	}

	matches, truncated, err := searchFiles(ctx, searchPattern, searchPath, params.Include, 100)
	if err != nil {
		return ToolResponse{}, fmt.Errorf("error searching files: %w", err)
	}

	var output strings.Builder
	if len(matches) == 0 {
		output.WriteString("No files found")
	} else {
		fmt.Fprintf(&output, "Found %d matches\n", len(matches))

		currentFile := ""
		for _, match := range matches {
			if currentFile != match.path {
				if currentFile != "" {
					output.WriteString("\n")
				}
				currentFile = match.path
				fmt.Fprintf(&output, "%s:\n", match.path)
			}
			if match.lineNum > 0 {
				fmt.Fprintf(&output, " Line %d: %s\n", match.lineNum, match.lineText)
			} else {
				fmt.Fprintf(&output, " %s\n", match.path)
			}
		}

		if truncated {
			output.WriteString("\n(Results are truncated. Consider using a more specific path or pattern.)")
		}
	}

	return WithResponseMetadata(
		NewTextResponse(output.String()),
		GrepResponseMetadata{
			NumberOfMatches: len(matches),
			Truncated:       truncated,
		},
	), nil
}

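// searchFiles searches with ripgrep when available, falls back to the built-in
// Go implementation on error, and returns matches sorted by modification time
// (newest first) along with a flag indicating whether results were truncated.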
func searchFiles(ctx context.Context, pattern, rootPath, include string, limit int) ([]grepMatch, bool, error) {
	matches, err := searchWithRipgrep(ctx, pattern, rootPath, include)
	if err != nil {
		matches, err = searchFilesWithRegex(pattern, rootPath, include)
		if err != nil {
			return nil, false, err
		}
	}

	sort.Slice(matches, func(i, j int) bool {
		return matches[i].modTime.After(matches[j].modTime)
	})

	truncated := len(matches) > limit
	if truncated {
		matches = matches[:limit]
	}

	return matches, truncated, nil
}

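// searchWithRipgrep shells out to ripgrep and parses its file:line:content output.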
func searchWithRipgrep(ctx context.Context, pattern, path, include string) ([]grepMatch, error) {
	cmd := getRgSearchCmd(ctx, pattern, path, include)
	if cmd == nil {
		return nil, fmt.Errorf("ripgrep not found in $PATH")
	}

	cmd.Args = append(
		cmd.Args,
		"--ignore-file", filepath.Join(path, ".gitignore"),
		"--ignore-file", filepath.Join(path, ".crushignore"),
	)

	output, err := cmd.Output()
	if err != nil {
		if exitErr, ok := err.(*exec.ExitError); ok && exitErr.ExitCode() == 1 {
			return []grepMatch{}, nil
		}
		return nil, err
	}

	lines := strings.Split(strings.TrimSpace(string(output)), "\n")
	matches := make([]grepMatch, 0, len(lines))

	for _, line := range lines {
		if line == "" {
			continue
		}

		// Parse ripgrep output format: file:line:content
		parts := strings.SplitN(line, ":", 3)
		if len(parts) < 3 {
			continue
		}

		filePath := parts[0]
		lineNum, err := strconv.Atoi(parts[1])
		if err != nil {
			continue
		}
		lineText := parts[2]

		fileInfo, err := os.Stat(filePath)
		if err != nil {
			continue // Skip files we can't access
		}

		matches = append(matches, grepMatch{
			path:     filePath,
			modTime:  fileInfo.ModTime(),
			lineNum:  lineNum,
			lineText: lineText,
		})
	}

	return matches, nil
}

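// searchFilesWithRegex is the pure-Go fallback: it walks rootPath while
// honoring ignore files and scans each candidate file for the pattern,
// collecting up to 200 matches before the caller applies its own limit.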
func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error) {
	matches := []grepMatch{}

	// Use cached regex compilation
	regex, err := searchRegexCache.get(pattern)
	if err != nil {
		return nil, fmt.Errorf("invalid regex pattern: %w", err)
	}

	var includePattern *regexp.Regexp
	if include != "" {
		regexPattern := globToRegex(include)
		includePattern, err = globRegexCache.get(regexPattern)
		if err != nil {
			return nil, fmt.Errorf("invalid include pattern: %w", err)
		}
	}

	// Create walker with gitignore and crushignore support
	walker := fsext.NewFastGlobWalker(rootPath)

	err = filepath.Walk(rootPath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return nil // Skip errors
		}

		if info.IsDir() {
			return nil // Skip directories
		}

		// Use walker's shouldSkip method instead of just SkipHidden
		if walker.ShouldSkip(path) {
			return nil
		}

		if includePattern != nil && !includePattern.MatchString(path) {
			return nil
		}

		match, lineNum, lineText, err := fileContainsPattern(path, regex)
		if err != nil {
			return nil // Skip files we can't read
		}

		if match {
			matches = append(matches, grepMatch{
				path:     path,
				modTime:  info.ModTime(),
				lineNum:  lineNum,
				lineText: lineText,
			})

			if len(matches) >= 200 {
				return filepath.SkipAll
			}
		}

		return nil
	})
	if err != nil {
		return nil, err
	}

	return matches, nil
}

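// fileContainsPattern reports whether the file contains the pattern, returning
// the first matching line number and its text.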
func fileContainsPattern(filePath string, pattern *regexp.Regexp) (bool, int, string, error) {
	// Quick binary file detection
	if isBinaryFile(filePath) {
		return false, 0, "", nil
	}

	file, err := os.Open(filePath)
	if err != nil {
		return false, 0, "", err
	}
	defer file.Close()

	scanner := bufio.NewScanner(file)
	lineNum := 0
	for scanner.Scan() {
		lineNum++
		line := scanner.Text()
		if pattern.MatchString(line) {
			return true, lineNum, line, nil
		}
	}

	return false, 0, "", scanner.Err()
}

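// binaryExts lists file extensions that isBinaryFile treats as binary without reading the file.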
var binaryExts = map[string]struct{}{
	".exe": {}, ".dll": {}, ".so": {}, ".dylib": {},
	".bin": {}, ".obj": {}, ".o": {}, ".a": {},
	".zip": {}, ".tar": {}, ".gz": {}, ".bz2": {},
	".jpg": {}, ".jpeg": {}, ".png": {}, ".gif": {},
	".pdf": {}, ".doc": {}, ".docx": {}, ".xls": {},
	".mp3": {}, ".mp4": {}, ".avi": {}, ".mov": {},
}

// isBinaryFile performs a quick check to determine if a file is binary
func isBinaryFile(filePath string) bool {
	// Check file extension first (fastest)
	ext := strings.ToLower(filepath.Ext(filePath))
	if _, isBinary := binaryExts[ext]; isBinary {
		return true
	}

	// Quick content check for files without clear extensions
	file, err := os.Open(filePath)
	if err != nil {
		return false // If we can't open it, let the caller handle the error
	}
	defer file.Close()

	// Read first 512 bytes to check for null bytes
	buffer := make([]byte, 512)
	n, err := file.Read(buffer)
	if err != nil && err != io.EOF {
		return false
	}

	// Check for null bytes (common in binary files)
	for i := range n {
		if buffer[i] == 0 {
			return true
		}
	}

	return false
}

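// globToRegex converts a simple glob pattern (*, ?, and {a,b} alternation)
// into an equivalent regular expression string.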
func globToRegex(glob string) string {
	regexPattern := strings.ReplaceAll(glob, ".", "\\.")
	regexPattern = strings.ReplaceAll(regexPattern, "*", ".*")
	regexPattern = strings.ReplaceAll(regexPattern, "?", ".")

	// Use pre-compiled regex instead of compiling each time
	regexPattern = globBraceRegex.ReplaceAllStringFunc(regexPattern, func(match string) string {
		inner := match[1 : len(match)-1]
		return "(" + strings.ReplaceAll(inner, ",", "|") + ")"
	})

	return regexPattern
}