From 6ab958103a5cb248eead0d738f2a1255496eb524 Mon Sep 17 00:00:00 2001 From: Kujtim Hoxha Date: Sat, 23 Aug 2025 20:07:11 +0200 Subject: [PATCH] chore: create new agent --- tools/bash.go | 341 ---------------------------- tools/bash_commands.go | 158 ------------- tools/common.go | 103 --------- tools/diagnostics.go | 281 ----------------------- tools/download.go | 182 --------------- tools/edit.go | 490 ----------------------------------------- tools/fetch.go | 222 ------------------- tools/glob.go | 154 ------------- tools/grep.go | 436 ------------------------------------ tools/ls.go | 266 ---------------------- tools/multiedit.go | 401 --------------------------------- tools/rg.go | 53 ----- tools/sourcegraph.go | 356 ------------------------------ tools/view.go | 329 --------------------------- tools/write.go | 196 ----------------- 15 files changed, 3968 deletions(-) delete mode 100644 tools/bash.go delete mode 100644 tools/bash_commands.go delete mode 100644 tools/common.go delete mode 100644 tools/diagnostics.go delete mode 100644 tools/download.go delete mode 100644 tools/edit.go delete mode 100644 tools/fetch.go delete mode 100644 tools/glob.go delete mode 100644 tools/grep.go delete mode 100644 tools/ls.go delete mode 100644 tools/multiedit.go delete mode 100644 tools/rg.go delete mode 100644 tools/sourcegraph.go delete mode 100644 tools/view.go delete mode 100644 tools/write.go diff --git a/tools/bash.go b/tools/bash.go deleted file mode 100644 index e3b547fdfed4fb2355d72ffa934d260ee0abbd36..0000000000000000000000000000000000000000 --- a/tools/bash.go +++ /dev/null @@ -1,341 +0,0 @@ -package tools - -import ( - "context" - "fmt" - "strings" - "time" - - "github.com/charmbracelet/crush/internal/ai" - "github.com/charmbracelet/crush/internal/permission" - "github.com/charmbracelet/crush/internal/shell" -) - -type BashParams struct { - Command string `json:"command" description:"The command to execute"` - Timeout int `json:"timeout,omitempty" description:"Optional timeout in milliseconds (max 600000)"` -} - -type BashPermissionsParams struct { - Command string `json:"command"` - Timeout int `json:"timeout"` -} - -type BashResponseMetadata struct { - StartTime int64 `json:"start_time"` - EndTime int64 `json:"end_time"` - Output string `json:"output"` - WorkingDirectory string `json:"working_directory"` -} - -const ( - BashToolName = "bash" - DefaultTimeout = 1 * 60 * 1000 // 1 minutes in milliseconds - MaxTimeout = 10 * 60 * 1000 // 10 minutes in milliseconds - BashNoOutput = "no output" -) - -func NewBashTool(permissions permission.Service, workingDir string) ai.AgentTool { - // Set up command blocking on the persistent shell - persistentShell := shell.GetPersistentShell(workingDir) - persistentShell.SetBlockFuncs(blockFuncs()) - return ai.NewAgentTool( - BashToolName, - bashDescription(), - func(ctx context.Context, params BashParams, call ai.ToolCall) (ai.ToolResponse, error) { - if params.Timeout > MaxTimeout { - params.Timeout = MaxTimeout - } else if params.Timeout <= 0 { - params.Timeout = DefaultTimeout - } - - if params.Command == "" { - return ai.NewTextErrorResponse("missing command"), nil - } - - isSafeReadOnly := false - cmdLower := strings.ToLower(params.Command) - - for _, safe := range safeCommands { - if strings.HasPrefix(cmdLower, safe) { - if len(cmdLower) == len(safe) || cmdLower[len(safe)] == ' ' || cmdLower[len(safe)] == '-' { - isSafeReadOnly = true - break - } - } - } - - sessionID, messageID := GetContextValues(ctx) - if sessionID == "" || messageID == "" { 
- return ai.ToolResponse{}, fmt.Errorf("session ID and message ID are required for creating a new file") - } - if !isSafeReadOnly { - granted := permissions.Request( - permission.CreatePermissionRequest{ - SessionID: sessionID, - Path: workingDir, - ToolCallID: call.ID, - ToolName: BashToolName, - Action: "execute", - Description: fmt.Sprintf("Execute command: %s", params.Command), - Params: BashPermissionsParams{ - Command: params.Command, - }, - }, - ) - if !granted { - return ai.ToolResponse{}, permission.ErrorPermissionDenied - } - } - - startTime := time.Now() - if params.Timeout > 0 { - var cancel context.CancelFunc - ctx, cancel = context.WithTimeout(ctx, time.Duration(params.Timeout)*time.Millisecond) - defer cancel() - } - - persistentShell := shell.GetPersistentShell(workingDir) - stdout, stderr, err := persistentShell.Exec(ctx, params.Command) - - // Get the current working directory after command execution - currentWorkingDir := persistentShell.GetWorkingDir() - interrupted := shell.IsInterrupt(err) - exitCode := shell.ExitCode(err) - if exitCode == 0 && !interrupted && err != nil { - return ai.ToolResponse{}, fmt.Errorf("error executing command: %w", err) - } - - stdout = truncateOutput(stdout) - stderr = truncateOutput(stderr) - - errorMessage := stderr - if errorMessage == "" && err != nil { - errorMessage = err.Error() - } - - if interrupted { - if errorMessage != "" { - errorMessage += "\n" - } - errorMessage += "Command was aborted before completion" - } else if exitCode != 0 { - if errorMessage != "" { - errorMessage += "\n" - } - errorMessage += fmt.Sprintf("Exit code %d", exitCode) - } - - hasBothOutputs := stdout != "" && stderr != "" - - if hasBothOutputs { - stdout += "\n" - } - - if errorMessage != "" { - stdout += "\n" + errorMessage - } - - metadata := BashResponseMetadata{ - StartTime: startTime.UnixMilli(), - EndTime: time.Now().UnixMilli(), - Output: stdout, - WorkingDirectory: currentWorkingDir, - } - if stdout == "" { - return ai.WithResponseMetadata(ai.NewTextResponse(BashNoOutput), metadata), nil - } - stdout += fmt.Sprintf("\n\n%s", currentWorkingDir) - return ai.WithResponseMetadata(ai.NewTextResponse(stdout), metadata), nil - }, - ) -} - -func blockFuncs() []shell.BlockFunc { - return []shell.BlockFunc{ - shell.CommandsBlocker(bannedCommands), - shell.ArgumentsBlocker([][]string{ - // System package managers - {"apk", "add"}, - {"apt", "install"}, - {"apt-get", "install"}, - {"dnf", "install"}, - {"emerge"}, - {"pacman", "-S"}, - {"pkg", "install"}, - {"yum", "install"}, - {"zypper", "install"}, - - // Language-specific package managers - {"brew", "install"}, - {"cargo", "install"}, - {"gem", "install"}, - {"go", "install"}, - {"npm", "install", "-g"}, - {"npm", "install", "--global"}, - {"pip", "install", "--user"}, - {"pip3", "install", "--user"}, - {"pnpm", "add", "-g"}, - {"pnpm", "add", "--global"}, - {"yarn", "global", "add"}, - }), - } -} - -func bashDescription() string { - bannedCommandsStr := strings.Join(bannedCommands, ", ") - return fmt.Sprintf(`Executes a given bash command in a persistent shell session with optional timeout, ensuring proper handling and security measures. - -CROSS-PLATFORM SHELL SUPPORT: -* This tool uses a shell interpreter (mvdan/sh) that mimics the Bash language, - so you should use Bash syntax in all platforms, including Windows. - The most common shell builtins and core utils are available in Windows as - well. -* Make sure to use forward slashes (/) as path separators in commands, even on - Windows. 
Example: "ls C:/foo/bar" instead of "ls C:\foo\bar". - -Before executing the command, please follow these steps: - -1. Directory Verification: - - If the command will create new directories or files, first use the LS tool to verify the parent directory exists and is the correct location - - For example, before running "mkdir foo/bar", first use LS to check that "foo" exists and is the intended parent directory - -2. Security Check: - - For security and to limit the threat of a prompt injection attack, some commands are limited or banned. If you use a disallowed command, you will receive an error message explaining the restriction. Explain the error to the User. - - Verify that the command is not one of the banned commands: %s. - -3. Command Execution: - - After ensuring proper quoting, execute the command. - - Capture the output of the command. - -4. Output Processing: - - If the output exceeds %d characters, output will be truncated before being returned to you. - - Prepare the output for display to the user. - -5. Return Result: - - Provide the processed output of the command. - - If any errors occurred during execution, include those in the output. - - The result will also have metadata like the cwd (current working directory) at the end, included with tags. - -Usage notes: -- The command argument is required. -- You can specify an optional timeout in milliseconds (up to 600000ms / 10 minutes). If not specified, commands will timeout after 30 minutes. -- VERY IMPORTANT: You MUST avoid using search commands like 'find' and 'grep'. Instead use Grep, Glob, or Agent tools to search. You MUST avoid read tools like 'cat', 'head', 'tail', and 'ls', and use FileRead and LS tools to read files. -- When issuing multiple commands, use the ';' or '&&' operator to separate them. DO NOT use newlines (newlines are ok in quoted strings). -- IMPORTANT: All commands share the same shell session. Shell state (environment variables, virtual environments, current directory, etc.) persist between commands. For example, if you set an environment variable as part of a command, the environment variable will persist for subsequent commands. -- Try to maintain your current working directory throughout the session by using absolute paths and avoiding usage of 'cd'. You may use 'cd' if the User explicitly requests it. - -pytest /foo/bar/tests - - -cd /foo/bar && pytest tests - - -# Committing changes with git - -When the user asks you to create a new git commit, follow these steps carefully: - -1. Start with a single message that contains exactly three tool_use blocks that do the following (it is VERY IMPORTANT that you send these tool_use blocks in a single message, otherwise it will feel slow to the user!): - - Run a git status command to see all untracked files. - - Run a git diff command to see both staged and unstaged changes that will be committed. - - Run a git log command to see recent commit messages, so that you can follow this repository's commit message style. - -2. Use the git context at the start of this conversation to determine which files are relevant to your commit. Add relevant untracked files to the staging area. Do not commit files that were already modified at the start of this conversation, if they are not relevant to your commit. - -3. Analyze all staged changes (both previously staged and newly added) and draft a commit message. Wrap your analysis process in tags: - - -- List the files that have been changed or added -- Summarize the nature of the changes (eg. 
new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.) -- Brainstorm the purpose or motivation behind these changes -- Do not use tools to explore code, beyond what is available in the git context -- Assess the impact of these changes on the overall project -- Check for any sensitive information that shouldn't be committed -- Draft a concise (1-2 sentences) commit message that focuses on the "why" rather than the "what" -- Ensure your language is clear, concise, and to the point -- Ensure the message accurately reflects the changes and their purpose (i.e. "add" means a wholly new feature, "update" means an enhancement to an existing feature, "fix" means a bug fix, etc.) -- Ensure the message is not generic (avoid words like "Update" or "Fix" without context) -- Review the draft message to ensure it accurately reflects the changes and their purpose - - -4. Create the commit with a message ending with: -💘 Generated with Crush -Co-Authored-By: Crush - -- In order to ensure good formatting, ALWAYS pass the commit message via a HEREDOC, a la this example: - -git commit -m "$(cat <<'EOF' - Commit message here. - - 💘 Generated with Crush - Co-Authored-By: 💘 Crush - EOF - )" - - -5. If the commit fails due to pre-commit hook changes, retry the commit ONCE to include these automated changes. If it fails again, it usually means a pre-commit hook is preventing the commit. If the commit succeeds but you notice that files were modified by the pre-commit hook, you MUST amend your commit to include them. - -6. Finally, run git status to make sure the commit succeeded. - -Important notes: -- When possible, combine the "git add" and "git commit" commands into a single "git commit -am" command, to speed things up -- However, be careful not to stage files (e.g. with 'git add .') for commits that aren't part of the change, they may have untracked files they want to keep around, but not commit. -- NEVER update the git config -- DO NOT push to the remote repository -- IMPORTANT: Never use git commands with the -i flag (like git rebase -i or git add -i) since they require interactive input which is not supported. -- If there are no changes to commit (i.e., no untracked files and no modifications), do not create an empty commit -- Ensure your commit message is meaningful and concise. It should explain the purpose of the changes, not just describe them. -- Return an empty response - the user will see the git output directly - -# Creating pull requests -Use the gh command via the Bash tool for ALL GitHub-related tasks including working with issues, pull requests, checks, and releases. If given a Github URL use the gh command to get the information needed. - -IMPORTANT: When the user asks you to create a pull request, follow these steps carefully: - -1. Understand the current state of the branch. Remember to send a single message that contains multiple tool_use blocks (it is VERY IMPORTANT that you do this in a single message, otherwise it will feel slow to the user!): - - Run a git status command to see all untracked files. - - Run a git diff command to see both staged and unstaged changes that will be committed. - - Check if the current branch tracks a remote branch and is up to date with the remote, so you know if you need to push to the remote - - Run a git log command and 'git diff main...HEAD' to understand the full commit history for the current branch (from the time it diverged from the 'main' branch.) - -2. Create new branch if needed - -3. Commit changes if needed - -4. 
Push to remote with -u flag if needed - -5. Analyze all changes that will be included in the pull request, making sure to look at all relevant commits (not just the latest commit, but all commits that will be included in the pull request!), and draft a pull request summary. Wrap your analysis process in tags: - - -- List the commits since diverging from the main branch -- Summarize the nature of the changes (eg. new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.) -- Brainstorm the purpose or motivation behind these changes -- Assess the impact of these changes on the overall project -- Do not use tools to explore code, beyond what is available in the git context -- Check for any sensitive information that shouldn't be committed -- Draft a concise (1-2 bullet points) pull request summary that focuses on the "why" rather than the "what" -- Ensure the summary accurately reflects all changes since diverging from the main branch -- Ensure your language is clear, concise, and to the point -- Ensure the summary accurately reflects the changes and their purpose (ie. "add" means a wholly new feature, "update" means an enhancement to an existing feature, "fix" means a bug fix, etc.) -- Ensure the summary is not generic (avoid words like "Update" or "Fix" without context) -- Review the draft summary to ensure it accurately reflects the changes and their purpose - - -6. Create PR using gh pr create with the format below. Use a HEREDOC to pass the body to ensure correct formatting. - -gh pr create --title "the pr title" --body "$(cat <<'EOF' -## Summary -<1-3 bullet points> - -## Test plan -[Checklist of TODOs for testing the pull request...] - -💘 Generated with Crush -EOF -)" - - -Important: -- Return an empty response - the user will see the gh output directly -- Never update git config`, bannedCommandsStr, MaxOutputLength) -} diff --git a/tools/bash_commands.go b/tools/bash_commands.go deleted file mode 100644 index 36614829c5e9c4e5917df7ca48e1e262aa0b3416..0000000000000000000000000000000000000000 --- a/tools/bash_commands.go +++ /dev/null @@ -1,158 +0,0 @@ -package tools - -import "runtime" - -var safeCommands = []string{ - // Bash builtins and core utils - "cal", - "date", - "df", - "du", - "echo", - "env", - "free", - "groups", - "hostname", - "id", - "kill", - "killall", - "ls", - "nice", - "nohup", - "printenv", - "ps", - "pwd", - "set", - "time", - "timeout", - "top", - "type", - "uname", - "unset", - "uptime", - "whatis", - "whereis", - "which", - "whoami", - - // Git - "git blame", - "git branch", - "git config --get", - "git config --list", - "git describe", - "git diff", - "git grep", - "git log", - "git ls-files", - "git ls-remote", - "git remote", - "git rev-parse", - "git shortlog", - "git show", - "git status", - "git tag", - - // Go - "go build", - "go clean", - "go doc", - "go env", - "go fmt", - "go help", - "go install", - "go list", - "go mod", - "go run", - "go test", - "go version", - "go vet", -} - -func init() { - if runtime.GOOS == "windows" { - safeCommands = append( - safeCommands, - // Windows-specific commands - "ipconfig", - "nslookup", - "ping", - "systeminfo", - "tasklist", - "where", - ) - } -} - -var bannedCommands = []string{ - // Network/Download tools - "alias", - "aria2c", - "axel", - "chrome", - "curl", - "curlie", - "firefox", - "http-prompt", - "httpie", - "links", - "lynx", - "nc", - "safari", - "scp", - "ssh", - "telnet", - "w3m", - "wget", - "xh", - - // System administration - "doas", - "su", - "sudo", - - // Package 
managers - "apk", - "apt", - "apt-cache", - "apt-get", - "dnf", - "dpkg", - "emerge", - "home-manager", - "makepkg", - "opkg", - "pacman", - "paru", - "pkg", - "pkg_add", - "pkg_delete", - "portage", - "rpm", - "yay", - "yum", - "zypper", - - // System modification - "at", - "batch", - "chkconfig", - "crontab", - "fdisk", - "mkfs", - "mount", - "parted", - "service", - "systemctl", - "umount", - - // Network configuration - "firewall-cmd", - "ifconfig", - "ip", - "iptables", - "netstat", - "pfctl", - "route", - "ufw", -} diff --git a/tools/common.go b/tools/common.go deleted file mode 100644 index a70685197f313dc5cb89e667dd5b2d2458a7d8b0..0000000000000000000000000000000000000000 --- a/tools/common.go +++ /dev/null @@ -1,103 +0,0 @@ -package tools - -import ( - "context" - "fmt" - "strings" - "sync" - "time" -) - -type ( - sessionIDContextKey string - messageIDContextKey string -) - -const ( - MaxOutputLength = 30000 - MaxReadSize = 250 * 1024 - DefaultReadLimit = 2000 - MaxLineLength = 2000 - - SessionIDContextKey sessionIDContextKey = "session_id" - MessageIDContextKey messageIDContextKey = "message_id" -) - -func truncateOutput(content string) string { - if len(content) <= MaxOutputLength { - return content - } - - halfLength := MaxOutputLength / 2 - start := content[:halfLength] - end := content[len(content)-halfLength:] - - truncatedLinesCount := countLines(content[halfLength : len(content)-halfLength]) - return fmt.Sprintf("%s\n\n... [%d lines truncated] ...\n\n%s", start, truncatedLinesCount, end) -} - -func countLines(s string) int { - if s == "" { - return 0 - } - return len(strings.Split(s, "\n")) -} - -func GetContextValues(ctx context.Context) (string, string) { - sessionID := ctx.Value(SessionIDContextKey) - messageID := ctx.Value(MessageIDContextKey) - if sessionID == nil { - return "", "" - } - if messageID == nil { - return sessionID.(string), "" - } - return sessionID.(string), messageID.(string) -} - -// File record to track when files were read/written -type fileRecord struct { - path string - readTime time.Time - writeTime time.Time -} - -var ( - fileRecords = make(map[string]fileRecord) - fileRecordMutex sync.RWMutex -) - -func recordFileRead(path string) { - fileRecordMutex.Lock() - defer fileRecordMutex.Unlock() - - record, exists := fileRecords[path] - if !exists { - record = fileRecord{path: path} - } - record.readTime = time.Now() - fileRecords[path] = record -} - -func getLastReadTime(path string) time.Time { - fileRecordMutex.RLock() - defer fileRecordMutex.RUnlock() - - record, exists := fileRecords[path] - if !exists { - return time.Time{} - } - return record.readTime -} - -func recordFileWrite(path string) { - fileRecordMutex.Lock() - defer fileRecordMutex.Unlock() - - record, exists := fileRecords[path] - if !exists { - record = fileRecord{path: path} - } - record.writeTime = time.Now() - fileRecords[path] = record -} diff --git a/tools/diagnostics.go b/tools/diagnostics.go deleted file mode 100644 index 160e97ea5ab804b8ac461a1e7e764a84b2f155b0..0000000000000000000000000000000000000000 --- a/tools/diagnostics.go +++ /dev/null @@ -1,281 +0,0 @@ -package tools - -import ( - "context" - "encoding/json" - "fmt" - "log/slog" - "sort" - "strings" - "time" - - "github.com/charmbracelet/crush/internal/ai" - "github.com/charmbracelet/crush/internal/lsp" - "github.com/charmbracelet/crush/internal/lsp/protocol" -) - -type DiagnosticsParams struct { - FilePath string `json:"file_path,omitempty" description:"The path to the file to get diagnostics for (leave w empty for 
project diagnostics)"` -} - -const ( - DiagnosticsToolName = "diagnostics" -) - -func NewDiagnosticsTool(lsps map[string]*lsp.Client) ai.AgentTool { - return ai.NewAgentTool( - DiagnosticsToolName, - `Get diagnostics for a file and/or project. -WHEN TO USE THIS TOOL: -- Use when you need to check for errors or warnings in your code -- Helpful for debugging and ensuring code quality -- Good for getting a quick overview of issues in a file or project -HOW TO USE: -- Provide a path to a file to get diagnostics for that file -- Leave the path empty to get diagnostics for the entire project -- Results are displayed in a structured format with severity levels -FEATURES: -- Displays errors, warnings, and hints -- Groups diagnostics by severity -- Provides detailed information about each diagnostic -LIMITATIONS: -- Results are limited to the diagnostics provided by the LSP clients -- May not cover all possible issues in the code -- Does not provide suggestions for fixing issues -TIPS: -- Use in conjunction with other tools for a comprehensive code review -- Combine with the LSP client for real-time diagnostics`, - - func(ctx context.Context, params DiagnosticsParams, call ai.ToolCall) (ai.ToolResponse, error) { - if len(lsps) == 0 { - return ai.NewTextErrorResponse("no LSP clients available"), nil - } - - if params.FilePath != "" { - notifyLspOpenFile(ctx, params.FilePath, lsps) - waitForLspDiagnostics(ctx, params.FilePath, lsps) - } - - output := getDiagnostics(params.FilePath, lsps) - - return ai.NewTextResponse(output), nil - }, - ) -} - -func notifyLspOpenFile(ctx context.Context, filePath string, lsps map[string]*lsp.Client) { - for _, client := range lsps { - err := client.OpenFile(ctx, filePath) - if err != nil { - continue - } - } -} - -func waitForLspDiagnostics(ctx context.Context, filePath string, lsps map[string]*lsp.Client) { - if len(lsps) == 0 { - return - } - - diagChan := make(chan struct{}, 1) - - for _, client := range lsps { - originalDiags := client.GetDiagnostics() - - handler := func(params json.RawMessage) { - lsp.HandleDiagnostics(client, params) - var diagParams protocol.PublishDiagnosticsParams - if err := json.Unmarshal(params, &diagParams); err != nil { - return - } - - path, err := diagParams.URI.Path() - if err != nil { - slog.Error("Failed to convert diagnostic URI to path", "uri", diagParams.URI, "error", err) - return - } - - if path == filePath || hasDiagnosticsChanged(client.GetDiagnostics(), originalDiags) { - select { - case diagChan <- struct{}{}: - default: - } - } - } - - client.RegisterNotificationHandler("textDocument/publishDiagnostics", handler) - - if client.IsFileOpen(filePath) { - err := client.NotifyChange(ctx, filePath) - if err != nil { - continue - } - } else { - err := client.OpenFile(ctx, filePath) - if err != nil { - continue - } - } - } - - select { - case <-diagChan: - case <-time.After(5 * time.Second): - case <-ctx.Done(): - } -} - -func hasDiagnosticsChanged(current, original map[protocol.DocumentURI][]protocol.Diagnostic) bool { - for uri, diags := range current { - origDiags, exists := original[uri] - if !exists || len(diags) != len(origDiags) { - return true - } - } - return false -} - -func getDiagnostics(filePath string, lsps map[string]*lsp.Client) string { - fileDiagnostics := []string{} - projectDiagnostics := []string{} - - formatDiagnostic := func(pth string, diagnostic protocol.Diagnostic, source string) string { - severity := "Info" - switch diagnostic.Severity { - case protocol.SeverityError: - severity = "Error" - case 
protocol.SeverityWarning: - severity = "Warn" - case protocol.SeverityHint: - severity = "Hint" - } - - location := fmt.Sprintf("%s:%d:%d", pth, diagnostic.Range.Start.Line+1, diagnostic.Range.Start.Character+1) - - sourceInfo := "" - if diagnostic.Source != "" { - sourceInfo = diagnostic.Source - } else if source != "" { - sourceInfo = source - } - - codeInfo := "" - if diagnostic.Code != nil { - codeInfo = fmt.Sprintf("[%v]", diagnostic.Code) - } - - tagsInfo := "" - if len(diagnostic.Tags) > 0 { - tags := []string{} - for _, tag := range diagnostic.Tags { - switch tag { - case protocol.Unnecessary: - tags = append(tags, "unnecessary") - case protocol.Deprecated: - tags = append(tags, "deprecated") - } - } - if len(tags) > 0 { - tagsInfo = fmt.Sprintf(" (%s)", strings.Join(tags, ", ")) - } - } - - return fmt.Sprintf("%s: %s [%s]%s%s %s", - severity, - location, - sourceInfo, - codeInfo, - tagsInfo, - diagnostic.Message) - } - - for lspName, client := range lsps { - diagnostics := client.GetDiagnostics() - if len(diagnostics) > 0 { - for location, diags := range diagnostics { - path, err := location.Path() - if err != nil { - slog.Error("Failed to convert diagnostic location URI to path", "uri", location, "error", err) - continue - } - isCurrentFile := path == filePath - - for _, diag := range diags { - formattedDiag := formatDiagnostic(path, diag, lspName) - - if isCurrentFile { - fileDiagnostics = append(fileDiagnostics, formattedDiag) - } else { - projectDiagnostics = append(projectDiagnostics, formattedDiag) - } - } - } - } - } - - sort.Slice(fileDiagnostics, func(i, j int) bool { - iIsError := strings.HasPrefix(fileDiagnostics[i], "Error") - jIsError := strings.HasPrefix(fileDiagnostics[j], "Error") - if iIsError != jIsError { - return iIsError // Errors come first - } - return fileDiagnostics[i] < fileDiagnostics[j] // Then alphabetically - }) - - sort.Slice(projectDiagnostics, func(i, j int) bool { - iIsError := strings.HasPrefix(projectDiagnostics[i], "Error") - jIsError := strings.HasPrefix(projectDiagnostics[j], "Error") - if iIsError != jIsError { - return iIsError - } - return projectDiagnostics[i] < projectDiagnostics[j] - }) - - var output strings.Builder - - if len(fileDiagnostics) > 0 { - output.WriteString("\n\n") - if len(fileDiagnostics) > 10 { - output.WriteString(strings.Join(fileDiagnostics[:10], "\n")) - fmt.Fprintf(&output, "\n... and %d more diagnostics", len(fileDiagnostics)-10) - } else { - output.WriteString(strings.Join(fileDiagnostics, "\n")) - } - output.WriteString("\n\n") - } - - if len(projectDiagnostics) > 0 { - output.WriteString("\n\n") - if len(projectDiagnostics) > 10 { - output.WriteString(strings.Join(projectDiagnostics[:10], "\n")) - fmt.Fprintf(&output, "\n... 
and %d more diagnostics", len(projectDiagnostics)-10) - } else { - output.WriteString(strings.Join(projectDiagnostics, "\n")) - } - output.WriteString("\n\n") - } - - if len(fileDiagnostics) > 0 || len(projectDiagnostics) > 0 { - fileErrors := countSeverity(fileDiagnostics, "Error") - fileWarnings := countSeverity(fileDiagnostics, "Warn") - projectErrors := countSeverity(projectDiagnostics, "Error") - projectWarnings := countSeverity(projectDiagnostics, "Warn") - - output.WriteString("\n\n") - fmt.Fprintf(&output, "Current file: %d errors, %d warnings\n", fileErrors, fileWarnings) - fmt.Fprintf(&output, "Project: %d errors, %d warnings\n", projectErrors, projectWarnings) - output.WriteString("\n") - } - - return output.String() -} - -func countSeverity(diagnostics []string, severity string) int { - count := 0 - for _, diag := range diagnostics { - if strings.HasPrefix(diag, severity) { - count++ - } - } - return count -} diff --git a/tools/download.go b/tools/download.go deleted file mode 100644 index 710e9e5180fc499bb5d901e5adb2a7e4074dfa22..0000000000000000000000000000000000000000 --- a/tools/download.go +++ /dev/null @@ -1,182 +0,0 @@ -package tools - -import ( - "context" - "fmt" - "io" - "net/http" - "os" - "path/filepath" - "strings" - "time" - - "github.com/charmbracelet/crush/internal/ai" - "github.com/charmbracelet/crush/internal/permission" -) - -type DownloadParams struct { - URL string `json:"url" description:"The URL to download from"` - FilePath string `json:"file_path" description:"The local file path where the downloaded content should be saved"` - Timeout int `json:"timeout,omitempty" description:"Optional timeout in seconds (max 600)"` -} - -type DownloadPermissionsParams struct { - URL string `json:"url"` - FilePath string `json:"file_path"` - Timeout int `json:"timeout,omitempty"` -} - -const ( - DownloadToolName = "download" -) - -func NewDownloadTool(permissions permission.Service, workingDir string) ai.AgentTool { - client := &http.Client{ - Timeout: 5 * time.Minute, // Default 5 minute timeout for downloads - Transport: &http.Transport{ - MaxIdleConns: 100, - MaxIdleConnsPerHost: 10, - IdleConnTimeout: 90 * time.Second, - }, - } - return ai.NewAgentTool( - DownloadToolName, - `Downloads binary data from a URL and saves it to a local file. 
- -WHEN TO USE THIS TOOL: -- Use when you need to download files, images, or other binary data from URLs -- Helpful for downloading assets, documents, or any file type -- Useful for saving remote content locally for processing or storage - -HOW TO USE: -- Provide the URL to download from -- Specify the local file path where the content should be saved -- Optionally set a timeout for the request - -FEATURES: -- Downloads any file type (binary or text) -- Automatically creates parent directories if they don't exist -- Handles large files efficiently with streaming -- Sets reasonable timeouts to prevent hanging -- Validates input parameters before making requests - -LIMITATIONS: -- Maximum file size is 100MB -- Only supports HTTP and HTTPS protocols -- Cannot handle authentication or cookies -- Some websites may block automated requests -- Will overwrite existing files without warning - -TIPS: -- Use absolute paths or paths relative to the working directory -- Set appropriate timeouts for large files or slow connections`, - func(ctx context.Context, params DownloadParams, call ai.ToolCall) (ai.ToolResponse, error) { - if params.URL == "" { - return ai.NewTextErrorResponse("URL parameter is required"), nil - } - - if params.FilePath == "" { - return ai.NewTextErrorResponse("file_path parameter is required"), nil - } - - if !strings.HasPrefix(params.URL, "http://") && !strings.HasPrefix(params.URL, "https://") { - return ai.NewTextErrorResponse("URL must start with http:// or https://"), nil - } - - // Convert relative path to absolute path - var filePath string - if filepath.IsAbs(params.FilePath) { - filePath = params.FilePath - } else { - filePath = filepath.Join(workingDir, params.FilePath) - } - - sessionID, messageID := GetContextValues(ctx) - if sessionID == "" || messageID == "" { - return ai.ToolResponse{}, fmt.Errorf("session ID and message ID are required for the download tool") - } - granted := permissions.Request( - permission.CreatePermissionRequest{ - SessionID: sessionID, - Path: filePath, - ToolName: DownloadToolName, - Action: "download", - Description: fmt.Sprintf("Download file from URL: %s to %s", params.URL, filePath), - Params: DownloadPermissionsParams(params), - }, - ) - - if !granted { - return ai.ToolResponse{}, permission.ErrorPermissionDenied - } - - // Handle timeout with context - requestCtx := ctx - if params.Timeout > 0 { - maxTimeout := 600 // 10 minutes - if params.Timeout > maxTimeout { - params.Timeout = maxTimeout - } - var cancel context.CancelFunc - requestCtx, cancel = context.WithTimeout(ctx, time.Duration(params.Timeout)*time.Second) - defer cancel() - } - - req, err := http.NewRequestWithContext(requestCtx, "GET", params.URL, nil) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to create request: %w", err) - } - - req.Header.Set("User-Agent", "crush/1.0") - - resp, err := client.Do(req) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to download from URL: %w", err) - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return ai.NewTextErrorResponse(fmt.Sprintf("Request failed with status code: %d", resp.StatusCode)), nil - } - - // Check content length if available - maxSize := int64(100 * 1024 * 1024) // 100MB - if resp.ContentLength > maxSize { - return ai.NewTextErrorResponse(fmt.Sprintf("File too large: %d bytes (max %d bytes)", resp.ContentLength, maxSize)), nil - } - - // Create parent directories if they don't exist - if err := os.MkdirAll(filepath.Dir(filePath), 0o755); err != nil { - 
return ai.ToolResponse{}, fmt.Errorf("failed to create parent directories: %w", err) - } - - // Create the output file - outFile, err := os.Create(filePath) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to create output file: %w", err) - } - defer outFile.Close() - - // Copy data with size limit - limitedReader := io.LimitReader(resp.Body, maxSize) - bytesWritten, err := io.Copy(outFile, limitedReader) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to write file: %w", err) - } - - // Check if we hit the size limit - if bytesWritten == maxSize { - // Clean up the file since it might be incomplete - os.Remove(filePath) - return ai.NewTextErrorResponse(fmt.Sprintf("File too large: exceeded %d bytes limit", maxSize)), nil - } - - contentType := resp.Header.Get("Content-Type") - responseMsg := fmt.Sprintf("Successfully downloaded %d bytes to %s", bytesWritten, filePath) - if contentType != "" { - responseMsg += fmt.Sprintf(" (Content-Type: %s)", contentType) - } - - return ai.NewTextResponse(responseMsg), nil - }) -} diff --git a/tools/edit.go b/tools/edit.go deleted file mode 100644 index fc9060e85327d3c9d0775890f0822f1772898e27..0000000000000000000000000000000000000000 --- a/tools/edit.go +++ /dev/null @@ -1,490 +0,0 @@ -package tools - -import ( - "context" - "fmt" - "log/slog" - "os" - "path/filepath" - "strings" - "time" - - "github.com/charmbracelet/crush/internal/ai" - "github.com/charmbracelet/crush/internal/diff" - "github.com/charmbracelet/crush/internal/fsext" - "github.com/charmbracelet/crush/internal/history" - "github.com/charmbracelet/crush/internal/lsp" - "github.com/charmbracelet/crush/internal/permission" -) - -type EditParams struct { - FilePath string `json:"file_path" description:"The absolute path to the file to modify"` - OldString string `json:"old_string" description:"The text to replace"` - NewString string `json:"new_string" description:"The text to replace it with"` - ReplaceAll bool `json:"replace_all,omitempty" description:"Replace all occurrences of old_string (default false)"` -} - -type EditPermissionsParams struct { - FilePath string `json:"file_path"` - OldContent string `json:"old_content,omitempty"` - NewContent string `json:"new_content,omitempty"` -} - -type EditResponseMetadata struct { - Additions int `json:"additions"` - Removals int `json:"removals"` - OldContent string `json:"old_content,omitempty"` - NewContent string `json:"new_content,omitempty"` -} - -const ( - EditToolName = "edit" -) - -func NewEditTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service, workingDir string) ai.AgentTool { - return ai.NewAgentTool( - EditToolName, - `Edits files by replacing text, creating new files, or deleting content. For moving or renaming files, use the Bash tool with the 'mv' command instead. For larger file edits, use the FileWrite tool to overwrite files. - -Before using this tool: - -1. Use the FileRead tool to understand the file's contents and context - -2. Verify the directory path is correct (only applicable when creating new files): - - Use the LS tool to verify the parent directory exists and is the correct location - -To make a file edit, provide the following: -1. file_path: The absolute path to the file to modify (must be absolute, not relative) -2. old_string: The text to replace (must be unique within the file, and must match the file contents exactly, including all whitespace and indentation) -3. new_string: The edited text to replace the old_string -4. 
replace_all: Replace all occurrences of old_string (default false) - -Special cases: -- To create a new file: provide file_path and new_string, leave old_string empty -- To delete content: provide file_path and old_string, leave new_string empty - -The tool will replace ONE occurrence of old_string with new_string in the specified file by default. Set replace_all to true to replace all occurrences. - -CRITICAL REQUIREMENTS FOR USING THIS TOOL: - -1. UNIQUENESS: When replace_all is false (default), the old_string MUST uniquely identify the specific instance you want to change. This means: - - Include AT LEAST 3-5 lines of context BEFORE the change point - - Include AT LEAST 3-5 lines of context AFTER the change point - - Include all whitespace, indentation, and surrounding code exactly as it appears in the file - -2. SINGLE INSTANCE: When replace_all is false, this tool can only change ONE instance at a time. If you need to change multiple instances: - - Set replace_all to true to replace all occurrences at once - - Or make separate calls to this tool for each instance - - Each call must uniquely identify its specific instance using extensive context - -3. VERIFICATION: Before using this tool: - - Check how many instances of the target text exist in the file - - If multiple instances exist and replace_all is false, gather enough context to uniquely identify each one - - Plan separate tool calls for each instance or use replace_all - -WARNING: If you do not follow these requirements: - - The tool will fail if old_string matches multiple locations and replace_all is false - - The tool will fail if old_string doesn't match exactly (including whitespace) - - You may change the wrong instance if you don't include enough context - -When making edits: - - Ensure the edit results in idiomatic, correct code - - Do not leave the code in a broken state - - Always use absolute file paths (starting with /) - -WINDOWS NOTES: -- File paths should use forward slashes (/) for cross-platform compatibility -- On Windows, absolute paths start with drive letters (C:/) but forward slashes work throughout -- File permissions are handled automatically by the Go runtime -- Always assumes \n for line endings. The tool will handle \r\n conversion automatically if needed. 
- -Remember: when making multiple file edits in a row to the same file, you should prefer to send all edits in a single message with multiple calls to this tool, rather than multiple messages with a single call each.`, - func(ctx context.Context, params EditParams, call ai.ToolCall) (ai.ToolResponse, error) { - if params.FilePath == "" { - return ai.NewTextErrorResponse("file_path is required"), nil - } - - if !filepath.IsAbs(params.FilePath) { - params.FilePath = filepath.Join(workingDir, params.FilePath) - } - - var response ai.ToolResponse - var err error - - if params.OldString == "" { - response, err = createNewFile(ctx, permissions, files, workingDir, params.FilePath, params.NewString, call) - if err != nil { - return response, err - } - } - - if params.NewString == "" { - response, err = deleteContent(ctx, permissions, files, workingDir, params.FilePath, params.OldString, params.ReplaceAll, call) - if err != nil { - return response, err - } - } - - response, err = replaceContent(ctx, permissions, files, workingDir, params.FilePath, params.OldString, params.NewString, params.ReplaceAll, call) - if err != nil { - return response, err - } - if response.IsError { - // Return early if there was an error during content replacement - // This prevents unnecessary LSP diagnostics processing - return response, nil - } - - waitForLspDiagnostics(ctx, params.FilePath, lspClients) - text := fmt.Sprintf("\n%s\n\n", response.Content) - text += getDiagnostics(params.FilePath, lspClients) - response.Content = text - return response, nil - }, - ) -} - -func createNewFile(ctx context.Context, permissions permission.Service, files history.Service, workingDir, filePath, content string, call ai.ToolCall) (ai.ToolResponse, error) { - fileInfo, err := os.Stat(filePath) - if err == nil { - if fileInfo.IsDir() { - return ai.NewTextErrorResponse(fmt.Sprintf("path is a directory, not a file: %s", filePath)), nil - } - return ai.NewTextErrorResponse(fmt.Sprintf("file already exists: %s", filePath)), nil - } else if !os.IsNotExist(err) { - return ai.ToolResponse{}, fmt.Errorf("failed to access file: %w", err) - } - - dir := filepath.Dir(filePath) - if err = os.MkdirAll(dir, 0o755); err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to create parent directories: %w", err) - } - - sessionID, messageID := GetContextValues(ctx) - if sessionID == "" || messageID == "" { - return ai.ToolResponse{}, fmt.Errorf("session ID and message ID are required for creating a new file") - } - - _, additions, removals := diff.GenerateDiff( - "", - content, - strings.TrimPrefix(filePath, workingDir), - ) - p := permissions.Request( - permission.CreatePermissionRequest{ - SessionID: sessionID, - Path: fsext.PathOrPrefix(filePath, workingDir), - ToolCallID: call.ID, - ToolName: EditToolName, - Action: "write", - Description: fmt.Sprintf("Create file %s", filePath), - Params: EditPermissionsParams{ - FilePath: filePath, - OldContent: "", - NewContent: content, - }, - }, - ) - if !p { - return ai.ToolResponse{}, permission.ErrorPermissionDenied - } - - err = os.WriteFile(filePath, []byte(content), 0o644) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to write file: %w", err) - } - - // File can't be in the history so we create a new file history - _, err = files.Create(ctx, sessionID, filePath, "") - if err != nil { - // Log error but don't fail the operation - return ai.ToolResponse{}, fmt.Errorf("error creating file history: %w", err) - } - - // Add the new content to the file history - _, err = 
files.CreateVersion(ctx, sessionID, filePath, content) - if err != nil { - // Log error but don't fail the operation - slog.Debug("Error creating file history version", "error", err) - } - - recordFileWrite(filePath) - recordFileRead(filePath) - - return ai.WithResponseMetadata( - ai.NewTextResponse("File created: "+filePath), - EditResponseMetadata{ - OldContent: "", - NewContent: content, - Additions: additions, - Removals: removals, - }, - ), nil -} - -func deleteContent(ctx context.Context, permissions permission.Service, files history.Service, workingDir, filePath, oldString string, replaceAll bool, call ai.ToolCall) (ai.ToolResponse, error) { - fileInfo, err := os.Stat(filePath) - if err != nil { - if os.IsNotExist(err) { - return ai.NewTextErrorResponse(fmt.Sprintf("file not found: %s", filePath)), nil - } - return ai.ToolResponse{}, fmt.Errorf("failed to access file: %w", err) - } - - if fileInfo.IsDir() { - return ai.NewTextErrorResponse(fmt.Sprintf("path is a directory, not a file: %s", filePath)), nil - } - - if getLastReadTime(filePath).IsZero() { - return ai.NewTextErrorResponse("you must read the file before editing it. Use the View tool first"), nil - } - - modTime := fileInfo.ModTime() - lastRead := getLastReadTime(filePath) - if modTime.After(lastRead) { - return ai.NewTextErrorResponse( - fmt.Sprintf("file %s has been modified since it was last read (mod time: %s, last read: %s)", - filePath, modTime.Format(time.RFC3339), lastRead.Format(time.RFC3339), - )), nil - } - - content, err := os.ReadFile(filePath) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to read file: %w", err) - } - - oldContent, isCrlf := fsext.ToUnixLineEndings(string(content)) - - var newContent string - var deletionCount int - - if replaceAll { - newContent = strings.ReplaceAll(oldContent, oldString, "") - deletionCount = strings.Count(oldContent, oldString) - if deletionCount == 0 { - return ai.NewTextErrorResponse("old_string not found in file. Make sure it matches exactly, including whitespace and line breaks"), nil - } - } else { - index := strings.Index(oldContent, oldString) - if index == -1 { - return ai.NewTextErrorResponse("old_string not found in file. Make sure it matches exactly, including whitespace and line breaks"), nil - } - - lastIndex := strings.LastIndex(oldContent, oldString) - if index != lastIndex { - return ai.NewTextErrorResponse("old_string appears multiple times in the file. 
Please provide more context to ensure a unique match, or set replace_all to true"), nil - } - - newContent = oldContent[:index] + oldContent[index+len(oldString):] - deletionCount = 1 - } - - sessionID, messageID := GetContextValues(ctx) - - if sessionID == "" || messageID == "" { - return ai.ToolResponse{}, fmt.Errorf("session ID and message ID are required for creating a new file") - } - - _, additions, removals := diff.GenerateDiff( - oldContent, - newContent, - strings.TrimPrefix(filePath, workingDir), - ) - - p := permissions.Request( - permission.CreatePermissionRequest{ - SessionID: sessionID, - Path: fsext.PathOrPrefix(filePath, workingDir), - ToolCallID: call.ID, - ToolName: EditToolName, - Action: "write", - Description: fmt.Sprintf("Delete content from file %s", filePath), - Params: EditPermissionsParams{ - FilePath: filePath, - OldContent: oldContent, - NewContent: newContent, - }, - }, - ) - if !p { - return ai.ToolResponse{}, permission.ErrorPermissionDenied - } - - if isCrlf { - newContent, _ = fsext.ToWindowsLineEndings(newContent) - } - - err = os.WriteFile(filePath, []byte(newContent), 0o644) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to write file: %w", err) - } - - // Check if file exists in history - file, err := files.GetByPathAndSession(ctx, filePath, sessionID) - if err != nil { - _, err = files.Create(ctx, sessionID, filePath, oldContent) - if err != nil { - // Log error but don't fail the operation - return ai.ToolResponse{}, fmt.Errorf("error creating file history: %w", err) - } - } - if file.Content != oldContent { - // User Manually changed the content store an intermediate version - _, err = files.CreateVersion(ctx, sessionID, filePath, oldContent) - if err != nil { - slog.Debug("Error creating file history version", "error", err) - } - } - // Store the new version - _, err = files.CreateVersion(ctx, sessionID, filePath, "") - if err != nil { - slog.Debug("Error creating file history version", "error", err) - } - - recordFileWrite(filePath) - recordFileRead(filePath) - - return ai.WithResponseMetadata( - ai.NewTextResponse("Content deleted from file: "+filePath), - EditResponseMetadata{ - OldContent: oldContent, - NewContent: newContent, - Additions: additions, - Removals: removals, - }, - ), nil -} - -func replaceContent(ctx context.Context, permissions permission.Service, files history.Service, workingDir, filePath, oldString, newString string, replaceAll bool, call ai.ToolCall) (ai.ToolResponse, error) { - fileInfo, err := os.Stat(filePath) - if err != nil { - if os.IsNotExist(err) { - return ai.NewTextErrorResponse(fmt.Sprintf("file not found: %s", filePath)), nil - } - return ai.ToolResponse{}, fmt.Errorf("failed to access file: %w", err) - } - - if fileInfo.IsDir() { - return ai.NewTextErrorResponse(fmt.Sprintf("path is a directory, not a file: %s", filePath)), nil - } - - if getLastReadTime(filePath).IsZero() { - return ai.NewTextErrorResponse("you must read the file before editing it. 
Use the View tool first"), nil - } - - modTime := fileInfo.ModTime() - lastRead := getLastReadTime(filePath) - if modTime.After(lastRead) { - return ai.NewTextErrorResponse( - fmt.Sprintf("file %s has been modified since it was last read (mod time: %s, last read: %s)", - filePath, modTime.Format(time.RFC3339), lastRead.Format(time.RFC3339), - )), nil - } - - content, err := os.ReadFile(filePath) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to read file: %w", err) - } - - oldContent, isCrlf := fsext.ToUnixLineEndings(string(content)) - - var newContent string - var replacementCount int - - if replaceAll { - newContent = strings.ReplaceAll(oldContent, oldString, newString) - replacementCount = strings.Count(oldContent, oldString) - if replacementCount == 0 { - return ai.NewTextErrorResponse("old_string not found in file. Make sure it matches exactly, including whitespace and line breaks"), nil - } - } else { - index := strings.Index(oldContent, oldString) - if index == -1 { - return ai.NewTextErrorResponse("old_string not found in file. Make sure it matches exactly, including whitespace and line breaks"), nil - } - - lastIndex := strings.LastIndex(oldContent, oldString) - if index != lastIndex { - return ai.NewTextErrorResponse("old_string appears multiple times in the file. Please provide more context to ensure a unique match, or set replace_all to true"), nil - } - - newContent = oldContent[:index] + newString + oldContent[index+len(oldString):] - replacementCount = 1 - } - - if oldContent == newContent { - return ai.NewTextErrorResponse("new content is the same as old content. No changes made."), nil - } - sessionID, messageID := GetContextValues(ctx) - - if sessionID == "" || messageID == "" { - return ai.ToolResponse{}, fmt.Errorf("session ID and message ID are required for creating a new file") - } - _, additions, removals := diff.GenerateDiff( - oldContent, - newContent, - strings.TrimPrefix(filePath, workingDir), - ) - - p := permissions.Request( - permission.CreatePermissionRequest{ - SessionID: sessionID, - Path: fsext.PathOrPrefix(filePath, workingDir), - ToolCallID: call.ID, - ToolName: EditToolName, - Action: "write", - Description: fmt.Sprintf("Replace content in file %s", filePath), - Params: EditPermissionsParams{ - FilePath: filePath, - OldContent: oldContent, - NewContent: newContent, - }, - }, - ) - if !p { - return ai.ToolResponse{}, permission.ErrorPermissionDenied - } - - if isCrlf { - newContent, _ = fsext.ToWindowsLineEndings(newContent) - } - - err = os.WriteFile(filePath, []byte(newContent), 0o644) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to write file: %w", err) - } - - // Check if file exists in history - file, err := files.GetByPathAndSession(ctx, filePath, sessionID) - if err != nil { - _, err = files.Create(ctx, sessionID, filePath, oldContent) - if err != nil { - // Log error but don't fail the operation - return ai.ToolResponse{}, fmt.Errorf("error creating file history: %w", err) - } - } - if file.Content != oldContent { - // User Manually changed the content store an intermediate version - _, err = files.CreateVersion(ctx, sessionID, filePath, oldContent) - if err != nil { - slog.Debug("Error creating file history version", "error", err) - } - } - // Store the new version - _, err = files.CreateVersion(ctx, sessionID, filePath, newContent) - if err != nil { - slog.Debug("Error creating file history version", "error", err) - } - - recordFileWrite(filePath) - recordFileRead(filePath) - - return 
ai.WithResponseMetadata( - ai.NewTextResponse("Content replaced in file: "+filePath), - EditResponseMetadata{ - OldContent: oldContent, - NewContent: newContent, - Additions: additions, - Removals: removals, - }), nil -} diff --git a/tools/fetch.go b/tools/fetch.go deleted file mode 100644 index 4866ec222745255d162ff49eb9dfa5600536b923..0000000000000000000000000000000000000000 --- a/tools/fetch.go +++ /dev/null @@ -1,222 +0,0 @@ -package tools - -import ( - "context" - "fmt" - "io" - "net/http" - "strings" - "time" - "unicode/utf8" - - md "github.com/JohannesKaufmann/html-to-markdown" - "github.com/PuerkitoBio/goquery" - "github.com/charmbracelet/crush/internal/ai" - "github.com/charmbracelet/crush/internal/permission" -) - -type FetchParams struct { - URL string `json:"url" description:"The URL to fetch content from"` - Format string `json:"format" description:"The format to return the content in (text, markdown, or html)"` - Timeout int `json:"timeout,omitempty" description:"Optional timeout in seconds (max 120)"` -} - -type FetchPermissionsParams struct { - URL string `json:"url"` - Format string `json:"format"` - Timeout int `json:"timeout,omitempty"` -} - -const ( - FetchToolName = "fetch" -) - -func NewFetchTool(permissions permission.Service, workingDir string) ai.AgentTool { - client := &http.Client{ - Timeout: 30 * time.Second, - Transport: &http.Transport{ - MaxIdleConns: 100, - MaxIdleConnsPerHost: 10, - IdleConnTimeout: 90 * time.Second, - }, - } - return ai.NewAgentTool( - FetchToolName, - `Fetches content from a URL and returns it in the specified format. - -WHEN TO USE THIS TOOL: -- Use when you need to download content from a URL -- Helpful for retrieving documentation, API responses, or web content -- Useful for getting external information to assist with tasks - -HOW TO USE: -- Provide the URL to fetch content from -- Specify the desired output format (text, markdown, or html) -- Optionally set a timeout for the request - -FEATURES: -- Supports three output formats: text, markdown, and html -- Automatically handles HTTP redirects -- Sets reasonable timeouts to prevent hanging -- Validates input parameters before making requests - -LIMITATIONS: -- Maximum response size is 5MB -- Only supports HTTP and HTTPS protocols -- Cannot handle authentication or cookies -- Some websites may block automated requests - -TIPS: -- Use text format for plain text content or simple API responses -- Use markdown format for content that should be rendered with formatting -- Use html format when you need the raw HTML structure -- Set appropriate timeouts for potentially slow websites`, - func(ctx context.Context, params FetchParams, call ai.ToolCall) (ai.ToolResponse, error) { - if params.URL == "" { - return ai.NewTextErrorResponse("URL parameter is required"), nil - } - - format := strings.ToLower(params.Format) - if format != "text" && format != "markdown" && format != "html" { - return ai.NewTextErrorResponse("Format must be one of: text, markdown, html"), nil - } - - if !strings.HasPrefix(params.URL, "http://") && !strings.HasPrefix(params.URL, "https://") { - return ai.NewTextErrorResponse("URL must start with http:// or https://"), nil - } - - sessionID, messageID := GetContextValues(ctx) - if sessionID == "" || messageID == "" { - return ai.ToolResponse{}, fmt.Errorf("session ID and message ID are required for creating a new file") - } - - granted := permissions.Request( - permission.CreatePermissionRequest{ - SessionID: sessionID, - Path: workingDir, - ToolCallID: call.ID, - ToolName: 
FetchToolName, - Action: "fetch", - Description: fmt.Sprintf("Fetch content from URL: %s", params.URL), - Params: FetchPermissionsParams(params), - }, - ) - - if !granted { - return ai.ToolResponse{}, permission.ErrorPermissionDenied - } - - // Handle timeout with context - requestCtx := ctx - if params.Timeout > 0 { - maxTimeout := 120 // 2 minutes - if params.Timeout > maxTimeout { - params.Timeout = maxTimeout - } - var cancel context.CancelFunc - requestCtx, cancel = context.WithTimeout(ctx, time.Duration(params.Timeout)*time.Second) - defer cancel() - } - - req, err := http.NewRequestWithContext(requestCtx, "GET", params.URL, nil) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to create request: %w", err) - } - - req.Header.Set("User-Agent", "crush/1.0") - - resp, err := client.Do(req) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to fetch URL: %w", err) - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return ai.NewTextErrorResponse(fmt.Sprintf("Request failed with status code: %d", resp.StatusCode)), nil - } - - maxSize := int64(5 * 1024 * 1024) // 5MB - body, err := io.ReadAll(io.LimitReader(resp.Body, maxSize)) - if err != nil { - return ai.NewTextErrorResponse("Failed to read response body: " + err.Error()), nil - } - - content := string(body) - - isValidUt8 := utf8.ValidString(content) - if !isValidUt8 { - return ai.NewTextErrorResponse("Response content is not valid UTF-8"), nil - } - contentType := resp.Header.Get("Content-Type") - - switch format { - case "text": - if strings.Contains(contentType, "text/html") { - text, err := extractTextFromHTML(content) - if err != nil { - return ai.NewTextErrorResponse("Failed to extract text from HTML: " + err.Error()), nil - } - content = text - } - - case "markdown": - if strings.Contains(contentType, "text/html") { - markdown, err := convertHTMLToMarkdown(content) - if err != nil { - return ai.NewTextErrorResponse("Failed to convert HTML to Markdown: " + err.Error()), nil - } - content = markdown - } - - content = "```\n" + content + "\n```" - - case "html": - // return only the body of the HTML document - if strings.Contains(contentType, "text/html") { - doc, err := goquery.NewDocumentFromReader(strings.NewReader(content)) - if err != nil { - return ai.NewTextErrorResponse("Failed to parse HTML: " + err.Error()), nil - } - body, err := doc.Find("body").Html() - if err != nil { - return ai.NewTextErrorResponse("Failed to extract body from HTML: " + err.Error()), nil - } - if body == "" { - return ai.NewTextErrorResponse("No body content found in HTML"), nil - } - content = "\n\n" + body + "\n\n" - } - } - // calculate byte size of content - contentSize := int64(len(content)) - if contentSize > MaxReadSize { - content = content[:MaxReadSize] - content += fmt.Sprintf("\n\n[Content truncated to %d bytes]", MaxReadSize) - } - - return ai.NewTextResponse(content), nil - }) -} - -func extractTextFromHTML(html string) (string, error) { - doc, err := goquery.NewDocumentFromReader(strings.NewReader(html)) - if err != nil { - return "", err - } - - text := doc.Find("body").Text() - text = strings.Join(strings.Fields(text), " ") - - return text, nil -} - -func convertHTMLToMarkdown(html string) (string, error) { - converter := md.NewConverter("", true, nil) - - markdown, err := converter.ConvertString(html) - if err != nil { - return "", err - } - - return markdown, nil -} diff --git a/tools/glob.go b/tools/glob.go deleted file mode 100644 index 
a643adbb2f7c8af1ff552b72ebd3a20695d9dc17..0000000000000000000000000000000000000000 --- a/tools/glob.go +++ /dev/null @@ -1,154 +0,0 @@ -package tools - -import ( - "bytes" - "context" - "fmt" - "log/slog" - "os/exec" - "path/filepath" - "sort" - "strings" - - "github.com/charmbracelet/crush/internal/ai" - "github.com/charmbracelet/crush/internal/fsext" -) - -type GlobParams struct { - Pattern string `json:"pattern" description:"The glob pattern to match files against"` - Path string `json:"path" description:"The directory to search in. Defaults to the current working directory."` -} - -type GlobResponseMetadata struct { - NumberOfFiles int `json:"number_of_files"` - Truncated bool `json:"truncated"` -} - -const ( - GlobToolName = "glob" -) - -func NewGlobTool(workingDir string) ai.AgentTool { - return ai.NewAgentTool( - GlobToolName, - `Fast file pattern matching tool that finds files by name and pattern, returning matching paths sorted by modification time (newest first). - -WHEN TO USE THIS TOOL: -- Use when you need to find files by name patterns or extensions -- Great for finding specific file types across a directory structure -- Useful for discovering files that match certain naming conventions - -HOW TO USE: -- Provide a glob pattern to match against file paths -- Optionally specify a starting directory (defaults to current working directory) -- Results are sorted with most recently modified files first - -GLOB PATTERN SYNTAX: -- '*' matches any sequence of non-separator characters -- '**' matches any sequence of characters, including separators -- '?' matches any single non-separator character -- '[...]' matches any character in the brackets -- '[!...]' matches any character not in the brackets - -COMMON PATTERN EXAMPLES: -- '*.js' - Find all JavaScript files in the current directory -- '**/*.js' - Find all JavaScript files in any subdirectory -- 'src/**/*.{ts,tsx}' - Find all TypeScript files in the src directory -- '*.{html,css,js}' - Find all HTML, CSS, and JS files - -LIMITATIONS: -- Results are limited to 100 files (newest first) -- Does not search file contents (use Grep tool for that) -- Hidden files (starting with '.') are skipped - -WINDOWS NOTES: -- Path separators are handled automatically (both / and \ work) -- Uses ripgrep (rg) command if available, otherwise falls back to built-in Go implementation - -TIPS: -- Patterns should use forward slashes (/) for cross-platform compatibility -- For the most useful results, combine with the Grep tool: first find files with Glob, then search their contents with Grep -- When doing iterative exploration that may require multiple rounds of searching, consider using the Agent tool instead -- Always check if results are truncated and refine your search pattern if needed`, - func(ctx context.Context, params GlobParams, call ai.ToolCall) (ai.ToolResponse, error) { - if params.Pattern == "" { - return ai.NewTextErrorResponse("pattern is required"), nil - } - - searchPath := params.Path - if searchPath == "" { - searchPath = workingDir - } - - files, truncated, err := globFiles(ctx, params.Pattern, searchPath, 100) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("error finding files: %w", err) - } - - var output string - if len(files) == 0 { - output = "No files found" - } else { - output = strings.Join(files, "\n") - if truncated { - output += "\n\n(Results are truncated. 
Consider using a more specific path or pattern.)" - } - } - - return ai.WithResponseMetadata( - ai.NewTextResponse(output), - GlobResponseMetadata{ - NumberOfFiles: len(files), - Truncated: truncated, - }, - ), nil - }) -} - -func globFiles(ctx context.Context, pattern, searchPath string, limit int) ([]string, bool, error) { - cmdRg := getRgCmd(ctx, pattern) - if cmdRg != nil { - cmdRg.Dir = searchPath - matches, err := runRipgrep(cmdRg, searchPath, limit) - if err == nil { - return matches, len(matches) >= limit && limit > 0, nil - } - slog.Warn("Ripgrep execution failed, falling back to doublestar", "error", err) - } - - return fsext.GlobWithDoubleStar(pattern, searchPath, limit) -} - -func runRipgrep(cmd *exec.Cmd, searchRoot string, limit int) ([]string, error) { - out, err := cmd.CombinedOutput() - if err != nil { - if ee, ok := err.(*exec.ExitError); ok && ee.ExitCode() == 1 { - return nil, nil - } - return nil, fmt.Errorf("ripgrep: %w\n%s", err, out) - } - - var matches []string - for p := range bytes.SplitSeq(out, []byte{0}) { - if len(p) == 0 { - continue - } - absPath := string(p) - if !filepath.IsAbs(absPath) { - absPath = filepath.Join(searchRoot, absPath) - } - if fsext.SkipHidden(absPath) { - continue - } - matches = append(matches, absPath) - } - - sort.SliceStable(matches, func(i, j int) bool { - return len(matches[i]) < len(matches[j]) - }) - - if limit > 0 && len(matches) > limit { - matches = matches[:limit] - } - return matches, nil -} diff --git a/tools/grep.go b/tools/grep.go deleted file mode 100644 index 7ac412314f5eafe1638449d3a36033e9f69220d2..0000000000000000000000000000000000000000 --- a/tools/grep.go +++ /dev/null @@ -1,436 +0,0 @@ -package tools - -import ( - "bufio" - "context" - "fmt" - "io" - "os" - "os/exec" - "path/filepath" - "regexp" - "sort" - "strconv" - "strings" - "sync" - "time" - - "github.com/charmbracelet/crush/internal/ai" - "github.com/charmbracelet/crush/internal/fsext" -) - -// regexCache provides thread-safe caching of compiled regex patterns -type regexCache struct { - cache map[string]*regexp.Regexp - mu sync.RWMutex -} - -// newRegexCache creates a new regex cache -func newRegexCache() *regexCache { - return ®exCache{ - cache: make(map[string]*regexp.Regexp), - } -} - -// get retrieves a compiled regex from cache or compiles and caches it -func (rc *regexCache) get(pattern string) (*regexp.Regexp, error) { - // Try to get from cache first (read lock) - rc.mu.RLock() - if regex, exists := rc.cache[pattern]; exists { - rc.mu.RUnlock() - return regex, nil - } - rc.mu.RUnlock() - - // Compile the regex (write lock) - rc.mu.Lock() - defer rc.mu.Unlock() - - // Double-check in case another goroutine compiled it while we waited - if regex, exists := rc.cache[pattern]; exists { - return regex, nil - } - - // Compile and cache the regex - regex, err := regexp.Compile(pattern) - if err != nil { - return nil, err - } - - rc.cache[pattern] = regex - return regex, nil -} - -// Global regex cache instances -var ( - searchRegexCache = newRegexCache() - globRegexCache = newRegexCache() - // Pre-compiled regex for glob conversion (used frequently) - globBraceRegex = regexp.MustCompile(`\{([^}]+)\}`) -) - -type GrepParams struct { - Pattern string `json:"pattern" description:"The regex pattern to search for in file contents"` - Path string `json:"path" description:"The directory to search in. Defaults to the current working directory."` - Include string `json:"include" description:"File pattern to include in the search (e.g. 
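For reference (not part of this patch): a small sketch of how the glob syntax described above behaves. It calls github.com/bmatcuk/doublestar/v4 directly; the name fsext.GlobWithDoubleStar suggests the deleted fallback wrapped the same package, but that mapping is an assumption.

// globdemo.go — pattern-matching behavior for the syntax listed above.
package main

import (
	"fmt"

	"github.com/bmatcuk/doublestar/v4"
)

func main() {
	cases := []struct{ pattern, path string }{
		{"*.js", "app.js"},            // matches: single path segment
		{"*.js", "src/app.js"},        // no match: '*' does not cross '/'
		{"**/*.js", "src/lib/app.js"}, // matches: '**' spans directories
		{"src/**/*.{ts,tsx}", "src/ui/Button.tsx"}, // brace alternation
	}

	for _, c := range cases {
		ok, err := doublestar.Match(c.pattern, c.path)
		if err != nil {
			panic(err)
		}
		fmt.Printf("%-22s vs %-22s -> %v\n", c.pattern, c.path, ok)
	}
}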
\"*.js\", \"*.{ts,tsx}\")"` - LiteralText bool `json:"literal_text" description:"If true, the pattern will be treated as literal text with special regex characters escaped. Default is false."` -} - -type grepMatch struct { - path string - modTime time.Time - lineNum int - lineText string -} - -type GrepResponseMetadata struct { - NumberOfMatches int `json:"number_of_matches"` - Truncated bool `json:"truncated"` -} - -const ( - GrepToolName = "grep" -) - -func NewGrepTool(workingDir string) ai.AgentTool { - return ai.NewAgentTool( - GrepToolName, - `Fast content search tool that finds files containing specific text or patterns, returning matching file paths sorted by modification time (newest first). - -WHEN TO USE THIS TOOL: -- Use when you need to find files containing specific text or patterns -- Great for searching code bases for function names, variable declarations, or error messages -- Useful for finding all files that use a particular API or pattern - -HOW TO USE: -- Provide a regex pattern to search for within file contents -- Set literal_text=true if you want to search for the exact text with special characters (recommended for non-regex users) -- Optionally specify a starting directory (defaults to current working directory) -- Optionally provide an include pattern to filter which files to search -- Results are sorted with most recently modified files first - -REGEX PATTERN SYNTAX (when literal_text=false): -- Supports standard regular expression syntax -- 'function' searches for the literal text "function" -- 'log\..*Error' finds text starting with "log." and ending with "Error" -- 'import\s+.*\s+from' finds import statements in JavaScript/TypeScript - -COMMON INCLUDE PATTERN EXAMPLES: -- '*.js' - Only search JavaScript files -- '*.{ts,tsx}' - Only search TypeScript files -- '*.go' - Only search Go files - -LIMITATIONS: -- Results are limited to 100 files (newest first) -- Performance depends on the number of files being searched -- Very large binary files may be skipped -- Hidden files (starting with '.') are skipped - -IGNORE FILE SUPPORT: -- Respects .gitignore patterns to skip ignored files and directories -- Respects .crushignore patterns for additional ignore rules -- Both ignore files are automatically detected in the search root directory - -CROSS-PLATFORM NOTES: -- Uses ripgrep (rg) command if available for better performance -- Falls back to built-in Go implementation if ripgrep is not available -- File paths are normalized automatically for cross-platform compatibility - -TIPS: -- For faster, more targeted searches, first use Glob to find relevant files, then use Grep -- When doing iterative exploration that may require multiple rounds of searching, consider using the Agent tool instead -- Always check if results are truncated and refine your search pattern if needed -- Use literal_text=true when searching for exact text containing special characters like dots, parentheses, etc.`, - func(ctx context.Context, params GrepParams, call ai.ToolCall) (ai.ToolResponse, error) { - if params.Pattern == "" { - return ai.NewTextErrorResponse("pattern is required"), nil - } - - // If literal_text is true, escape the pattern - searchPattern := params.Pattern - if params.LiteralText { - searchPattern = escapeRegexPattern(params.Pattern) - } - - searchPath := params.Path - if searchPath == "" { - searchPath = workingDir - } - - matches, truncated, err := searchFiles(ctx, searchPattern, searchPath, params.Include, 100) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("error 
searching files: %w", err) - } - - var output strings.Builder - if len(matches) == 0 { - output.WriteString("No files found") - } else { - fmt.Fprintf(&output, "Found %d matches\n", len(matches)) - - currentFile := "" - for _, match := range matches { - if currentFile != match.path { - if currentFile != "" { - output.WriteString("\n") - } - currentFile = match.path - fmt.Fprintf(&output, "%s:\n", match.path) - } - if match.lineNum > 0 { - fmt.Fprintf(&output, " Line %d: %s\n", match.lineNum, match.lineText) - } else { - fmt.Fprintf(&output, " %s\n", match.path) - } - } - - if truncated { - output.WriteString("\n(Results are truncated. Consider using a more specific path or pattern.)") - } - } - - return ai.WithResponseMetadata( - ai.NewTextResponse(output.String()), - GrepResponseMetadata{ - NumberOfMatches: len(matches), - Truncated: truncated, - }, - ), nil - }) -} - -// escapeRegexPattern escapes special regex characters so they're treated as literal characters -func escapeRegexPattern(pattern string) string { - specialChars := []string{"\\", ".", "+", "*", "?", "(", ")", "[", "]", "{", "}", "^", "$", "|"} - escaped := pattern - - for _, char := range specialChars { - escaped = strings.ReplaceAll(escaped, char, "\\"+char) - } - - return escaped -} - -func searchFiles(ctx context.Context, pattern, rootPath, include string, limit int) ([]grepMatch, bool, error) { - matches, err := searchWithRipgrep(ctx, pattern, rootPath, include) - if err != nil { - matches, err = searchFilesWithRegex(pattern, rootPath, include) - if err != nil { - return nil, false, err - } - } - - sort.Slice(matches, func(i, j int) bool { - return matches[i].modTime.After(matches[j].modTime) - }) - - truncated := len(matches) > limit - if truncated { - matches = matches[:limit] - } - - return matches, truncated, nil -} - -func searchWithRipgrep(ctx context.Context, pattern, path, include string) ([]grepMatch, error) { - cmd := getRgSearchCmd(ctx, pattern, path, include) - if cmd == nil { - return nil, fmt.Errorf("ripgrep not found in $PATH") - } - - cmd.Args = append( - cmd.Args, - "--ignore-file", filepath.Join(path, ".gitignore"), - "--ignore-file", filepath.Join(path, ".crushignore"), - ) - - output, err := cmd.Output() - if err != nil { - if exitErr, ok := err.(*exec.ExitError); ok && exitErr.ExitCode() == 1 { - return []grepMatch{}, nil - } - return nil, err - } - - lines := strings.Split(strings.TrimSpace(string(output)), "\n") - matches := make([]grepMatch, 0, len(lines)) - - for _, line := range lines { - if line == "" { - continue - } - - // Parse ripgrep output format: file:line:content - parts := strings.SplitN(line, ":", 3) - if len(parts) < 3 { - continue - } - - filePath := parts[0] - lineNum, err := strconv.Atoi(parts[1]) - if err != nil { - continue - } - lineText := parts[2] - - fileInfo, err := os.Stat(filePath) - if err != nil { - continue // Skip files we can't access - } - - matches = append(matches, grepMatch{ - path: filePath, - modTime: fileInfo.ModTime(), - lineNum: lineNum, - lineText: lineText, - }) - } - - return matches, nil -} - -func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error) { - matches := []grepMatch{} - - // Use cached regex compilation - regex, err := searchRegexCache.get(pattern) - if err != nil { - return nil, fmt.Errorf("invalid regex pattern: %w", err) - } - - var includePattern *regexp.Regexp - if include != "" { - regexPattern := globToRegex(include) - includePattern, err = globRegexCache.get(regexPattern) - if err != nil { - return nil, 
fmt.Errorf("invalid include pattern: %w", err) - } - } - - // Create walker with gitignore and crushignore support - walker := fsext.NewFastGlobWalker(rootPath) - - err = filepath.Walk(rootPath, func(path string, info os.FileInfo, err error) error { - if err != nil { - return nil // Skip errors - } - - if info.IsDir() { - return nil // Skip directories - } - - // Use walker's shouldSkip method instead of just SkipHidden - if walker.ShouldSkip(path) { - return nil - } - - if includePattern != nil && !includePattern.MatchString(path) { - return nil - } - - match, lineNum, lineText, err := fileContainsPattern(path, regex) - if err != nil { - return nil // Skip files we can't read - } - - if match { - matches = append(matches, grepMatch{ - path: path, - modTime: info.ModTime(), - lineNum: lineNum, - lineText: lineText, - }) - - if len(matches) >= 200 { - return filepath.SkipAll - } - } - - return nil - }) - if err != nil { - return nil, err - } - - return matches, nil -} - -func fileContainsPattern(filePath string, pattern *regexp.Regexp) (bool, int, string, error) { - // Quick binary file detection - if isBinaryFile(filePath) { - return false, 0, "", nil - } - - file, err := os.Open(filePath) - if err != nil { - return false, 0, "", err - } - defer file.Close() - - scanner := bufio.NewScanner(file) - lineNum := 0 - for scanner.Scan() { - lineNum++ - line := scanner.Text() - if pattern.MatchString(line) { - return true, lineNum, line, nil - } - } - - return false, 0, "", scanner.Err() -} - -var binaryExts = map[string]struct{}{ - ".exe": {}, ".dll": {}, ".so": {}, ".dylib": {}, - ".bin": {}, ".obj": {}, ".o": {}, ".a": {}, - ".zip": {}, ".tar": {}, ".gz": {}, ".bz2": {}, - ".jpg": {}, ".jpeg": {}, ".png": {}, ".gif": {}, - ".pdf": {}, ".doc": {}, ".docx": {}, ".xls": {}, - ".mp3": {}, ".mp4": {}, ".avi": {}, ".mov": {}, -} - -// isBinaryFile performs a quick check to determine if a file is binary -func isBinaryFile(filePath string) bool { - // Check file extension first (fastest) - ext := strings.ToLower(filepath.Ext(filePath)) - if _, isBinary := binaryExts[ext]; isBinary { - return true - } - - // Quick content check for files without clear extensions - file, err := os.Open(filePath) - if err != nil { - return false // If we can't open it, let the caller handle the error - } - defer file.Close() - - // Read first 512 bytes to check for null bytes - buffer := make([]byte, 512) - n, err := file.Read(buffer) - if err != nil && err != io.EOF { - return false - } - - // Check for null bytes (common in binary files) - for i := range n { - if buffer[i] == 0 { - return true - } - } - - return false -} - -func globToRegex(glob string) string { - regexPattern := strings.ReplaceAll(glob, ".", "\\.") - regexPattern = strings.ReplaceAll(regexPattern, "*", ".*") - regexPattern = strings.ReplaceAll(regexPattern, "?", ".") - - // Use pre-compiled regex instead of compiling each time - regexPattern = globBraceRegex.ReplaceAllStringFunc(regexPattern, func(match string) string { - inner := match[1 : len(match)-1] - return "(" + strings.ReplaceAll(inner, ",", "|") + ")" - }) - - return regexPattern -} diff --git a/tools/ls.go b/tools/ls.go deleted file mode 100644 index b01f2ca5873d945fe8fe2df10399306cf7acc2fe..0000000000000000000000000000000000000000 --- a/tools/ls.go +++ /dev/null @@ -1,266 +0,0 @@ -package tools - -import ( - "context" - "fmt" - "os" - "path/filepath" - "strings" - - "github.com/charmbracelet/crush/internal/ai" - "github.com/charmbracelet/crush/internal/fsext" - 
"github.com/charmbracelet/crush/internal/permission" -) - -const ( - MaxLSFiles = 1000 - LSToolName = "ls" -) - -type LSParams struct { - Path string `json:"path" description:"The path to the directory to list (defaults to current working directory)"` - Ignore []string `json:"ignore,omitempty" description:"List of glob patterns to ignore"` -} - -type LSPermissionsParams struct { - Path string `json:"path"` - Ignore []string `json:"ignore"` -} - -type TreeNode struct { - Name string `json:"name"` - Path string `json:"path"` - Type string `json:"type"` - Children []*TreeNode `json:"children,omitempty"` -} - -type LSResponseMetadata struct { - NumberOfFiles int `json:"number_of_files"` - Truncated bool `json:"truncated"` -} - -func NewLSTool(permissions permission.Service, workingDir string) ai.AgentTool { - return ai.NewAgentTool( - LSToolName, - `Directory listing tool that shows files and subdirectories in a tree structure, helping you explore and understand the project organization. - -WHEN TO USE THIS TOOL: -- Use when you need to explore the structure of a directory -- Helpful for understanding the organization of a project -- Good first step when getting familiar with a new codebase - -HOW TO USE: -- Provide a path to list (defaults to current working directory) -- Optionally specify glob patterns to ignore -- Results are displayed in a tree structure - -FEATURES: -- Displays a hierarchical view of files and directories -- Automatically skips hidden files/directories (starting with '.') -- Skips common system directories like __pycache__ -- Can filter out files matching specific patterns - -LIMITATIONS: -- Results are limited to 1000 files -- Very large directories will be truncated -- Does not show file sizes or permissions -- Cannot recursively list all directories in a large project - -WINDOWS NOTES: -- Hidden file detection uses Unix convention (files starting with '.') -- Windows-specific hidden files (with hidden attribute) are not automatically skipped -- Common Windows directories like System32, Program Files are not in default ignore list -- Path separators are handled automatically (both / and \ work) - -TIPS: -- Use Glob tool for finding files by name patterns instead of browsing -- Use Grep tool for searching file contents -- Combine with other tools for more effective exploration`, - func(ctx context.Context, params LSParams, call ai.ToolCall) (ai.ToolResponse, error) { - searchPath := params.Path - if searchPath == "" { - searchPath = workingDir - } - - var err error - searchPath, err = fsext.Expand(searchPath) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("error expanding path: %w", err) - } - - if !filepath.IsAbs(searchPath) { - searchPath = filepath.Join(workingDir, searchPath) - } - - // Check if directory is outside working directory and request permission if needed - absWorkingDir, err := filepath.Abs(workingDir) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("error resolving working directory: %w", err) - } - - absSearchPath, err := filepath.Abs(searchPath) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("error resolving search path: %w", err) - } - - relPath, err := filepath.Rel(absWorkingDir, absSearchPath) - if err != nil || strings.HasPrefix(relPath, "..") { - - sessionID, messageID := GetContextValues(ctx) - if sessionID == "" || messageID == "" { - return ai.ToolResponse{}, fmt.Errorf("session ID and message ID are required for the ls tool") - } - granted := permissions.Request( - permission.CreatePermissionRequest{ - SessionID: 
sessionID, - ToolCallID: call.ID, - ToolName: LSToolName, - Path: absSearchPath, - Action: "list", - Description: fmt.Sprintf("List directory outside working directory: %s", absSearchPath), - Params: LSPermissionsParams(params), - }) - - if !granted { - return ai.ToolResponse{}, permission.ErrorPermissionDenied - } - } - output, err := ListDirectoryTree(searchPath, params.Ignore) - if err != nil { - return ai.ToolResponse{}, err - } - - // Get file count for metadata - files, truncated, err := fsext.ListDirectory(searchPath, params.Ignore, MaxLSFiles) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("error listing directory for metadata: %w", err) - } - - return ai.WithResponseMetadata( - ai.NewTextResponse(output), - LSResponseMetadata{ - NumberOfFiles: len(files), - Truncated: truncated, - }, - ), nil - }, - ) -} - -func ListDirectoryTree(searchPath string, ignore []string) (string, error) { - if _, err := os.Stat(searchPath); os.IsNotExist(err) { - return "", fmt.Errorf("path does not exist: %s", searchPath) - } - - files, truncated, err := fsext.ListDirectory(searchPath, ignore, MaxLSFiles) - if err != nil { - return "", fmt.Errorf("error listing directory: %w", err) - } - - tree := createFileTree(files, searchPath) - output := printTree(tree, searchPath) - - if truncated { - output = fmt.Sprintf("There are more than %d files in the directory. Use a more specific path or use the Glob tool to find specific files. The first %d files and directories are included below:\n\n%s", MaxLSFiles, MaxLSFiles, output) - } - - return output, nil -} - -func createFileTree(sortedPaths []string, rootPath string) []*TreeNode { - root := []*TreeNode{} - pathMap := make(map[string]*TreeNode) - - for _, path := range sortedPaths { - relativePath := strings.TrimPrefix(path, rootPath) - parts := strings.Split(relativePath, string(filepath.Separator)) - currentPath := "" - var parentPath string - - var cleanParts []string - for _, part := range parts { - if part != "" { - cleanParts = append(cleanParts, part) - } - } - parts = cleanParts - - if len(parts) == 0 { - continue - } - - for i, part := range parts { - if currentPath == "" { - currentPath = part - } else { - currentPath = filepath.Join(currentPath, part) - } - - if _, exists := pathMap[currentPath]; exists { - parentPath = currentPath - continue - } - - isLastPart := i == len(parts)-1 - isDir := !isLastPart || strings.HasSuffix(relativePath, string(filepath.Separator)) - nodeType := "file" - if isDir { - nodeType = "directory" - } - newNode := &TreeNode{ - Name: part, - Path: currentPath, - Type: nodeType, - Children: []*TreeNode{}, - } - - pathMap[currentPath] = newNode - - if i > 0 && parentPath != "" { - if parent, ok := pathMap[parentPath]; ok { - parent.Children = append(parent.Children, newNode) - } - } else { - root = append(root, newNode) - } - - parentPath = currentPath - } - } - - return root -} - -func printTree(tree []*TreeNode, rootPath string) string { - var result strings.Builder - - result.WriteString("- ") - result.WriteString(rootPath) - if rootPath[len(rootPath)-1] != '/' { - result.WriteByte(filepath.Separator) - } - result.WriteByte('\n') - - for _, node := range tree { - printNode(&result, node, 1) - } - - return result.String() -} - -func printNode(builder *strings.Builder, node *TreeNode, level int) { - indent := strings.Repeat(" ", level) - - nodeName := node.Name - if node.Type == "directory" { - nodeName = nodeName + string(filepath.Separator) - } - - fmt.Fprintf(builder, "%s- %s\n", indent, nodeName) - - if 
node.Type == "directory" && len(node.Children) > 0 { - for _, child := range node.Children { - printNode(builder, child, level+1) - } - } -} diff --git a/tools/multiedit.go b/tools/multiedit.go deleted file mode 100644 index 6630cf43317e6e2c54c31aa6b32a8e264e580b1c..0000000000000000000000000000000000000000 --- a/tools/multiedit.go +++ /dev/null @@ -1,401 +0,0 @@ -package tools - -import ( - "context" - "fmt" - "log/slog" - "os" - "path/filepath" - "strings" - "time" - - "github.com/charmbracelet/crush/internal/ai" - "github.com/charmbracelet/crush/internal/diff" - "github.com/charmbracelet/crush/internal/fsext" - "github.com/charmbracelet/crush/internal/history" - "github.com/charmbracelet/crush/internal/lsp" - "github.com/charmbracelet/crush/internal/permission" -) - -type MultiEditOperation struct { - OldString string `json:"old_string" description:"The text to replace"` - NewString string `json:"new_string" description:"The text to replace it with"` - ReplaceAll bool `json:"replace_all,omitempty" description:"Replace all occurrences of old_string (default false)."` -} - -type MultiEditParams struct { - FilePath string `json:"file_path" description:"The absolute path to the file to modify"` - Edits []MultiEditOperation `json:"edits" description:"Array of edit operations to perform sequentially on the file"` -} - -type MultiEditPermissionsParams struct { - FilePath string `json:"file_path"` - OldContent string `json:"old_content,omitempty"` - NewContent string `json:"new_content,omitempty"` -} - -type MultiEditResponseMetadata struct { - Additions int `json:"additions"` - Removals int `json:"removals"` - OldContent string `json:"old_content,omitempty"` - NewContent string `json:"new_content,omitempty"` - EditsApplied int `json:"edits_applied"` -} - -const ( - MultiEditToolName = "multiedit" -) - -func NewMultiEditTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service, workingDir string) ai.AgentTool { - return ai.NewAgentTool( - MultiEditToolName, - `This is a tool for making multiple edits to a single file in one operation. It is built on top of the Edit tool and allows you to perform multiple find-and-replace operations efficiently. Prefer this tool over the Edit tool when you need to make multiple edits to the same file. - -Before using this tool: - -1. Use the Read tool to understand the file's contents and context - -2. Verify the directory path is correct - -To make multiple file edits, provide the following: -1. file_path: The absolute path to the file to modify (must be absolute, not relative) -2. edits: An array of edit operations to perform, where each edit contains: - - old_string: The text to replace (must match the file contents exactly, including all whitespace and indentation) - - new_string: The edited text to replace the old_string - - replace_all: Replace all occurrences of old_string. This parameter is optional and defaults to false. - -IMPORTANT: -- All edits are applied in sequence, in the order they are provided -- Each edit operates on the result of the previous edit -- All edits must be valid for the operation to succeed - if any edit fails, none will be applied -- This tool is ideal when you need to make several changes to different parts of the same file - -CRITICAL REQUIREMENTS: -1. All edits follow the same requirements as the single Edit tool -2. The edits are atomic - either all succeed or none are applied -3. 
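For reference (not part of this patch): a minimal sketch of the directory listing described above — walk a tree, skip dot-prefixed entries, and print the same indented "- name/" shape. It uses only the standard library; the 1000-file cap and ignore patterns are omitted.

// lsdemo.go — indented tree listing that skips hidden entries.
package main

import (
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	root := "." // placeholder; the tool defaults to the working directory

	err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return nil // skip unreadable entries, as the tool does
		}
		if path == root {
			fmt.Printf("- %s%c\n", root, filepath.Separator)
			return nil
		}
		// Hidden files/directories (Unix convention) are skipped entirely.
		if strings.HasPrefix(d.Name(), ".") {
			if d.IsDir() {
				return filepath.SkipDir
			}
			return nil
		}
		rel, _ := filepath.Rel(root, path)
		depth := len(strings.Split(rel, string(filepath.Separator)))
		name := d.Name()
		if d.IsDir() {
			name += string(filepath.Separator)
		}
		fmt.Printf("%s- %s\n", strings.Repeat("  ", depth), name)
		return nil
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}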
Plan your edits carefully to avoid conflicts between sequential operations - -WARNING: -- The tool will fail if edits.old_string doesn't match the file contents exactly (including whitespace) -- The tool will fail if edits.old_string and edits.new_string are the same -- Since edits are applied in sequence, ensure that earlier edits don't affect the text that later edits are trying to find - -When making edits: -- Ensure all edits result in idiomatic, correct code -- Do not leave the code in a broken state -- Always use absolute file paths (starting with /) -- Only use emojis if the user explicitly requests it. Avoid adding emojis to files unless asked. -- Use replace_all for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance. - -If you want to create a new file, use: -- A new file path, including dir name if needed -- First edit: empty old_string and the new file's contents as new_string -- Subsequent edits: normal edit operations on the created content`, - func(ctx context.Context, params MultiEditParams, call ai.ToolCall) (ai.ToolResponse, error) { - if params.FilePath == "" { - return ai.NewTextErrorResponse("file_path is required"), nil - } - - if len(params.Edits) == 0 { - return ai.NewTextErrorResponse("at least one edit operation is required"), nil - } - - if !filepath.IsAbs(params.FilePath) { - params.FilePath = filepath.Join(workingDir, params.FilePath) - } - - // Validate all edits before applying any - if err := validateEdits(params.Edits); err != nil { - return ai.NewTextErrorResponse(err.Error()), nil - } - - var response ai.ToolResponse - var err error - - // Handle file creation case (first edit has empty old_string) - if len(params.Edits) > 0 && params.Edits[0].OldString == "" { - response, err = processMultiEditWithCreation(ctx, params, call, permissions, files, workingDir) - } else { - response, err = processMultiEditExistingFile(ctx, params, call, permissions, files, workingDir) - } - - if err != nil { - return response, err - } - - if response.IsError { - return response, nil - } - - // Wait for LSP diagnostics and add them to the response - waitForLspDiagnostics(ctx, params.FilePath, lspClients) - text := fmt.Sprintf("\n%s\n\n", response.Content) - text += getDiagnostics(params.FilePath, lspClients) - response.Content = text - return response, nil - }) -} - -func validateEdits(edits []MultiEditOperation) error { - for i, edit := range edits { - if edit.OldString == edit.NewString { - return fmt.Errorf("edit %d: old_string and new_string are identical", i+1) - } - // Only the first edit can have empty old_string (for file creation) - if i > 0 && edit.OldString == "" { - return fmt.Errorf("edit %d: only the first edit can have empty old_string (for file creation)", i+1) - } - } - return nil -} - -func processMultiEditWithCreation(ctx context.Context, params MultiEditParams, call ai.ToolCall, permissions permission.Service, files history.Service, workingDir string) (ai.ToolResponse, error) { - // First edit creates the file - firstEdit := params.Edits[0] - if firstEdit.OldString != "" { - return ai.NewTextErrorResponse("first edit must have empty old_string for file creation"), nil - } - - // Check if file already exists - if _, err := os.Stat(params.FilePath); err == nil { - return ai.NewTextErrorResponse(fmt.Sprintf("file already exists: %s", params.FilePath)), nil - } else if !os.IsNotExist(err) { - return ai.ToolResponse{}, fmt.Errorf("failed to access file: %w", err) - } - - // Create parent 
directories - dir := filepath.Dir(params.FilePath) - if err := os.MkdirAll(dir, 0o755); err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to create parent directories: %w", err) - } - - // Start with the content from the first edit - currentContent := firstEdit.NewString - - // Apply remaining edits to the content - for i := 1; i < len(params.Edits); i++ { - edit := params.Edits[i] - newContent, err := applyEditToContent(currentContent, edit) - if err != nil { - return ai.NewTextErrorResponse(fmt.Sprintf("edit %d failed: %s", i+1, err.Error())), nil - } - currentContent = newContent - } - - // Get session and message IDs - sessionID, messageID := GetContextValues(ctx) - if sessionID == "" || messageID == "" { - return ai.ToolResponse{}, fmt.Errorf("session ID and message ID are required for creating a new file") - } - - // Check permissions - _, additions, removals := diff.GenerateDiff("", currentContent, strings.TrimPrefix(params.FilePath, workingDir)) - - granted := permissions.Request(permission.CreatePermissionRequest{ - SessionID: sessionID, - Path: fsext.PathOrPrefix(params.FilePath, workingDir), - ToolCallID: call.ID, - ToolName: MultiEditToolName, - Action: "write", - Description: fmt.Sprintf("Create file %s with %d edits", params.FilePath, len(params.Edits)), - Params: MultiEditPermissionsParams{ - FilePath: params.FilePath, - OldContent: "", - NewContent: currentContent, - }, - }) - if !granted { - return ai.ToolResponse{}, permission.ErrorPermissionDenied - } - - // Write the file - err := os.WriteFile(params.FilePath, []byte(currentContent), 0o644) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to write file: %w", err) - } - - // Update file history - _, err = files.Create(ctx, sessionID, params.FilePath, "") - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("error creating file history: %w", err) - } - - _, err = files.CreateVersion(ctx, sessionID, params.FilePath, currentContent) - if err != nil { - slog.Debug("Error creating file history version", "error", err) - } - - recordFileWrite(params.FilePath) - recordFileRead(params.FilePath) - - return ai.WithResponseMetadata( - ai.NewTextResponse(fmt.Sprintf("File created with %d edits: %s", len(params.Edits), params.FilePath)), - MultiEditResponseMetadata{ - OldContent: "", - NewContent: currentContent, - Additions: additions, - Removals: removals, - EditsApplied: len(params.Edits), - }, - ), nil -} - -func processMultiEditExistingFile(ctx context.Context, params MultiEditParams, call ai.ToolCall, permissions permission.Service, files history.Service, workingDir string) (ai.ToolResponse, error) { - // Validate file exists and is readable - fileInfo, err := os.Stat(params.FilePath) - if err != nil { - if os.IsNotExist(err) { - return ai.NewTextErrorResponse(fmt.Sprintf("file not found: %s", params.FilePath)), nil - } - return ai.ToolResponse{}, fmt.Errorf("failed to access file: %w", err) - } - - if fileInfo.IsDir() { - return ai.NewTextErrorResponse(fmt.Sprintf("path is a directory, not a file: %s", params.FilePath)), nil - } - - // Check if file was read before editing - if getLastReadTime(params.FilePath).IsZero() { - return ai.NewTextErrorResponse("you must read the file before editing it. 
Use the View tool first"), nil - } - - // Check if file was modified since last read - modTime := fileInfo.ModTime() - lastRead := getLastReadTime(params.FilePath) - if modTime.After(lastRead) { - return ai.NewTextErrorResponse( - fmt.Sprintf("file %s has been modified since it was last read (mod time: %s, last read: %s)", - params.FilePath, modTime.Format(time.RFC3339), lastRead.Format(time.RFC3339), - )), nil - } - - // Read current file content - content, err := os.ReadFile(params.FilePath) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to read file: %w", err) - } - - oldContent, isCrlf := fsext.ToUnixLineEndings(string(content)) - currentContent := oldContent - - // Apply all edits sequentially - for i, edit := range params.Edits { - newContent, err := applyEditToContent(currentContent, edit) - if err != nil { - return ai.NewTextErrorResponse(fmt.Sprintf("edit %d failed: %s", i+1, err.Error())), nil - } - currentContent = newContent - } - - // Check if content actually changed - if oldContent == currentContent { - return ai.NewTextErrorResponse("no changes made - all edits resulted in identical content"), nil - } - - // Get session and message IDs - sessionID, messageID := GetContextValues(ctx) - if sessionID == "" || messageID == "" { - return ai.ToolResponse{}, fmt.Errorf("session ID and message ID are required for editing file") - } - - // Generate diff and check permissions - _, additions, removals := diff.GenerateDiff(oldContent, currentContent, strings.TrimPrefix(params.FilePath, workingDir)) - granted := permissions.Request(permission.CreatePermissionRequest{ - SessionID: sessionID, - Path: fsext.PathOrPrefix(params.FilePath, workingDir), - ToolCallID: call.ID, - ToolName: MultiEditToolName, - Action: "write", - Description: fmt.Sprintf("Apply %d edits to file %s", len(params.Edits), params.FilePath), - Params: MultiEditPermissionsParams{ - FilePath: params.FilePath, - OldContent: oldContent, - NewContent: currentContent, - }, - }) - if !granted { - return ai.ToolResponse{}, permission.ErrorPermissionDenied - } - - if isCrlf { - currentContent, _ = fsext.ToWindowsLineEndings(currentContent) - } - - // Write the updated content - err = os.WriteFile(params.FilePath, []byte(currentContent), 0o644) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to write file: %w", err) - } - - // Update file history - file, err := files.GetByPathAndSession(ctx, params.FilePath, sessionID) - if err != nil { - _, err = files.Create(ctx, sessionID, params.FilePath, oldContent) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("error creating file history: %w", err) - } - } - if file.Content != oldContent { - // User manually changed the content, store an intermediate version - _, err = files.CreateVersion(ctx, sessionID, params.FilePath, oldContent) - if err != nil { - slog.Debug("Error creating file history version", "error", err) - } - } - - // Store the new version - _, err = files.CreateVersion(ctx, sessionID, params.FilePath, currentContent) - if err != nil { - slog.Debug("Error creating file history version", "error", err) - } - - recordFileWrite(params.FilePath) - recordFileRead(params.FilePath) - - return ai.WithResponseMetadata( - ai.NewTextResponse(fmt.Sprintf("Applied %d edits to file: %s", len(params.Edits), params.FilePath)), - MultiEditResponseMetadata{ - OldContent: oldContent, - NewContent: currentContent, - Additions: additions, - Removals: removals, - EditsApplied: len(params.Edits), - }, - ), nil -} - -func applyEditToContent(content string, 
edit MultiEditOperation) (string, error) { - if edit.OldString == "" && edit.NewString == "" { - return content, nil - } - - if edit.OldString == "" { - return "", fmt.Errorf("old_string cannot be empty for content replacement") - } - - var newContent string - var replacementCount int - - if edit.ReplaceAll { - newContent = strings.ReplaceAll(content, edit.OldString, edit.NewString) - replacementCount = strings.Count(content, edit.OldString) - if replacementCount == 0 { - return "", fmt.Errorf("old_string not found in content. Make sure it matches exactly, including whitespace and line breaks") - } - } else { - index := strings.Index(content, edit.OldString) - if index == -1 { - return "", fmt.Errorf("old_string not found in content. Make sure it matches exactly, including whitespace and line breaks") - } - - lastIndex := strings.LastIndex(content, edit.OldString) - if index != lastIndex { - return "", fmt.Errorf("old_string appears multiple times in the content. Please provide more context to ensure a unique match, or set replace_all to true") - } - - newContent = content[:index] + edit.NewString + content[index+len(edit.OldString):] - replacementCount = 1 - } - - return newContent, nil -} diff --git a/tools/rg.go b/tools/rg.go deleted file mode 100644 index 40ab7f2f520697659e3ef092a7ff3e96b2c3c47c..0000000000000000000000000000000000000000 --- a/tools/rg.go +++ /dev/null @@ -1,53 +0,0 @@ -package tools - -import ( - "context" - "log/slog" - "os/exec" - "path/filepath" - "strings" - "sync" - - "github.com/charmbracelet/crush/internal/log" -) - -var getRg = sync.OnceValue(func() string { - path, err := exec.LookPath("rg") - if err != nil { - if log.Initialized() { - slog.Warn("Ripgrep (rg) not found in $PATH. Some grep features might be limited or slower.") - } - return "" - } - return path -}) - -func getRgCmd(ctx context.Context, globPattern string) *exec.Cmd { - name := getRg() - if name == "" { - return nil - } - args := []string{"--files", "-L", "--null"} - if globPattern != "" { - if !filepath.IsAbs(globPattern) && !strings.HasPrefix(globPattern, "/") { - globPattern = "/" + globPattern - } - args = append(args, "--glob", globPattern) - } - return exec.CommandContext(ctx, name, args...) -} - -func getRgSearchCmd(ctx context.Context, pattern, path, include string) *exec.Cmd { - name := getRg() - if name == "" { - return nil - } - // Use -n to show line numbers and include the matched line - args := []string{"-H", "-n", pattern} - if include != "" { - args = append(args, "--glob", include) - } - args = append(args, path) - - return exec.CommandContext(ctx, name, args...) 
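For reference (not part of this patch): a sketch of the sequential, all-or-nothing edit semantics that the multiedit description above specifies, applied to an in-memory string with the same exact-match and uniqueness rules as applyEditToContent.

// multieditdemo.go — sequential edits; any failure leaves the original untouched.
package main

import (
	"fmt"
	"strings"
)

type edit struct {
	oldStr, newStr string
	replaceAll     bool
}

// apply runs the edits in order; on error the caller keeps the original content.
func apply(content string, edits []edit) (string, error) {
	for i, e := range edits {
		if e.replaceAll {
			if !strings.Contains(content, e.oldStr) {
				return "", fmt.Errorf("edit %d: old_string not found", i+1)
			}
			content = strings.ReplaceAll(content, e.oldStr, e.newStr)
			continue
		}
		first := strings.Index(content, e.oldStr)
		if first == -1 {
			return "", fmt.Errorf("edit %d: old_string not found", i+1)
		}
		if first != strings.LastIndex(content, e.oldStr) {
			return "", fmt.Errorf("edit %d: old_string is not unique; add context or set replace_all", i+1)
		}
		content = content[:first] + e.newStr + content[first+len(e.oldStr):]
	}
	return content, nil
}

func main() {
	src := "func oldName() {\n\toldName()\n}\n"
	out, err := apply(src, []edit{
		{oldStr: "oldName", newStr: "newName", replaceAll: true}, // rename everywhere
		{oldStr: "func newName() {", newStr: "// renamed\nfunc newName() {"},
	})
	if err != nil {
		fmt.Println("no changes applied:", err)
		return
	}
	fmt.Print(out)
}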
-} diff --git a/tools/sourcegraph.go b/tools/sourcegraph.go deleted file mode 100644 index 04f5193525d73af40d4b29352a72b398df1ba4d1..0000000000000000000000000000000000000000 --- a/tools/sourcegraph.go +++ /dev/null @@ -1,356 +0,0 @@ -package tools - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "io" - "net/http" - "strings" - "time" - - "github.com/charmbracelet/crush/internal/ai" -) - -type SourcegraphParams struct { - Query string `json:"query" description:"The Sourcegraph search query"` - Count int `json:"count,omitempty" description:"Optional number of results to return (default: 10, max: 20)"` - ContextWindow int `json:"context_window,omitempty" description:"The context around the match to return (default: 10 lines)"` - Timeout int `json:"timeout,omitempty" description:"Optional timeout in seconds (max 120)"` -} - -type SourcegraphResponseMetadata struct { - NumberOfMatches int `json:"number_of_matches"` - Truncated bool `json:"truncated"` -} - -const ( - SourcegraphToolName = "sourcegraph" -) - -func NewSourcegraphTool() ai.AgentTool { - client := &http.Client{ - Timeout: 30 * time.Second, - Transport: &http.Transport{ - MaxIdleConns: 100, - MaxIdleConnsPerHost: 10, - IdleConnTimeout: 90 * time.Second, - }, - } - return ai.NewAgentTool( - SourcegraphToolName, - `Search code across public repositories using Sourcegraph's GraphQL API. - -WHEN TO USE THIS TOOL: -- Use when you need to find code examples or implementations across public repositories -- Helpful for researching how others have solved similar problems -- Useful for discovering patterns and best practices in open source code - -HOW TO USE: -- Provide a search query using Sourcegraph's query syntax -- Optionally specify the number of results to return (default: 10) -- Optionally set a timeout for the request - -QUERY SYNTAX: -- Basic search: "fmt.Println" searches for exact matches -- File filters: "file:.go fmt.Println" limits to Go files -- Repository filters: "repo:^github\.com/golang/go$ fmt.Println" limits to specific repos -- Language filters: "lang:go fmt.Println" limits to Go code -- Boolean operators: "fmt.Println AND log.Fatal" for combined terms -- Regular expressions: "fmt\.(Print|Printf|Println)" for pattern matching -- Quoted strings: "\"exact phrase\"" for exact phrase matching -- Exclude filters: "-file:test" or "-repo:forks" to exclude matches - -ADVANCED FILTERS: -- Repository filters: - * "repo:name" - Match repositories with name containing "name" - * "repo:^github\.com/org/repo$" - Exact repository match - * "repo:org/repo@branch" - Search specific branch - * "repo:org/repo rev:branch" - Alternative branch syntax - * "-repo:name" - Exclude repositories - * "fork:yes" or "fork:only" - Include or only show forks - * "archived:yes" or "archived:only" - Include or only show archived repos - * "visibility:public" or "visibility:private" - Filter by visibility - -- File filters: - * "file:\.js$" - Files with .js extension - * "file:internal/" - Files in internal directory - * "-file:test" - Exclude test files - * "file:has.content(Copyright)" - Files containing "Copyright" - * "file:has.contributor([email protected])" - Files with specific contributor - -- Content filters: - * "content:\"exact string\"" - Search for exact string - * "-content:\"unwanted\"" - Exclude files with unwanted content - * "case:yes" - Case-sensitive search - -- Type filters: - * "type:symbol" - Search for symbols (functions, classes, etc.) 
- * "type:file" - Search file content only - * "type:path" - Search filenames only - * "type:diff" - Search code changes - * "type:commit" - Search commit messages - -- Commit/diff search: - * "after:\"1 month ago\"" - Commits after date - * "before:\"2023-01-01\"" - Commits before date - * "author:name" - Commits by author - * "message:\"fix bug\"" - Commits with message - -- Result selection: - * "select:repo" - Show only repository names - * "select:file" - Show only file paths - * "select:content" - Show only matching content - * "select:symbol" - Show only matching symbols - -- Result control: - * "count:100" - Return up to 100 results - * "count:all" - Return all results - * "timeout:30s" - Set search timeout - -EXAMPLES: -- "file:.go context.WithTimeout" - Find Go code using context.WithTimeout -- "lang:typescript useState type:symbol" - Find TypeScript React useState hooks -- "repo:^github\.com/kubernetes/kubernetes$ pod list type:file" - Find Kubernetes files related to pod listing -- "repo:sourcegraph/sourcegraph$ after:\"3 months ago\" type:diff database" - Recent changes to database code -- "file:Dockerfile (alpine OR ubuntu) -content:alpine:latest" - Dockerfiles with specific base images -- "repo:has.path(\.py) file:requirements.txt tensorflow" - Python projects using TensorFlow - -BOOLEAN OPERATORS: -- "term1 AND term2" - Results containing both terms -- "term1 OR term2" - Results containing either term -- "term1 NOT term2" - Results with term1 but not term2 -- "term1 and (term2 or term3)" - Grouping with parentheses - -LIMITATIONS: -- Only searches public repositories -- Rate limits may apply -- Complex queries may take longer to execute -- Maximum of 20 results per query - -TIPS: -- Use specific file extensions to narrow results -- Add repo: filters for more targeted searches -- Use type:symbol to find function/method definitions -- Use type:file to find relevant files`, - func(ctx context.Context, params SourcegraphParams, call ai.ToolCall) (ai.ToolResponse, error) { - if params.Query == "" { - return ai.NewTextErrorResponse("Query parameter is required"), nil - } - - if params.Count <= 0 { - params.Count = 10 - } else if params.Count > 20 { - params.Count = 20 // Limit to 20 results - } - - if params.ContextWindow <= 0 { - params.ContextWindow = 10 // Default context window - } - - // Handle timeout with context - requestCtx := ctx - if params.Timeout > 0 { - maxTimeout := 120 // 2 minutes - if params.Timeout > maxTimeout { - params.Timeout = maxTimeout - } - var cancel context.CancelFunc - requestCtx, cancel = context.WithTimeout(ctx, time.Duration(params.Timeout)*time.Second) - defer cancel() - } - - type graphqlRequest struct { - Query string `json:"query"` - Variables struct { - Query string `json:"query"` - } `json:"variables"` - } - - request := graphqlRequest{ - Query: "query Search($query: String!) { search(query: $query, version: V2, patternType: keyword ) { results { matchCount, limitHit, resultCount, approximateResultCount, missing { name }, timedout { name }, indexUnavailable, results { __typename, ... 
on FileMatch { repository { name }, file { path, url, content }, lineMatches { preview, lineNumber, offsetAndLengths } } } } } }", - } - request.Variables.Query = params.Query - - graphqlQueryBytes, err := json.Marshal(request) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to marshal GraphQL request: %w", err) - } - graphqlQuery := string(graphqlQueryBytes) - - req, err := http.NewRequestWithContext( - requestCtx, - "POST", - "https://sourcegraph.com/.api/graphql", - bytes.NewBuffer([]byte(graphqlQuery)), - ) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to create request: %w", err) - } - - req.Header.Set("Content-Type", "application/json") - req.Header.Set("User-Agent", "crush/1.0") - - resp, err := client.Do(req) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to fetch URL: %w", err) - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - body, _ := io.ReadAll(resp.Body) - if len(body) > 0 { - return ai.NewTextErrorResponse(fmt.Sprintf("Request failed with status code: %d, response: %s", resp.StatusCode, string(body))), nil - } - - return ai.NewTextErrorResponse(fmt.Sprintf("Request failed with status code: %d", resp.StatusCode)), nil - } - body, err := io.ReadAll(resp.Body) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to read response body: %w", err) - } - - var result map[string]any - if err = json.Unmarshal(body, &result); err != nil { - return ai.ToolResponse{}, fmt.Errorf("failed to unmarshal response: %w", err) - } - - formattedResults, err := formatSourcegraphResults(result, params.ContextWindow) - if err != nil { - return ai.NewTextErrorResponse("Failed to format results: " + err.Error()), nil - } - - return ai.NewTextResponse(formattedResults), nil - }) -} - -func formatSourcegraphResults(result map[string]any, contextWindow int) (string, error) { - var buffer strings.Builder - - if errors, ok := result["errors"].([]any); ok && len(errors) > 0 { - buffer.WriteString("## Sourcegraph API Error\n\n") - for _, err := range errors { - if errMap, ok := err.(map[string]any); ok { - if message, ok := errMap["message"].(string); ok { - buffer.WriteString(fmt.Sprintf("- %s\n", message)) - } - } - } - return buffer.String(), nil - } - - data, ok := result["data"].(map[string]any) - if !ok { - return "", fmt.Errorf("invalid response format: missing data field") - } - - search, ok := data["search"].(map[string]any) - if !ok { - return "", fmt.Errorf("invalid response format: missing search field") - } - - searchResults, ok := search["results"].(map[string]any) - if !ok { - return "", fmt.Errorf("invalid response format: missing results field") - } - - matchCount, _ := searchResults["matchCount"].(float64) - resultCount, _ := searchResults["resultCount"].(float64) - limitHit, _ := searchResults["limitHit"].(bool) - - buffer.WriteString("# Sourcegraph Search Results\n\n") - buffer.WriteString(fmt.Sprintf("Found %d matches across %d results\n", int(matchCount), int(resultCount))) - - if limitHit { - buffer.WriteString("(Result limit reached, try a more specific query)\n") - } - - buffer.WriteString("\n") - - results, ok := searchResults["results"].([]any) - if !ok || len(results) == 0 { - buffer.WriteString("No results found. 
Try a different query.\n") - return buffer.String(), nil - } - - maxResults := 10 - if len(results) > maxResults { - results = results[:maxResults] - } - - for i, res := range results { - fileMatch, ok := res.(map[string]any) - if !ok { - continue - } - - typeName, _ := fileMatch["__typename"].(string) - if typeName != "FileMatch" { - continue - } - - repo, _ := fileMatch["repository"].(map[string]any) - file, _ := fileMatch["file"].(map[string]any) - lineMatches, _ := fileMatch["lineMatches"].([]any) - - if repo == nil || file == nil { - continue - } - - repoName, _ := repo["name"].(string) - filePath, _ := file["path"].(string) - fileURL, _ := file["url"].(string) - fileContent, _ := file["content"].(string) - - buffer.WriteString(fmt.Sprintf("## Result %d: %s/%s\n\n", i+1, repoName, filePath)) - - if fileURL != "" { - buffer.WriteString(fmt.Sprintf("URL: %s\n\n", fileURL)) - } - - if len(lineMatches) > 0 { - for _, lm := range lineMatches { - lineMatch, ok := lm.(map[string]any) - if !ok { - continue - } - - lineNumber, _ := lineMatch["lineNumber"].(float64) - preview, _ := lineMatch["preview"].(string) - - if fileContent != "" { - lines := strings.Split(fileContent, "\n") - - buffer.WriteString("```\n") - - startLine := max(1, int(lineNumber)-contextWindow) - - for j := startLine - 1; j < int(lineNumber)-1 && j < len(lines); j++ { - if j >= 0 { - buffer.WriteString(fmt.Sprintf("%d| %s\n", j+1, lines[j])) - } - } - - buffer.WriteString(fmt.Sprintf("%d| %s\n", int(lineNumber), preview)) - - endLine := int(lineNumber) + contextWindow - - for j := int(lineNumber); j < endLine && j < len(lines); j++ { - if j < len(lines) { - buffer.WriteString(fmt.Sprintf("%d| %s\n", j+1, lines[j])) - } - } - - buffer.WriteString("```\n\n") - } else { - buffer.WriteString("```\n") - buffer.WriteString(fmt.Sprintf("%d| %s\n", int(lineNumber), preview)) - buffer.WriteString("```\n\n") - } - } - } - } - - return buffer.String(), nil -} diff --git a/tools/view.go b/tools/view.go deleted file mode 100644 index 3079073af7ac89e205206e49c4da7cd8bc70b5a4..0000000000000000000000000000000000000000 --- a/tools/view.go +++ /dev/null @@ -1,329 +0,0 @@ -package tools - -import ( - "bufio" - "context" - "fmt" - "io" - "os" - "path/filepath" - "strings" - "unicode/utf8" - - "github.com/charmbracelet/crush/internal/ai" - "github.com/charmbracelet/crush/internal/lsp" - "github.com/charmbracelet/crush/internal/permission" -) - -type ViewParams struct { - FilePath string `json:"file_path" description:"The path to the file to read"` - Offset int `json:"offset" description:"The line number to start reading from (0-based)"` - Limit int `json:"limit" description:"The number of lines to read (defaults to 2000)"` -} - -type ViewPermissionsParams struct { - FilePath string `json:"file_path"` - Offset int `json:"offset"` - Limit int `json:"limit"` -} - -type ViewResponseMetadata struct { - FilePath string `json:"file_path"` - Content string `json:"content"` -} - -const ( - ViewToolName = "view" -) - -func NewViewTool(lspClients map[string]*lsp.Client, permissions permission.Service, workingDir string) ai.AgentTool { - return ai.NewAgentTool( - ViewToolName, - `File viewing tool that reads and displays the contents of files with line numbers, allowing you to examine code, logs, or text data. 
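For reference (not part of this patch): a minimal sketch of the request shape the deleted sourcegraph.go sent — a GraphQL POST to https://sourcegraph.com/.api/graphql with the search passed as a variable. The query string here is shortened to one field, and the example search term is taken from the description above.

// sgdemo.go — Sourcegraph GraphQL search request, trimmed to the essentials.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"time"
)

func main() {
	payload := map[string]any{
		// Shortened query; the full field selection lives in sourcegraph.go above.
		"query": "query Search($query: String!) { search(query: $query, version: V2, patternType: keyword) { results { matchCount } } }",
		"variables": map[string]string{
			"query": "file:.go context.WithTimeout count:5",
		},
	}
	body, err := json.Marshal(payload)
	if err != nil {
		panic(err)
	}

	req, err := http.NewRequest("POST", "https://sourcegraph.com/.api/graphql", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("User-Agent", "crush/1.0")

	client := &http.Client{Timeout: 30 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	raw, _ := io.ReadAll(resp.Body)
	var result map[string]any
	if err := json.Unmarshal(raw, &result); err != nil {
		panic(err)
	}
	fmt.Printf("status %d, response keys: %v\n", resp.StatusCode, result)
}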
- -WHEN TO USE THIS TOOL: -- Use when you need to read the contents of a specific file -- Helpful for examining source code, configuration files, or log files -- Perfect for looking at text-based file formats - -HOW TO USE: -- Provide the path to the file you want to view -- Optionally specify an offset to start reading from a specific line -- Optionally specify a limit to control how many lines are read -- Do not use this for directories use the ls tool instead - -FEATURES: -- Displays file contents with line numbers for easy reference -- Can read from any position in a file using the offset parameter -- Handles large files by limiting the number of lines read -- Automatically truncates very long lines for better display -- Suggests similar file names when the requested file isn't found - -LIMITATIONS: -- Maximum file size is 250KB -- Default reading limit is 2000 lines -- Lines longer than 2000 characters are truncated -- Cannot display binary files or images -- Images can be identified but not displayed - -WINDOWS NOTES: -- Handles both Windows (CRLF) and Unix (LF) line endings automatically -- File paths work with both forward slashes (/) and backslashes (\) -- Text encoding is detected automatically for most common formats - -TIPS: -- Use with Glob tool to first find files you want to view -- For code exploration, first use Grep to find relevant files, then View to examine them -- When viewing large files, use the offset parameter to read specific sections`, - func(ctx context.Context, params ViewParams, call ai.ToolCall) (ai.ToolResponse, error) { - if params.FilePath == "" { - return ai.NewTextErrorResponse("file_path is required"), nil - } - - // Handle relative paths - filePath := params.FilePath - if !filepath.IsAbs(filePath) { - filePath = filepath.Join(workingDir, filePath) - } - - // Check if file is outside working directory and request permission if needed - absWorkingDir, err := filepath.Abs(workingDir) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("error resolving working directory: %w", err) - } - - absFilePath, err := filepath.Abs(filePath) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("error resolving file path: %w", err) - } - - relPath, err := filepath.Rel(absWorkingDir, absFilePath) - if err != nil || strings.HasPrefix(relPath, "..") { - // File is outside working directory, request permission - sessionID, messageID := GetContextValues(ctx) - if sessionID == "" || messageID == "" { - return ai.ToolResponse{}, fmt.Errorf("session ID and message ID are required for accessing files outside working directory") - } - - granted := permissions.Request( - permission.CreatePermissionRequest{ - SessionID: sessionID, - Path: absFilePath, - ToolCallID: call.ID, - ToolName: ViewToolName, - Action: "read", - Description: fmt.Sprintf("Read file outside working directory: %s", absFilePath), - Params: ViewPermissionsParams(params), - }, - ) - - if !granted { - return ai.ToolResponse{}, permission.ErrorPermissionDenied - } - } - - // Check if file exists - fileInfo, err := os.Stat(filePath) - if err != nil { - if os.IsNotExist(err) { - // Try to offer suggestions for similarly named files - dir := filepath.Dir(filePath) - base := filepath.Base(filePath) - - dirEntries, dirErr := os.ReadDir(dir) - if dirErr == nil { - var suggestions []string - for _, entry := range dirEntries { - if strings.Contains(strings.ToLower(entry.Name()), strings.ToLower(base)) || - strings.Contains(strings.ToLower(base), strings.ToLower(entry.Name())) { - suggestions = 
append(suggestions, filepath.Join(dir, entry.Name())) - if len(suggestions) >= 3 { - break - } - } - } - - if len(suggestions) > 0 { - return ai.NewTextErrorResponse(fmt.Sprintf("File not found: %s\n\nDid you mean one of these?\n%s", - filePath, strings.Join(suggestions, "\n"))), nil - } - } - - return ai.NewTextErrorResponse(fmt.Sprintf("File not found: %s", filePath)), nil - } - return ai.ToolResponse{}, fmt.Errorf("error accessing file: %w", err) - } - - // Check if it's a directory - if fileInfo.IsDir() { - return ai.NewTextErrorResponse(fmt.Sprintf("Path is a directory, not a file: %s", filePath)), nil - } - - // Check file size - if fileInfo.Size() > MaxReadSize { - return ai.NewTextErrorResponse(fmt.Sprintf("File is too large (%d bytes). Maximum size is %d bytes", - fileInfo.Size(), MaxReadSize)), nil - } - - // Set default limit if not provided - if params.Limit <= 0 { - params.Limit = DefaultReadLimit - } - - // Check if it's an image file - isImage, imageType := isImageFile(filePath) - // TODO: handle images - if isImage { - return ai.NewTextErrorResponse(fmt.Sprintf("This is an image file of type: %s\n", imageType)), nil - } - - // Read the file content - content, lineCount, err := readTextFile(filePath, params.Offset, params.Limit) - isValidUt8 := utf8.ValidString(content) - if !isValidUt8 { - return ai.NewTextErrorResponse("File content is not valid UTF-8"), nil - } - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("error reading file: %w", err) - } - - notifyLspOpenFile(ctx, filePath, lspClients) - output := "\n" - // Format the output with line numbers - output += addLineNumbers(content, params.Offset+1) - - // Add a note if the content was truncated - if lineCount > params.Offset+len(strings.Split(content, "\n")) { - output += fmt.Sprintf("\n\n(File has more lines. Use 'offset' parameter to read beyond line %d)", - params.Offset+len(strings.Split(content, "\n"))) - } - output += "\n\n" - output += getDiagnostics(filePath, lspClients) - recordFileRead(filePath) - return ai.WithResponseMetadata( - ai.NewTextResponse(output), - ViewResponseMetadata{ - FilePath: filePath, - Content: content, - }, - ), nil - }) -} - -func addLineNumbers(content string, startLine int) string { - if content == "" { - return "" - } - - lines := strings.Split(content, "\n") - - var result []string - for i, line := range lines { - line = strings.TrimSuffix(line, "\r") - - lineNum := i + startLine - numStr := fmt.Sprintf("%d", lineNum) - - if len(numStr) >= 6 { - result = append(result, fmt.Sprintf("%s|%s", numStr, line)) - } else { - paddedNum := fmt.Sprintf("%6s", numStr) - result = append(result, fmt.Sprintf("%s|%s", paddedNum, line)) - } - } - - return strings.Join(result, "\n") -} - -func readTextFile(filePath string, offset, limit int) (string, int, error) { - file, err := os.Open(filePath) - if err != nil { - return "", 0, err - } - defer file.Close() - - lineCount := 0 - - scanner := NewLineScanner(file) - if offset > 0 { - for lineCount < offset && scanner.Scan() { - lineCount++ - } - if err = scanner.Err(); err != nil { - return "", 0, err - } - } - - if offset == 0 { - _, err = file.Seek(0, io.SeekStart) - if err != nil { - return "", 0, err - } - } - - // Pre-allocate slice with expected capacity - lines := make([]string, 0, limit) - lineCount = offset - - for scanner.Scan() && len(lines) < limit { - lineCount++ - lineText := scanner.Text() - if len(lineText) > MaxLineLength { - lineText = lineText[:MaxLineLength] + "..." 
- } - lines = append(lines, lineText) - } - - // Continue scanning to get total line count - for scanner.Scan() { - lineCount++ - } - - if err := scanner.Err(); err != nil { - return "", 0, err - } - - return strings.Join(lines, "\n"), lineCount, nil -} - -func isImageFile(filePath string) (bool, string) { - ext := strings.ToLower(filepath.Ext(filePath)) - switch ext { - case ".jpg", ".jpeg": - return true, "JPEG" - case ".png": - return true, "PNG" - case ".gif": - return true, "GIF" - case ".bmp": - return true, "BMP" - case ".svg": - return true, "SVG" - case ".webp": - return true, "WebP" - default: - return false, "" - } -} - -type LineScanner struct { - scanner *bufio.Scanner -} - -func NewLineScanner(r io.Reader) *LineScanner { - return &LineScanner{ - scanner: bufio.NewScanner(r), - } -} - -func (s *LineScanner) Scan() bool { - return s.scanner.Scan() -} - -func (s *LineScanner) Text() string { - return s.scanner.Text() -} - -func (s *LineScanner) Err() error { - return s.scanner.Err() -} diff --git a/tools/write.go b/tools/write.go deleted file mode 100644 index 2eec7f3ee82008bef78e6addceea3e43e8c17105..0000000000000000000000000000000000000000 --- a/tools/write.go +++ /dev/null @@ -1,196 +0,0 @@ -package tools - -import ( - "context" - "fmt" - "log/slog" - "os" - "path/filepath" - "strings" - "time" - - "github.com/charmbracelet/crush/internal/ai" - "github.com/charmbracelet/crush/internal/diff" - "github.com/charmbracelet/crush/internal/fsext" - "github.com/charmbracelet/crush/internal/history" - "github.com/charmbracelet/crush/internal/lsp" - "github.com/charmbracelet/crush/internal/permission" -) - -type WriteParams struct { - FilePath string `json:"file_path" description:"The path to the file to write"` - Content string `json:"content" description:"The content to write to the file"` -} - -type WritePermissionsParams struct { - FilePath string `json:"file_path"` - OldContent string `json:"old_content,omitempty"` - NewContent string `json:"new_content,omitempty"` -} - -type WriteResponseMetadata struct { - Diff string `json:"diff"` - Additions int `json:"additions"` - Removals int `json:"removals"` -} - -const ( - WriteToolName = "write" -) - -func NewWriteTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service, workingDir string) ai.AgentTool { - return ai.NewAgentTool( - WriteToolName, - `File writing tool that creates or updates files in the filesystem, allowing you to save or modify text content. 
- -WHEN TO USE THIS TOOL: -- Use when you need to create a new file -- Helpful for updating existing files with modified content -- Perfect for saving generated code, configurations, or text data - -HOW TO USE: -- Provide the path to the file you want to write -- Include the content to be written to the file -- The tool will create any necessary parent directories - -FEATURES: -- Can create new files or overwrite existing ones -- Creates parent directories automatically if they don't exist -- Checks if the file has been modified since last read for safety -- Avoids unnecessary writes when content hasn't changed - -LIMITATIONS: -- You should read a file before writing to it to avoid conflicts -- Cannot append to files (rewrites the entire file) - -WINDOWS NOTES: -- File permissions (0o755, 0o644) are Unix-style but work on Windows with appropriate translations -- Use forward slashes (/) in paths for cross-platform compatibility -- Windows file attributes and permissions are handled automatically by the Go runtime - -TIPS: -- Use the View tool first to examine existing files before modifying them -- Use the LS tool to verify the correct location when creating new files -- Combine with Glob and Grep tools to find and modify multiple files -- Always include descriptive comments when making changes to existing code`, - func(ctx context.Context, params WriteParams, call ai.ToolCall) (ai.ToolResponse, error) { - if params.FilePath == "" { - return ai.NewTextErrorResponse("file_path is required"), nil - } - - if params.Content == "" { - return ai.NewTextErrorResponse("content is required"), nil - } - - filePath := params.FilePath - if !filepath.IsAbs(filePath) { - filePath = filepath.Join(workingDir, filePath) - } - - fileInfo, err := os.Stat(filePath) - if err == nil { - if fileInfo.IsDir() { - return ai.NewTextErrorResponse(fmt.Sprintf("Path is a directory, not a file: %s", filePath)), nil - } - - modTime := fileInfo.ModTime() - lastRead := getLastReadTime(filePath) - if modTime.After(lastRead) { - return ai.NewTextErrorResponse(fmt.Sprintf("File %s has been modified since it was last read.\nLast modification: %s\nLast read: %s\n\nPlease read the file again before modifying it.", - filePath, modTime.Format(time.RFC3339), lastRead.Format(time.RFC3339))), nil - } - - oldContent, readErr := os.ReadFile(filePath) - if readErr == nil && string(oldContent) == params.Content { - return ai.NewTextErrorResponse(fmt.Sprintf("File %s already contains the exact content. 
No changes made.", filePath)), nil - } - } else if !os.IsNotExist(err) { - return ai.ToolResponse{}, fmt.Errorf("error checking file: %w", err) - } - - dir := filepath.Dir(filePath) - if err = os.MkdirAll(dir, 0o755); err != nil { - return ai.ToolResponse{}, fmt.Errorf("error creating directory: %w", err) - } - - oldContent := "" - if fileInfo != nil && !fileInfo.IsDir() { - oldBytes, readErr := os.ReadFile(filePath) - if readErr == nil { - oldContent = string(oldBytes) - } - } - - sessionID, messageID := GetContextValues(ctx) - if sessionID == "" || messageID == "" { - return ai.ToolResponse{}, fmt.Errorf("session_id and message_id are required") - } - - diff, additions, removals := diff.GenerateDiff( - oldContent, - params.Content, - strings.TrimPrefix(filePath, workingDir), - ) - - granted := permissions.Request( - permission.CreatePermissionRequest{ - SessionID: sessionID, - Path: fsext.PathOrPrefix(filePath, workingDir), - ToolCallID: call.ID, - ToolName: WriteToolName, - Action: "write", - Description: fmt.Sprintf("Create file %s", filePath), - Params: WritePermissionsParams{ - FilePath: filePath, - OldContent: oldContent, - NewContent: params.Content, - }, - }, - ) - if !granted { - return ai.ToolResponse{}, permission.ErrorPermissionDenied - } - - err = os.WriteFile(filePath, []byte(params.Content), 0o644) - if err != nil { - return ai.ToolResponse{}, fmt.Errorf("error writing file: %w", err) - } - - // Check if file exists in history - file, err := files.GetByPathAndSession(ctx, filePath, sessionID) - if err != nil { - _, err = files.Create(ctx, sessionID, filePath, oldContent) - if err != nil { - // Log error but don't fail the operation - return ai.ToolResponse{}, fmt.Errorf("error creating file history: %w", err) - } - } - if file.Content != oldContent { - // User Manually changed the content store an intermediate version - _, err = files.CreateVersion(ctx, sessionID, filePath, oldContent) - if err != nil { - slog.Debug("Error creating file history version", "error", err) - } - } - // Store the new version - _, err = files.CreateVersion(ctx, sessionID, filePath, params.Content) - if err != nil { - slog.Debug("Error creating file history version", "error", err) - } - - recordFileWrite(filePath) - recordFileRead(filePath) - waitForLspDiagnostics(ctx, filePath, lspClients) - - result := fmt.Sprintf("File successfully written: %s", filePath) - result = fmt.Sprintf("\n%s\n", result) - result += getDiagnostics(filePath, lspClients) - return ai.WithResponseMetadata(ai.NewTextResponse(result), - WriteResponseMetadata{ - Diff: diff, - Additions: additions, - Removals: removals, - }, - ), nil - }) -}