From e2e99af1446e4f6a243dc1550da25c4baeac8c6e Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Wed, 27 Aug 2025 14:37:01 -0300
Subject: [PATCH 001/236] chore: run `modernize` (#906)
---
internal/app/app.go | 6 ++----
internal/app/lsp_events.go | 7 ++-----
internal/csync/slices.go | 6 ++----
internal/permission/permission_test.go | 12 ++++--------
4 files changed, 10 insertions(+), 21 deletions(-)
diff --git a/internal/app/app.go b/internal/app/app.go
index c2bfb0ebd027388146b9745cb3fbc69a514336ed..961ce5960e7d64e38c5d6548e881ed697f6283f9 100644
--- a/internal/app/app.go
+++ b/internal/app/app.go
@@ -235,9 +235,7 @@ func setupSubscriber[T any](
subscriber func(context.Context) <-chan pubsub.Event[T],
outputCh chan<- tea.Msg,
) {
- wg.Add(1)
- go func() {
- defer wg.Done()
+ wg.Go(func() {
subCh := subscriber(ctx)
for {
select {
@@ -260,7 +258,7 @@ func setupSubscriber[T any](
return
}
}
- }()
+ })
}
func (app *App) InitCoderAgent() error {
diff --git a/internal/app/lsp_events.go b/internal/app/lsp_events.go
index 5961ec5c13e05fc42ff4eab7fbee744224a49694..08e54582b95d8db725bffc7ff8bd43d4a37528b1 100644
--- a/internal/app/lsp_events.go
+++ b/internal/app/lsp_events.go
@@ -2,6 +2,7 @@ package app
import (
"context"
+ "maps"
"time"
"github.com/charmbracelet/crush/internal/csync"
@@ -48,11 +49,7 @@ func SubscribeLSPEvents(ctx context.Context) <-chan pubsub.Event[LSPEvent] {
// GetLSPStates returns the current state of all LSP clients
func GetLSPStates() map[string]LSPClientInfo {
- states := make(map[string]LSPClientInfo)
- for name, info := range lspStates.Seq2() {
- states[name] = info
- }
- return states
+ return maps.Collect(lspStates.Seq2())
}
// GetLSPState returns the state of a specific LSP client
diff --git a/internal/csync/slices.go b/internal/csync/slices.go
index b7717f0c6df667701f5b64658b0073eaf73fc248..c5c635683e70046694f1cdf647aac8cb425abd24 100644
--- a/internal/csync/slices.go
+++ b/internal/csync/slices.go
@@ -16,11 +16,9 @@ type LazySlice[K any] struct {
// to populate it.
func NewLazySlice[K any](load func() []K) *LazySlice[K] {
s := &LazySlice[K]{}
- s.wg.Add(1)
- go func() {
+ s.wg.Go(func() {
s.inner = load()
- s.wg.Done()
- }()
+ })
return s
}
diff --git a/internal/permission/permission_test.go b/internal/permission/permission_test.go
index c3c646ecd97f51a0f91d8209e2a34c6855d6547b..d1ccd286836768f1bc1119966568941f7494affd 100644
--- a/internal/permission/permission_test.go
+++ b/internal/permission/permission_test.go
@@ -154,12 +154,10 @@ func TestPermissionService_SequentialProperties(t *testing.T) {
events := service.Subscribe(t.Context())
var result1 bool
var wg sync.WaitGroup
- wg.Add(1)
- go func() {
- defer wg.Done()
+ wg.Go(func() {
result1 = service.Request(req)
- }()
+ })
var permissionReq PermissionRequest
event := <-events
@@ -170,12 +168,10 @@ func TestPermissionService_SequentialProperties(t *testing.T) {
assert.True(t, result1, "First request should be granted")
var result2 bool
- wg.Add(1)
- go func() {
- defer wg.Done()
+ wg.Go(func() {
result2 = service.Request(req)
- }()
+ })
event = <-events
permissionReq = event.Payload
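This patch swaps the manual wg.Add(1) / go func() { defer wg.Done() } idiom for sync.WaitGroup.Go, and the hand-rolled map copy in GetLSPStates for maps.Collect. A minimal standalone sketch of both, assuming Go 1.25 for WaitGroup.Go and Go 1.23 for the maps iterator helpers; the names below are illustrative, not from the repository:

    package main

    import (
        "fmt"
        "maps"
        "sync"
    )

    func main() {
        var wg sync.WaitGroup

        // wg.Go wraps Add(1), the goroutine launch, and Done, which is
        // exactly what the modernize rewrite relies on above.
        wg.Go(func() {
            fmt.Println("worker done")
        })
        wg.Wait()

        // maps.Collect drains an iter.Seq2 into a fresh map, replacing the
        // make-and-range loop removed from GetLSPStates.
        src := map[string]int{"a": 1, "b": 2}
        dst := maps.Collect(maps.All(src))
        fmt.Println(dst)
    }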
From 3b9babbc87819a6ec047c6f9ab11d92d45a6ad5d Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 27 Aug 2025 14:46:00 -0300
Subject: [PATCH 002/236] fix(lsp): simplify init/ping, store capabilities
(#713)
* fix(lsp): simplify init/ping
Signed-off-by: Carlos Alexandro Becker
* feat(lsp): store server capabilities
Signed-off-by: Carlos Alexandro Becker
* fix(lsp): improve init
Signed-off-by: Carlos Alexandro Becker
* fix(lsp): cancel request id
Signed-off-by: Carlos Alexandro Becker
---------
Signed-off-by: Carlos Alexandro Becker
---
internal/lsp/caps.go | 112 +++++++++++++++++++++++
internal/lsp/client.go | 156 ++++++--------------------------
internal/lsp/transport.go | 11 ++-
internal/lsp/watcher/watcher.go | 9 +-
4 files changed, 153 insertions(+), 135 deletions(-)
create mode 100644 internal/lsp/caps.go
diff --git a/internal/lsp/caps.go b/internal/lsp/caps.go
new file mode 100644
index 0000000000000000000000000000000000000000..7edc0886f72a92183a8570e45db74218e3aead47
--- /dev/null
+++ b/internal/lsp/caps.go
@@ -0,0 +1,112 @@
+package lsp
+
+import "github.com/charmbracelet/crush/internal/lsp/protocol"
+
+func (c *Client) setCapabilities(caps protocol.ServerCapabilities) {
+ c.capsMu.Lock()
+ defer c.capsMu.Unlock()
+ c.caps = caps
+ c.capsSet.Store(true)
+}
+
+func (c *Client) getCapabilities() (protocol.ServerCapabilities, bool) {
+ c.capsMu.RLock()
+ defer c.capsMu.RUnlock()
+ return c.caps, c.capsSet.Load()
+}
+
+func (c *Client) IsMethodSupported(method string) bool {
+ // Always allow core lifecycle and generic methods
+ switch method {
+ case "initialize", "shutdown", "exit", "$/cancelRequest":
+ return true
+ }
+
+ caps, ok := c.getCapabilities()
+ if !ok {
+ // caps not set yet, be permissive
+ return true
+ }
+
+ switch method {
+ case "textDocument/hover":
+ return caps.HoverProvider != nil
+ case "textDocument/definition":
+ return caps.DefinitionProvider != nil
+ case "textDocument/references":
+ return caps.ReferencesProvider != nil
+ case "textDocument/implementation":
+ return caps.ImplementationProvider != nil
+ case "textDocument/typeDefinition":
+ return caps.TypeDefinitionProvider != nil
+ case "textDocument/documentColor", "textDocument/colorPresentation":
+ return caps.ColorProvider != nil
+ case "textDocument/foldingRange":
+ return caps.FoldingRangeProvider != nil
+ case "textDocument/declaration":
+ return caps.DeclarationProvider != nil
+ case "textDocument/selectionRange":
+ return caps.SelectionRangeProvider != nil
+ case "textDocument/prepareCallHierarchy", "callHierarchy/incomingCalls", "callHierarchy/outgoingCalls":
+ return caps.CallHierarchyProvider != nil
+ case "textDocument/semanticTokens/full", "textDocument/semanticTokens/full/delta", "textDocument/semanticTokens/range":
+ return caps.SemanticTokensProvider != nil
+ case "textDocument/linkedEditingRange":
+ return caps.LinkedEditingRangeProvider != nil
+ case "workspace/willCreateFiles":
+ return caps.Workspace != nil && caps.Workspace.FileOperations != nil && caps.Workspace.FileOperations.WillCreate != nil
+ case "workspace/willRenameFiles":
+ return caps.Workspace != nil && caps.Workspace.FileOperations != nil && caps.Workspace.FileOperations.WillRename != nil
+ case "workspace/willDeleteFiles":
+ return caps.Workspace != nil && caps.Workspace.FileOperations != nil && caps.Workspace.FileOperations.WillDelete != nil
+ case "textDocument/moniker":
+ return caps.MonikerProvider != nil
+ case "textDocument/prepareTypeHierarchy", "typeHierarchy/supertypes", "typeHierarchy/subtypes":
+ return caps.TypeHierarchyProvider != nil
+ case "textDocument/inlineValue":
+ return caps.InlineValueProvider != nil
+ case "textDocument/inlayHint", "inlayHint/resolve":
+ return caps.InlayHintProvider != nil
+ case "textDocument/diagnostic", "workspace/diagnostic":
+ return caps.DiagnosticProvider != nil
+ case "textDocument/inlineCompletion":
+ return caps.InlineCompletionProvider != nil
+ case "workspace/textDocumentContent":
+ return caps.Workspace != nil && caps.Workspace.TextDocumentContent != nil
+ case "textDocument/willSaveWaitUntil":
+ if caps.TextDocumentSync == nil {
+ return false
+ }
+ return true
+ case "textDocument/completion", "completionItem/resolve":
+ return caps.CompletionProvider != nil
+ case "textDocument/signatureHelp":
+ return caps.SignatureHelpProvider != nil
+ case "textDocument/documentHighlight":
+ return caps.DocumentHighlightProvider != nil
+ case "textDocument/documentSymbol":
+ return caps.DocumentSymbolProvider != nil
+ case "textDocument/codeAction", "codeAction/resolve":
+ return caps.CodeActionProvider != nil
+ case "workspace/symbol", "workspaceSymbol/resolve":
+ return caps.WorkspaceSymbolProvider != nil
+ case "textDocument/codeLens", "codeLens/resolve":
+ return caps.CodeLensProvider != nil
+ case "textDocument/documentLink", "documentLink/resolve":
+ return caps.DocumentLinkProvider != nil
+ case "textDocument/formatting":
+ return caps.DocumentFormattingProvider != nil
+ case "textDocument/rangeFormatting":
+ return caps.DocumentRangeFormattingProvider != nil
+ case "textDocument/rangesFormatting":
+ return caps.DocumentRangeFormattingProvider != nil
+ case "textDocument/onTypeFormatting":
+ return caps.DocumentOnTypeFormattingProvider != nil
+ case "textDocument/rename", "textDocument/prepareRename":
+ return caps.RenameProvider != nil
+ case "workspace/executeCommand":
+ return caps.ExecuteCommandProvider != nil
+ default:
+ return true
+ }
+}
diff --git a/internal/lsp/client.go b/internal/lsp/client.go
index a6b9fcbb4caea4992fb2dbce6ddc6e75066c9da7..e09a6a446db2f62476e072c79daadd2d832f895b 100644
--- a/internal/lsp/client.go
+++ b/internal/lsp/client.go
@@ -62,6 +62,11 @@ type Client struct {
// Server state
serverState atomic.Value
+
+ // Server capabilities as returned by initialize
+ caps protocol.ServerCapabilities
+ capsMu sync.RWMutex
+ capsSet atomic.Bool
}
// NewClient creates a new LSP client.
@@ -143,7 +148,7 @@ func (c *Client) RegisterServerRequestHandler(method string, handler ServerReque
}
func (c *Client) InitializeLSPClient(ctx context.Context, workspaceDir string) (*protocol.InitializeResult, error) {
- initParams := &protocol.InitializeParams{
+ initParams := protocol.ParamInitialize{
WorkspaceFoldersInitializeParams: protocol.WorkspaceFoldersInitializeParams{
WorkspaceFolders: []protocol.WorkspaceFolder{
{
@@ -220,12 +225,14 @@ func (c *Client) InitializeLSPClient(ctx context.Context, workspaceDir string) (
},
}
- var result protocol.InitializeResult
- if err := c.Call(ctx, "initialize", initParams, &result); err != nil {
+ result, err := c.Initialize(ctx, initParams)
+ if err != nil {
return nil, fmt.Errorf("initialize failed: %w", err)
}
- if err := c.Notify(ctx, "initialized", struct{}{}); err != nil {
+ c.setCapabilities(result.Capabilities)
+
+ if err := c.Initialized(ctx, protocol.InitializedParams{}); err != nil {
return nil, fmt.Errorf("initialized notification failed: %w", err)
}
@@ -234,14 +241,9 @@ func (c *Client) InitializeLSPClient(ctx context.Context, workspaceDir string) (
c.RegisterServerRequestHandler("workspace/configuration", HandleWorkspaceConfiguration)
c.RegisterServerRequestHandler("client/registerCapability", HandleRegisterCapability)
c.RegisterNotificationHandler("window/showMessage", HandleServerMessage)
- c.RegisterNotificationHandler("textDocument/publishDiagnostics",
- func(params json.RawMessage) { HandleDiagnostics(c, params) })
-
- // Notify the LSP server
- err := c.Initialized(ctx, protocol.InitializedParams{})
- if err != nil {
- return nil, fmt.Errorf("initialization failed: %w", err)
- }
+ c.RegisterNotificationHandler("textDocument/publishDiagnostics", func(params json.RawMessage) {
+ HandleDiagnostics(c, params)
+ })
return &result, nil
}
@@ -329,16 +331,7 @@ func (c *Client) WaitForServerReady(ctx context.Context) error {
slog.Debug("Waiting for LSP server to be ready...")
}
- // Determine server type for specialized initialization
- serverType := c.detectServerType()
-
- // For TypeScript-like servers, we need to open some key files first
- if serverType == ServerTypeTypeScript {
- if cfg.Options.DebugLSP {
- slog.Debug("TypeScript-like server detected, opening key configuration files")
- }
- c.openKeyConfigFiles(ctx)
- }
+ c.openKeyConfigFiles(ctx)
for {
select {
@@ -347,21 +340,19 @@ func (c *Client) WaitForServerReady(ctx context.Context) error {
return fmt.Errorf("timeout waiting for LSP server to be ready")
case <-ticker.C:
// Try a ping method appropriate for this server type
- err := c.pingServerByType(ctx, serverType)
- if err == nil {
- // Server responded successfully
- c.SetServerState(StateReady)
+ if err := c.ping(ctx); err != nil {
if cfg.Options.DebugLSP {
- slog.Debug("LSP server is ready")
+ slog.Debug("LSP server not ready yet", "error", err, "server", c.name)
}
- return nil
- } else {
- slog.Debug("LSP server not ready yet", "error", err, "serverType", serverType)
+ continue
}
+ // Server responded successfully
+ c.SetServerState(StateReady)
if cfg.Options.DebugLSP {
- slog.Debug("LSP server not ready yet", "error", err, "serverType", serverType)
+ slog.Debug("LSP server is ready")
}
+ return nil
}
}
}
@@ -443,86 +434,13 @@ func (c *Client) openKeyConfigFiles(ctx context.Context) {
}
}
-// pingServerByType sends a ping request appropriate for the server type
-func (c *Client) pingServerByType(ctx context.Context, serverType ServerType) error {
- switch serverType {
- case ServerTypeTypeScript:
- // For TypeScript, try a document symbol request on an open file
- return c.pingTypeScriptServer(ctx)
- case ServerTypeGo:
- // For Go, workspace/symbol works well
- return c.pingWithWorkspaceSymbol(ctx)
- case ServerTypeRust:
- // For Rust, workspace/symbol works well
- return c.pingWithWorkspaceSymbol(ctx)
- default:
- // Default ping method
- return c.pingWithWorkspaceSymbol(ctx)
- }
-}
-
-// pingTypeScriptServer tries to ping a TypeScript server with appropriate methods
-func (c *Client) pingTypeScriptServer(ctx context.Context) error {
- // First try workspace/symbol which works for many servers
- if err := c.pingWithWorkspaceSymbol(ctx); err == nil {
- return nil
- }
-
- // If that fails, try to find an open file and request document symbols
- c.openFilesMu.RLock()
- defer c.openFilesMu.RUnlock()
-
- // If we have any open files, try to get document symbols for one
- for uri := range c.openFiles {
- filePath, err := protocol.DocumentURI(uri).Path()
- if err != nil {
- slog.Error("Failed to convert URI to path for TypeScript symbol collection", "uri", uri, "error", err)
- continue
- }
-
- if strings.HasSuffix(filePath, ".ts") || strings.HasSuffix(filePath, ".js") ||
- strings.HasSuffix(filePath, ".tsx") || strings.HasSuffix(filePath, ".jsx") {
- var symbols []protocol.DocumentSymbol
- err := c.Call(ctx, "textDocument/documentSymbol", protocol.DocumentSymbolParams{
- TextDocument: protocol.TextDocumentIdentifier{
- URI: protocol.DocumentURI(uri),
- },
- }, &symbols)
- if err == nil {
- return nil
- }
- }
- }
-
- // If we have no open TypeScript files, try to find and open one
- workDir := config.Get().WorkingDir()
- err := filepath.WalkDir(workDir, func(path string, d os.DirEntry, err error) error {
- if err != nil {
- return err
- }
-
- // Skip directories and non-TypeScript files
- if d.IsDir() {
- return nil
- }
-
- ext := filepath.Ext(path)
- if ext == ".ts" || ext == ".js" || ext == ".tsx" || ext == ".jsx" {
- // Found a TypeScript file, try to open it
- if err := c.OpenFile(ctx, path); err == nil {
- // Successfully opened, stop walking
- return filepath.SkipAll
- }
- }
-
+// ping sends a ping request...
+func (c *Client) ping(ctx context.Context) error {
+ if _, err := c.Symbol(ctx, protocol.WorkspaceSymbolParams{}); err == nil {
return nil
- })
- if err != nil {
- slog.Debug("Error walking directory for TypeScript files", "error", err)
}
-
- // Final fallback - just try a generic capability
- return c.pingWithServerCapabilities(ctx)
+ // This is a very lightweight request that should work for most servers
+ return c.Notify(ctx, "$/cancelRequest", protocol.CancelParams{ID: "1"})
}
// openTypeScriptFiles finds and opens TypeScript files to help initialize the server
@@ -597,20 +515,6 @@ func shouldSkipDir(path string) bool {
return skipDirs[dirName]
}
-// pingWithWorkspaceSymbol tries a workspace/symbol request
-func (c *Client) pingWithWorkspaceSymbol(ctx context.Context) error {
- var result []protocol.SymbolInformation
- return c.Call(ctx, "workspace/symbol", protocol.WorkspaceSymbolParams{
- Query: "",
- }, &result)
-}
-
-// pingWithServerCapabilities tries to get server capabilities
-func (c *Client) pingWithServerCapabilities(ctx context.Context) error {
- // This is a very lightweight request that should work for most servers
- return c.Notify(ctx, "$/cancelRequest", struct{ ID int }{ID: -1})
-}
-
type OpenFileInfo struct {
Version int32
URI protocol.DocumentURI
@@ -668,7 +572,7 @@ func (c *Client) OpenFile(ctx context.Context, filepath string) error {
},
}
- if err := c.Notify(ctx, "textDocument/didOpen", params); err != nil {
+ if err := c.DidOpen(ctx, params); err != nil {
return err
}
@@ -718,7 +622,7 @@ func (c *Client) NotifyChange(ctx context.Context, filepath string) error {
},
}
- return c.Notify(ctx, "textDocument/didChange", params)
+ return c.DidChange(ctx, params)
}
func (c *Client) CloseFile(ctx context.Context, filepath string) error {
@@ -741,7 +645,7 @@ func (c *Client) CloseFile(ctx context.Context, filepath string) error {
if cfg.Options.DebugLSP {
slog.Debug("Closing file", "file", filepath)
}
- if err := c.Notify(ctx, "textDocument/didClose", params); err != nil {
+ if err := c.DidClose(ctx, params); err != nil {
return err
}
diff --git a/internal/lsp/transport.go b/internal/lsp/transport.go
index b468101dbc36537c9f306399b4af6cbbe451d96f..483281d25c51a6bfb71ca3314419b570f9a6bf0d 100644
--- a/internal/lsp/transport.go
+++ b/internal/lsp/transport.go
@@ -188,9 +188,12 @@ func (c *Client) handleMessages() {
// Call makes a request and waits for the response
func (c *Client) Call(ctx context.Context, method string, params any, result any) error {
- cfg := config.Get()
+ if !c.IsMethodSupported(method) {
+ return fmt.Errorf("method not supported by server: %s", method)
+ }
id := c.nextID.Add(1)
+ cfg := config.Get()
if cfg.Options.DebugLSP {
slog.Debug("Making call", "method", method, "id", id)
}
@@ -253,6 +256,12 @@ func (c *Client) Call(ctx context.Context, method string, params any, result any
// Notify sends a notification (a request without an ID that doesn't expect a response)
func (c *Client) Notify(ctx context.Context, method string, params any) error {
cfg := config.Get()
+ if !c.IsMethodSupported(method) {
+ if cfg.Options.DebugLSP {
+ slog.Debug("Skipping notification: method not supported by server", "method", method)
+ }
+ return nil
+ }
if cfg.Options.DebugLSP {
slog.Debug("Sending notification", "method", method)
}
diff --git a/internal/lsp/watcher/watcher.go b/internal/lsp/watcher/watcher.go
index 476c49361e2ba4e07b6c9b64a8d884e74d3013ed..ad03099ae9a2b1e516fdcab820052c1ca858bd2a 100644
--- a/internal/lsp/watcher/watcher.go
+++ b/internal/lsp/watcher/watcher.go
@@ -623,18 +623,11 @@ func (w *WorkspaceWatcher) matchesPattern(path string, pattern protocol.GlobPatt
if basePath == "" {
return false
}
- // For relative patterns
- if basePath, err = protocol.DocumentURI(basePath).Path(); err != nil {
- // XXX: Do we want to return here, or send the error up the stack?
- slog.Error("Error converting base path to URI", "basePath", basePath, "error", err)
- }
-
- basePath = filepath.ToSlash(basePath)
// Make path relative to basePath for matching
relPath, err := filepath.Rel(basePath, path)
if err != nil {
- slog.Error("Error getting relative path", "path", path, "basePath", basePath, "error", err)
+ slog.Error("Error getting relative path", "path", path, "basePath", basePath, "error", err, "server", w.name)
return false
}
relPath = filepath.ToSlash(relPath)
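The new caps.go stores the ServerCapabilities returned by initialize and gates later traffic on them: Call now fails fast for unsupported requests, while Notify silently drops unsupported notifications. A self-contained sketch of that gating pattern, using simplified hypothetical types rather than the real protocol package:

    package main

    import (
        "fmt"
        "sync"
        "sync/atomic"
    )

    // Capabilities is a stand-in for the fields an LSP server advertises.
    type Capabilities struct {
        HoverProvider      bool
        DefinitionProvider bool
    }

    type Client struct {
        mu      sync.RWMutex
        caps    Capabilities
        capsSet atomic.Bool
    }

    func (c *Client) SetCapabilities(caps Capabilities) {
        c.mu.Lock()
        defer c.mu.Unlock()
        c.caps = caps
        c.capsSet.Store(true)
    }

    // Supported reports whether a method may be sent. Before initialize
    // completes it returns true, mirroring the permissive default in caps.go.
    func (c *Client) Supported(method string) bool {
        if !c.capsSet.Load() {
            return true
        }
        c.mu.RLock()
        defer c.mu.RUnlock()
        switch method {
        case "textDocument/hover":
            return c.caps.HoverProvider
        case "textDocument/definition":
            return c.caps.DefinitionProvider
        default:
            return true
        }
    }

    func main() {
        c := &Client{}
        fmt.Println(c.Supported("textDocument/hover")) // true: caps unknown yet
        c.SetCapabilities(Capabilities{DefinitionProvider: true})
        fmt.Println(c.Supported("textDocument/hover")) // false: not advertised
    }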
From 8d735d6e3d859a5d61d835372b1e0b497c5ee4d5 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 27 Aug 2025 14:54:49 -0300
Subject: [PATCH 003/236] refactor: home.Dir, home.Short, home.Long (#884)
* refactor: home.Dir, home.Short, home.Long
Centralized all home-related operations in a package and removed a bunch
of repeated code all over the place.
Signed-off-by: Carlos Alexandro Becker
* test: more cases
Signed-off-by: Carlos Alexandro Becker
* fix: more places
Signed-off-by: Carlos Alexandro Becker
* test: fix on windows
Signed-off-by: Carlos Alexandro Becker
* test: fix
Signed-off-by: Carlos Alexandro Becker
* test: fix
Signed-off-by: Carlos Alexandro Becker
---------
Signed-off-by: Carlos Alexandro Becker
---
internal/config/load.go | 5 +-
internal/config/provider.go | 3 +-
internal/fsext/fileutil.go | 8 +--
internal/fsext/home.go | 20 ------
internal/fsext/ls.go | 3 +-
internal/fsext/parent.go | 4 +-
internal/home/home.go | 42 +++++++++++++
internal/home/home_test.go | 26 ++++++++
internal/llm/prompt/prompt.go | 14 +----
internal/llm/prompt/prompt_test.go | 62 ++-----------------
.../tui/components/chat/sidebar/sidebar.go | 10 +--
internal/tui/components/chat/splash/splash.go | 9 +--
.../tui/components/dialogs/commands/loader.go | 5 +-
.../dialogs/filepicker/filepicker.go | 6 +-
.../tui/components/dialogs/models/apikey.go | 5 +-
15 files changed, 98 insertions(+), 124 deletions(-)
delete mode 100644 internal/fsext/home.go
create mode 100644 internal/home/home.go
create mode 100644 internal/home/home_test.go
diff --git a/internal/config/load.go b/internal/config/load.go
index b644eb3f2b35253c310dd899dbb06fcfe65e6b2e..a703a049c7697be9209d3994c857ff0548f60b8b 100644
--- a/internal/config/load.go
+++ b/internal/config/load.go
@@ -16,6 +16,7 @@ import (
"github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/env"
"github.com/charmbracelet/crush/internal/fsext"
+ "github.com/charmbracelet/crush/internal/home"
"github.com/charmbracelet/crush/internal/log"
)
@@ -584,7 +585,7 @@ func globalConfig() string {
return filepath.Join(localAppData, appName, fmt.Sprintf("%s.json", appName))
}
- return filepath.Join(os.Getenv("HOME"), ".config", appName, fmt.Sprintf("%s.json", appName))
+ return filepath.Join(home.Dir(), ".config", appName, fmt.Sprintf("%s.json", appName))
}
// GlobalConfigData returns the path to the main data directory for the application.
@@ -606,5 +607,5 @@ func GlobalConfigData() string {
return filepath.Join(localAppData, appName, fmt.Sprintf("%s.json", appName))
}
- return filepath.Join(os.Getenv("HOME"), ".local", "share", appName, fmt.Sprintf("%s.json", appName))
+ return filepath.Join(home.Dir(), ".local", "share", appName, fmt.Sprintf("%s.json", appName))
}
diff --git a/internal/config/provider.go b/internal/config/provider.go
index 56125d482286b7c954af2254f185311ec142df04..68ede5095506b21dc4d744e309aaa836917345e5 100644
--- a/internal/config/provider.go
+++ b/internal/config/provider.go
@@ -12,6 +12,7 @@ import (
"time"
"github.com/charmbracelet/catwalk/pkg/catwalk"
+ "github.com/charmbracelet/crush/internal/home"
)
type ProviderClient interface {
@@ -41,7 +42,7 @@ func providerCacheFileData() string {
return filepath.Join(localAppData, appName, "providers.json")
}
- return filepath.Join(os.Getenv("HOME"), ".local", "share", appName, "providers.json")
+ return filepath.Join(home.Dir(), ".local", "share", appName, "providers.json")
}
func saveProvidersInCache(path string, providers []catwalk.Provider) error {
diff --git a/internal/fsext/fileutil.go b/internal/fsext/fileutil.go
index e68888452cdc190cb1e6cbdec8d87760dd8e432c..ee5fff66fb66e152319ea40c6abab4950a276a2f 100644
--- a/internal/fsext/fileutil.go
+++ b/internal/fsext/fileutil.go
@@ -10,6 +10,7 @@ import (
"github.com/bmatcuk/doublestar/v4"
"github.com/charlievieth/fastwalk"
+ "github.com/charmbracelet/crush/internal/home"
ignore "github.com/sabhiram/go-gitignore"
)
@@ -182,12 +183,7 @@ func GlobWithDoubleStar(pattern, searchPath string, limit int) ([]string, bool,
}
func PrettyPath(path string) string {
- // replace home directory with ~
- homeDir, err := os.UserHomeDir()
- if err == nil {
- path = strings.ReplaceAll(path, homeDir, "~")
- }
- return path
+ return home.Short(path)
}
func DirTrim(pwd string, lim int) string {
diff --git a/internal/fsext/home.go b/internal/fsext/home.go
deleted file mode 100644
index d81a4bc251c0205032a606e91bc53eac9bd43918..0000000000000000000000000000000000000000
--- a/internal/fsext/home.go
+++ /dev/null
@@ -1,20 +0,0 @@
-package fsext
-
-import (
- "cmp"
- "os"
- "os/user"
- "sync"
-)
-
-var HomeDir = sync.OnceValue(func() string {
- u, err := user.Current()
- if err == nil {
- return u.HomeDir
- }
- return cmp.Or(
- os.Getenv("HOME"),
- os.Getenv("USERPROFILE"),
- os.Getenv("HOMEPATH"),
- )
-})
diff --git a/internal/fsext/ls.go b/internal/fsext/ls.go
index e4b98bb2810d7e5014b881f3b0cac51f4e71965c..884c5b150e64cce3da3d1e3f2e08355a53361272 100644
--- a/internal/fsext/ls.go
+++ b/internal/fsext/ls.go
@@ -9,6 +9,7 @@ import (
"github.com/charlievieth/fastwalk"
"github.com/charmbracelet/crush/internal/csync"
+ "github.com/charmbracelet/crush/internal/home"
ignore "github.com/sabhiram/go-gitignore"
)
@@ -73,7 +74,7 @@ var commonIgnorePatterns = sync.OnceValue(func() ignore.IgnoreParser {
})
var homeIgnore = sync.OnceValue(func() ignore.IgnoreParser {
- home := HomeDir()
+ home := home.Dir()
var lines []string
for _, name := range []string{
filepath.Join(home, ".gitignore"),
diff --git a/internal/fsext/parent.go b/internal/fsext/parent.go
index 1b04143660e7700c51693ededf90ef7489a10e18..bd3193610a79cbc80b5bb2c1d75be32a819f34f5 100644
--- a/internal/fsext/parent.go
+++ b/internal/fsext/parent.go
@@ -4,6 +4,8 @@ import (
"errors"
"os"
"path/filepath"
+
+ "github.com/charmbracelet/crush/internal/home"
)
// SearchParent searches for a target file or directory starting from dir
@@ -33,7 +35,7 @@ func SearchParent(dir, target string) (string, bool) {
for {
parent := filepath.Dir(previousParent)
- if parent == previousParent || parent == HomeDir() {
+ if parent == previousParent || parent == home.Dir() {
return "", false
}
diff --git a/internal/home/home.go b/internal/home/home.go
new file mode 100644
index 0000000000000000000000000000000000000000..f2a9b73b922abd8f027ba68655afc68f42a58b09
--- /dev/null
+++ b/internal/home/home.go
@@ -0,0 +1,42 @@
+package home
+
+import (
+ "log/slog"
+ "os"
+ "path/filepath"
+ "strings"
+ "sync"
+)
+
+// Dir returns the user's home directory, or if it fails, tries to create a new
+// temporary directory and use that instead.
+var Dir = sync.OnceValue(func() string {
+ home, err := os.UserHomeDir()
+ if err == nil {
+ slog.Debug("user home directory", "home", home)
+ return home
+ }
+ tmp, err := os.MkdirTemp("crush", "")
+ if err != nil {
+ slog.Error("could not find the user home directory")
+ return ""
+ }
+ slog.Warn("could not find the user home directory, using a temporary one", "home", tmp)
+ return tmp
+})
+
+// Short replaces the actual home path from [Dir] with `~`.
+func Short(p string) string {
+ if !strings.HasPrefix(p, Dir()) || Dir() == "" {
+ return p
+ }
+ return filepath.Join("~", strings.TrimPrefix(p, Dir()))
+}
+
+// Long replaces the `~` with actual home path from [Dir].
+func Long(p string) string {
+ if !strings.HasPrefix(p, "~") || Dir() == "" {
+ return p
+ }
+ return strings.Replace(p, "~", Dir(), 1)
+}
diff --git a/internal/home/home_test.go b/internal/home/home_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..e5775c31bd51545b4d9f6ec5dbd9f28cac69ae16
--- /dev/null
+++ b/internal/home/home_test.go
@@ -0,0 +1,26 @@
+package home
+
+import (
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestDir(t *testing.T) {
+ require.NotEmpty(t, Dir())
+}
+
+func TestShort(t *testing.T) {
+ d := filepath.Join(Dir(), "documents", "file.txt")
+ require.Equal(t, filepath.FromSlash("~/documents/file.txt"), Short(d))
+ ad := filepath.FromSlash("/absolute/path/file.txt")
+ require.Equal(t, ad, Short(ad))
+}
+
+func TestLong(t *testing.T) {
+ d := filepath.FromSlash("~/documents/file.txt")
+ require.Equal(t, filepath.Join(Dir(), "documents", "file.txt"), Long(d))
+ ad := filepath.FromSlash("/absolute/path/file.txt")
+ require.Equal(t, ad, Long(ad))
+}
diff --git a/internal/llm/prompt/prompt.go b/internal/llm/prompt/prompt.go
index 8c87482a71679f5bc682e6fdd8c1f5a03b89c184..919686a7d248d6ac2f02ae21ff4a323b26fc536f 100644
--- a/internal/llm/prompt/prompt.go
+++ b/internal/llm/prompt/prompt.go
@@ -9,6 +9,7 @@ import (
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/env"
+ "github.com/charmbracelet/crush/internal/home"
)
type PromptID string
@@ -44,18 +45,7 @@ func getContextFromPaths(workingDir string, contextPaths []string) string {
// expandPath expands ~ and environment variables in file paths
func expandPath(path string) string {
- // Handle tilde expansion
- if strings.HasPrefix(path, "~/") {
- homeDir, err := os.UserHomeDir()
- if err == nil {
- path = filepath.Join(homeDir, path[2:])
- }
- } else if path == "~" {
- homeDir, err := os.UserHomeDir()
- if err == nil {
- path = homeDir
- }
- }
+ path = home.Long(path)
// Handle environment variable expansion using the same pattern as config
if strings.HasPrefix(path, "$") {
diff --git a/internal/llm/prompt/prompt_test.go b/internal/llm/prompt/prompt_test.go
index e4289595fa13b4d5a9e4ef12302b2390edcdba54..66f9d438d9a5ab62d0f0871c718b166ad44795b0 100644
--- a/internal/llm/prompt/prompt_test.go
+++ b/internal/llm/prompt/prompt_test.go
@@ -2,10 +2,10 @@ package prompt
import (
"os"
- "path/filepath"
- "runtime"
"strings"
"testing"
+
+ "github.com/charmbracelet/crush/internal/home"
)
func TestExpandPath(t *testing.T) {
@@ -25,16 +25,14 @@ func TestExpandPath(t *testing.T) {
name: "tilde expansion",
input: "~/documents",
expected: func() string {
- home, _ := os.UserHomeDir()
- return filepath.Join(home, "documents")
+ return home.Dir() + "/documents"
},
},
{
name: "tilde only",
input: "~",
expected: func() string {
- home, _ := os.UserHomeDir()
- return home
+ return home.Dir()
},
},
{
@@ -69,55 +67,3 @@ func TestExpandPath(t *testing.T) {
})
}
}
-
-func TestProcessContextPaths(t *testing.T) {
- // Create a temporary directory and file for testing
- tmpDir := t.TempDir()
- testFile := filepath.Join(tmpDir, "test.txt")
- testContent := "test content"
-
- err := os.WriteFile(testFile, []byte(testContent), 0o644)
- if err != nil {
- t.Fatalf("Failed to create test file: %v", err)
- }
-
- // Test with absolute path to file
- result := processContextPaths("", []string{testFile})
- expected := "# From:" + testFile + "\n" + testContent
-
- if result != expected {
- t.Errorf("processContextPaths with absolute path failed.\nGot: %q\nWant: %q", result, expected)
- }
-
- // Test with directory path (should process all files in directory)
- result = processContextPaths("", []string{tmpDir})
- if !strings.Contains(result, testContent) {
- t.Errorf("processContextPaths with directory path failed to include file content")
- }
-
- // Test with tilde expansion (if we can create a file in home directory)
- tmpDir = t.TempDir()
- setHomeEnv(t, tmpDir)
- homeTestFile := filepath.Join(tmpDir, "crush_test_file.txt")
- err = os.WriteFile(homeTestFile, []byte(testContent), 0o644)
- if err == nil {
- defer os.Remove(homeTestFile) // Clean up
-
- tildeFile := "~/crush_test_file.txt"
- result = processContextPaths("", []string{tildeFile})
- expected = "# From:" + homeTestFile + "\n" + testContent
-
- if result != expected {
- t.Errorf("processContextPaths with tilde expansion failed.\nGot: %q\nWant: %q", result, expected)
- }
- }
-}
-
-func setHomeEnv(tb testing.TB, path string) {
- tb.Helper()
- key := "HOME"
- if runtime.GOOS == "windows" {
- key = "USERPROFILE"
- }
- tb.Setenv(key, path)
-}
diff --git a/internal/tui/components/chat/sidebar/sidebar.go b/internal/tui/components/chat/sidebar/sidebar.go
index eeabeac3f9a7b1f17c2b24acc4950deb186ff56b..236c5d2e31c6e7f81482757ff750f572e23cc3fb 100644
--- a/internal/tui/components/chat/sidebar/sidebar.go
+++ b/internal/tui/components/chat/sidebar/sidebar.go
@@ -3,7 +3,6 @@ package sidebar
import (
"context"
"fmt"
- "os"
"slices"
"strings"
@@ -14,6 +13,7 @@ import (
"github.com/charmbracelet/crush/internal/diff"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/history"
+ "github.com/charmbracelet/crush/internal/home"
"github.com/charmbracelet/crush/internal/lsp"
"github.com/charmbracelet/crush/internal/pubsub"
"github.com/charmbracelet/crush/internal/session"
@@ -609,11 +609,5 @@ func (m *sidebarCmp) SetCompactMode(compact bool) {
func cwd() string {
cwd := config.Get().WorkingDir()
t := styles.CurrentTheme()
- // Replace home directory with ~, unless we're at the top level of the
- // home directory).
- homeDir, err := os.UserHomeDir()
- if err == nil && cwd != homeDir {
- cwd = strings.ReplaceAll(cwd, homeDir, "~")
- }
- return t.S().Muted.Render(cwd)
+ return t.S().Muted.Render(home.Short(cwd))
}
diff --git a/internal/tui/components/chat/splash/splash.go b/internal/tui/components/chat/splash/splash.go
index 2416888fa184d5dcd04e0770e0816b9ee63fd5bd..7fa46cdd279a2cbe98a86654a23e81a49bc8aebf 100644
--- a/internal/tui/components/chat/splash/splash.go
+++ b/internal/tui/components/chat/splash/splash.go
@@ -2,7 +2,6 @@ package splash
import (
"fmt"
- "os"
"strings"
"time"
@@ -11,6 +10,7 @@ import (
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/catwalk/pkg/catwalk"
"github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/home"
"github.com/charmbracelet/crush/internal/llm/prompt"
"github.com/charmbracelet/crush/internal/tui/components/chat"
"github.com/charmbracelet/crush/internal/tui/components/core"
@@ -648,12 +648,7 @@ func (s *splashCmp) cwdPart() string {
}
func (s *splashCmp) cwd() string {
- cwd := config.Get().WorkingDir()
- homeDir, err := os.UserHomeDir()
- if err == nil && cwd != homeDir {
- cwd = strings.ReplaceAll(cwd, homeDir, "~")
- }
- return cwd
+ return home.Short(config.Get().WorkingDir())
}
func LSPList(maxWidth int) []string {
diff --git a/internal/tui/components/dialogs/commands/loader.go b/internal/tui/components/dialogs/commands/loader.go
index 9aee528ee48d0f23e48c417f8bee5bc0e3f381c5..74d9c7e4baee2e2d19f8baca914942f0c0d34cd3 100644
--- a/internal/tui/components/dialogs/commands/loader.go
+++ b/internal/tui/components/dialogs/commands/loader.go
@@ -10,6 +10,7 @@ import (
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/home"
"github.com/charmbracelet/crush/internal/tui/util"
)
@@ -54,7 +55,7 @@ func buildCommandSources(cfg *config.Config) []commandSource {
}
// Home directory
- if home, err := os.UserHomeDir(); err == nil {
+ if home := home.Dir(); home != "" {
sources = append(sources, commandSource{
path: filepath.Join(home, ".crush", "commands"),
prefix: UserCommandPrefix,
@@ -73,7 +74,7 @@ func buildCommandSources(cfg *config.Config) []commandSource {
func getXDGCommandsDir() string {
xdgHome := os.Getenv("XDG_CONFIG_HOME")
if xdgHome == "" {
- if home, err := os.UserHomeDir(); err == nil {
+ if home := home.Dir(); home != "" {
xdgHome = filepath.Join(home, ".config")
}
}
diff --git a/internal/tui/components/dialogs/filepicker/filepicker.go b/internal/tui/components/dialogs/filepicker/filepicker.go
index fd853cdc1e2f7ae8a049aa0c7f456cc406c41d88..fcec2fc8b6e3e606e555c55949049f397a30f921 100644
--- a/internal/tui/components/dialogs/filepicker/filepicker.go
+++ b/internal/tui/components/dialogs/filepicker/filepicker.go
@@ -11,6 +11,7 @@ import (
"github.com/charmbracelet/bubbles/v2/help"
"github.com/charmbracelet/bubbles/v2/key"
tea "github.com/charmbracelet/bubbletea/v2"
+ "github.com/charmbracelet/crush/internal/home"
"github.com/charmbracelet/crush/internal/message"
"github.com/charmbracelet/crush/internal/tui/components/core"
"github.com/charmbracelet/crush/internal/tui/components/dialogs"
@@ -60,7 +61,7 @@ func NewFilePickerCmp(workingDir string) FilePicker {
if cwd, err := os.Getwd(); err == nil {
fp.CurrentDirectory = cwd
} else {
- fp.CurrentDirectory, _ = os.UserHomeDir()
+ fp.CurrentDirectory = home.Dir()
}
}
@@ -106,8 +107,7 @@ func (m *model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}
if key.Matches(msg, m.filePicker.KeyMap.Back) {
// make sure we don't go back if we are at the home directory
- homeDir, _ := os.UserHomeDir()
- if m.filePicker.CurrentDirectory == homeDir {
+ if m.filePicker.CurrentDirectory == home.Dir() {
return m, nil
}
}
diff --git a/internal/tui/components/dialogs/models/apikey.go b/internal/tui/components/dialogs/models/apikey.go
index 80f812cd9c5e92313089aec70f9b9dba4b75375d..0490335f9ad745839a94de0460a0fc5c1b6f125c 100644
--- a/internal/tui/components/dialogs/models/apikey.go
+++ b/internal/tui/components/dialogs/models/apikey.go
@@ -2,13 +2,12 @@ package models
import (
"fmt"
- "strings"
"github.com/charmbracelet/bubbles/v2/spinner"
"github.com/charmbracelet/bubbles/v2/textinput"
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/fsext"
+ "github.com/charmbracelet/crush/internal/home"
"github.com/charmbracelet/crush/internal/tui/styles"
"github.com/charmbracelet/lipgloss/v2"
)
@@ -145,7 +144,7 @@ func (a *APIKeyInput) View() string {
inputView := a.input.View()
dataPath := config.GlobalConfigData()
- dataPath = strings.Replace(dataPath, fsext.HomeDir(), "~", 1)
+ dataPath = home.Short(dataPath)
helpText := styles.CurrentTheme().S().Muted.
Render(fmt.Sprintf("This will be written to the global configuration: %s", dataPath))
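The home package collapses every home-directory lookup, ~-shortening, and ~-expansion into home.Dir, home.Short, and home.Long. A brief usage sketch, assuming it is called from within the repository (the package is internal and not importable from outside):

    package main

    import (
        "fmt"
        "path/filepath"

        "github.com/charmbracelet/crush/internal/home"
    )

    func main() {
        cfg := filepath.Join(home.Dir(), ".config", "crush", "crush.json")

        // Short trims the home prefix for display, e.g. ~/.config/crush/crush.json
        // on Unix-like systems; paths outside the home directory pass through.
        fmt.Println(home.Short(cfg))

        // Long expands a leading ~ back to the real home directory, which is
        // what expandPath in internal/llm/prompt now delegates to.
        fmt.Println(home.Long("~/documents/notes.md"))
    }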
From c917b685fab5032b4936e7a2cde902edd1d89b56 Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Thu, 28 Aug 2025 05:23:56 -0300
Subject: [PATCH 005/236] chore(legal): @negz has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index ecadcbc9d58ebd531ed2007bbb63fdc64344db67..cb4bb824ccea5ce8cff5caee83e5eed683629df8 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -519,6 +519,14 @@
"created_at": "2025-08-26T11:34:17Z",
"repoId": 987670088,
"pullRequestNo": 900
+ },
+ {
+ "name": "negz",
+ "id": 1049349,
+ "comment_id": 3232462357,
+ "created_at": "2025-08-28T08:23:46Z",
+ "repoId": 987670088,
+ "pullRequestNo": 914
}
]
}
\ No newline at end of file
From d69dcc1ae314c55b0d3e93d06beb9320485fe1c3 Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Thu, 28 Aug 2025 11:14:02 -0300
Subject: [PATCH 006/236] chore(labeler): add shell label
---
.github/labeler.yml | 2 ++
1 file changed, 2 insertions(+)
diff --git a/.github/labeler.yml b/.github/labeler.yml
index bc3a261e9bfebba40c3f0b5ab9b03b855342e32f..c8f2a3fbe357bfb93440881fda03fb7c87f6cd55 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -23,6 +23,8 @@
- "/(release|releasing|aur|brew|nix|npm|scoop|termux|winget)/i"
"area: session":
- "/session/i"
+"area: shell":
+ - "/shell/i"
"area: themes":
- "/theme/i"
"area: tools":
From 0c8e111af5d8bccf77d544da967c28f7df56d36f Mon Sep 17 00:00:00 2001
From: Kujtim Hoxha
Date: Wed, 27 Aug 2025 14:30:08 -0400
Subject: [PATCH 007/236] fix: openai provider tool calls
---
internal/llm/provider/openai.go | 30 +++++++++++++++---------------
1 file changed, 15 insertions(+), 15 deletions(-)
diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index e28b0444df023245e235f4a9cffa47adb9a46286..ffed9325e0a70fb86ffe2fecd5b7f00e63e3e215 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -7,7 +7,6 @@ import (
"fmt"
"io"
"log/slog"
- "slices"
"strings"
"time"
@@ -342,18 +341,15 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
acc := openai.ChatCompletionAccumulator{}
currentContent := ""
toolCalls := make([]message.ToolCall, 0)
- var msgToolCalls []openai.ChatCompletionMessageToolCall
+ msgToolCalls := make(map[int64]openai.ChatCompletionMessageToolCall)
for openaiStream.Next() {
chunk := openaiStream.Current()
- if len(chunk.Choices) == 0 {
- continue
- }
// Kujtim: this is an issue with openrouter qwen, its sending -1 for the tool index
- if len(chunk.Choices[0].Delta.ToolCalls) > 0 && chunk.Choices[0].Delta.ToolCalls[0].Index == -1 {
+ if len(chunk.Choices) != 0 && len(chunk.Choices[0].Delta.ToolCalls) > 0 && chunk.Choices[0].Delta.ToolCalls[0].Index == -1 {
chunk.Choices[0].Delta.ToolCalls[0].Index = 0
}
acc.AddChunk(chunk)
- for i, choice := range chunk.Choices {
+ for _, choice := range chunk.Choices {
reasoning, ok := choice.Delta.JSON.ExtraFields["reasoning"]
if ok && reasoning.Raw() != "" {
reasoningStr := ""
@@ -374,14 +370,14 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
} else if len(choice.Delta.ToolCalls) > 0 {
toolCall := choice.Delta.ToolCalls[0]
newToolCall := false
- if len(msgToolCalls)-1 >= int(toolCall.Index) { // tool call exists
- existingToolCall := msgToolCalls[toolCall.Index]
+ if existingToolCall, ok := msgToolCalls[toolCall.Index]; ok { // tool call exists
if toolCall.ID != "" && toolCall.ID != existingToolCall.ID {
found := false
// try to find the tool based on the ID
- for i, tool := range msgToolCalls {
+ for _, tool := range msgToolCalls {
if tool.ID == toolCall.ID {
- msgToolCalls[i].Function.Arguments += toolCall.Function.Arguments
+ existingToolCall.Function.Arguments += toolCall.Function.Arguments
+ msgToolCalls[toolCall.Index] = existingToolCall
found = true
}
}
@@ -389,7 +385,8 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
newToolCall = true
}
} else {
- msgToolCalls[toolCall.Index].Function.Arguments += toolCall.Function.Arguments
+ existingToolCall.Function.Arguments += toolCall.Function.Arguments
+ msgToolCalls[toolCall.Index] = existingToolCall
}
} else {
newToolCall = true
@@ -406,17 +403,16 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
Finished: false,
},
}
- msgToolCalls = append(msgToolCalls, openai.ChatCompletionMessageToolCall{
+ msgToolCalls[toolCall.Index] = openai.ChatCompletionMessageToolCall{
ID: toolCall.ID,
Type: "function",
Function: openai.ChatCompletionMessageToolCallFunction{
Name: toolCall.Function.Name,
Arguments: toolCall.Function.Arguments,
},
- })
+ }
}
}
- acc.Choices[i].Message.ToolCalls = slices.Clone(msgToolCalls)
}
}
@@ -541,6 +537,10 @@ func (o *openaiClient) toolCalls(completion openai.ChatCompletion) []message.Too
if len(completion.Choices) > 0 && len(completion.Choices[0].Message.ToolCalls) > 0 {
for _, call := range completion.Choices[0].Message.ToolCalls {
+ // the accumulator sometimes emits tool calls with an empty function name; skip them.
+ if call.Function.Name == "" {
+ continue
+ }
toolCall := message.ToolCall{
ID: call.ID,
Name: call.Function.Name,
From 71bca980cce575bf79b1f91e206fb941577dbfc0 Mon Sep 17 00:00:00 2001
From: Kujtim Hoxha
Date: Thu, 28 Aug 2025 12:35:38 -0400
Subject: [PATCH 008/236] fix: assistant message
---
internal/llm/provider/openai.go | 39 ++++++++++-----------------------
1 file changed, 11 insertions(+), 28 deletions(-)
diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index ffed9325e0a70fb86ffe2fecd5b7f00e63e3e215..b26ef3477a82edd349236194d215ed805774f2fa 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -125,33 +125,7 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
Role: "assistant",
}
- hasContent := false
- if msg.Content().String() != "" {
- hasContent = true
- textBlock := openai.ChatCompletionContentPartTextParam{Text: msg.Content().String()}
- if cache && !o.providerOptions.disableCache && isAnthropicModel {
- textBlock.SetExtraFields(map[string]any{
- "cache_control": map[string]string{
- "type": "ephemeral",
- },
- })
- }
- assistantMsg.Content = openai.ChatCompletionAssistantMessageParamContentUnion{
- OfArrayOfContentParts: []openai.ChatCompletionAssistantMessageParamContentArrayOfContentPartUnion{
- {
- OfText: &textBlock,
- },
- },
- }
- if !isAnthropicModel {
- assistantMsg.Content = openai.ChatCompletionAssistantMessageParamContentUnion{
- OfString: param.NewOpt(msg.Content().String()),
- }
- }
- }
-
if len(msg.ToolCalls()) > 0 {
- hasContent = true
assistantMsg.ToolCalls = make([]openai.ChatCompletionMessageToolCallParam, len(msg.ToolCalls()))
for i, call := range msg.ToolCalls() {
assistantMsg.ToolCalls[i] = openai.ChatCompletionMessageToolCallParam{
@@ -164,10 +138,19 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
}
}
}
- if !hasContent {
- continue
+ if msg.Content().String() != "" {
+ assistantMsg.Content = openai.ChatCompletionAssistantMessageParamContentUnion{
+ OfString: param.NewOpt(msg.Content().Text),
+ }
}
+ if cache && !o.providerOptions.disableCache && isAnthropicModel {
+ assistantMsg.SetExtraFields(map[string]any{
+ "cache_control": map[string]string{
+ "type": "ephemeral",
+ },
+ })
+ }
openaiMessages = append(openaiMessages, openai.ChatCompletionMessageParamUnion{
OfAssistant: &assistantMsg,
})
From 2b1d87d289176346fa32fc8b0610a7f74e17a9a6 Mon Sep 17 00:00:00 2001
From: Kujtim Hoxha
Date: Thu, 28 Aug 2025 15:43:16 -0400
Subject: [PATCH 010/236] fix: handle providers that do not send the right
index
---
internal/llm/provider/openai.go | 12 +++++++++++-
1 file changed, 11 insertions(+), 1 deletion(-)
diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index b26ef3477a82edd349236194d215ed805774f2fa..2e3623f2c56e2497a24116ef9565d7442173f953 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -325,6 +325,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
currentContent := ""
toolCalls := make([]message.ToolCall, 0)
msgToolCalls := make(map[int64]openai.ChatCompletionMessageToolCall)
+ toolMap := make(map[string]openai.ChatCompletionMessageToolCall)
for openaiStream.Next() {
chunk := openaiStream.Current()
// Kujtim: this is an issue with openrouter qwen, its sending -1 for the tool index
@@ -332,7 +333,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
chunk.Choices[0].Delta.ToolCalls[0].Index = 0
}
acc.AddChunk(chunk)
- for _, choice := range chunk.Choices {
+ for i, choice := range chunk.Choices {
reasoning, ok := choice.Delta.JSON.ExtraFields["reasoning"]
if ok && reasoning.Raw() != "" {
reasoningStr := ""
@@ -361,6 +362,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
if tool.ID == toolCall.ID {
existingToolCall.Function.Arguments += toolCall.Function.Arguments
msgToolCalls[toolCall.Index] = existingToolCall
+ toolMap[existingToolCall.ID] = existingToolCall
found = true
}
}
@@ -370,6 +372,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
} else {
existingToolCall.Function.Arguments += toolCall.Function.Arguments
msgToolCalls[toolCall.Index] = existingToolCall
+ toolMap[existingToolCall.ID] = existingToolCall
}
} else {
newToolCall = true
@@ -394,7 +397,14 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
Arguments: toolCall.Function.Arguments,
},
}
+ toolMap[toolCall.ID] = msgToolCalls[toolCall.Index]
+
+ }
+ toolCalls := []openai.ChatCompletionMessageToolCall{}
+ for _, tc := range toolMap {
+ toolCalls = append(toolCalls, tc)
}
+ acc.Choices[i].Message.ToolCalls = toolCalls
}
}
}
From b42b1b66db719df7e0e19bcddfe150bfe9167290 Mon Sep 17 00:00:00 2001
From: Kujtim Hoxha
Date: Thu, 28 Aug 2025 15:46:02 -0400
Subject: [PATCH 011/236] chore: lint
---
internal/llm/provider/openai.go | 1 -
1 file changed, 1 deletion(-)
diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index 2e3623f2c56e2497a24116ef9565d7442173f953..4e88aea895120db101ae67ff95139fc28fdaf3a6 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -398,7 +398,6 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
},
}
toolMap[toolCall.ID] = msgToolCalls[toolCall.Index]
-
}
toolCalls := []openai.ChatCompletionMessageToolCall{}
for _, tc := range toolMap {
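Patches 007 through 011 rework how streamed tool-call deltas are merged: a map keyed by the provider-reported index, plus an ID-keyed map for providers that reuse or misreport indexes (including the OpenRouter/Qwen case that sends -1). A self-contained sketch of that merging logic with simplified, hypothetical delta types:

    package main

    import "fmt"

    // Delta is a simplified stand-in for a streamed tool-call fragment.
    type Delta struct {
        Index int64
        ID    string
        Name  string
        Args  string
    }

    type ToolCall struct {
        ID, Name, Args string
    }

    // merge folds streaming deltas into complete tool calls, keyed by index,
    // falling back to the call ID when a provider reuses an index.
    func merge(deltas []Delta) map[string]ToolCall {
        byIndex := make(map[int64]ToolCall)
        byID := make(map[string]ToolCall)
        for _, d := range deltas {
            if d.Index < 0 { // some providers send -1 for the index
                d.Index = 0
            }
            cur, ok := byIndex[d.Index]
            switch {
            case !ok:
                cur = ToolCall{ID: d.ID, Name: d.Name}
            case d.ID != "" && d.ID != cur.ID:
                // The index points at a different call: look it up by ID,
                // or start a new one.
                if existing, found := byID[d.ID]; found {
                    cur = existing
                } else {
                    cur = ToolCall{ID: d.ID, Name: d.Name}
                }
            }
            cur.Args += d.Args
            byIndex[d.Index] = cur
            byID[cur.ID] = cur
        }
        return byID
    }

    func main() {
        out := merge([]Delta{
            {Index: 0, ID: "call_1", Name: "ls", Args: `{"path":`},
            {Index: 0, Args: `"."}`},
        })
        fmt.Printf("%+v\n", out["call_1"])
    }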
From 48553e1d677be8f06c7e7906f23121954ce6a0db Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Thu, 28 Aug 2025 17:12:32 -0300
Subject: [PATCH 012/236] chore(labeler): adjust xai grok label
---
.github/labeler.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/labeler.yml b/.github/labeler.yml
index c8f2a3fbe357bfb93440881fda03fb7c87f6cd55..75642def1c1e84476d692bee5e8711f52208d05d 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -47,8 +47,6 @@
- "/gemini/i"
"provider: google vertex":
- "/vertex/i"
-"provider: grok":
- - "/grok/i"
"provider: kimi":
- "/kimi/i"
"provider: ollama":
@@ -59,5 +57,7 @@
- "/openrouter/i"
"provider: qwen":
- "/qwen/i"
+"provider: xai grok":
+ - "/(xai|x\\.ai|grok)/i"
"security":
- "/(security|vulnerability|exploit)/i"
From e0733b5e37e8dfc685a055e7e906119bb45c9bcf Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Thu, 28 Aug 2025 17:15:34 -0300
Subject: [PATCH 013/236] ci: rename "issue labeler" to just "labeler" (because
it also labels prs)
---
.github/workflows/{issue-labeler.yml => labeler.yml} | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
rename .github/workflows/{issue-labeler.yml => labeler.yml} (92%)
diff --git a/.github/workflows/issue-labeler.yml b/.github/workflows/labeler.yml
similarity index 92%
rename from .github/workflows/issue-labeler.yml
rename to .github/workflows/labeler.yml
index f0cb041b935dcdec6e63013972b686a66404398b..afc6427ff864eaf4929b831c7df23a2699304528 100644
--- a/.github/workflows/issue-labeler.yml
+++ b/.github/workflows/labeler.yml
@@ -1,4 +1,4 @@
-name: Issue Labeler
+name: labeler
on:
issues:
@@ -8,7 +8,7 @@ on:
workflow_dispatch:
inputs:
issue-number:
- description: Issue Number
+ description: "Issue/PR #"
required: true
type: string
From 2ba1047fc44234133f239ef85c740981596c619c Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Thu, 28 Aug 2025 18:53:09 -0300
Subject: [PATCH 014/236] chore(legal): @undo76 has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index cb4bb824ccea5ce8cff5caee83e5eed683629df8..dd8f16ca300d058543540a6959554fa7e0286227 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -527,6 +527,14 @@
"created_at": "2025-08-28T08:23:46Z",
"repoId": 987670088,
"pullRequestNo": 914
+ },
+ {
+ "name": "undo76",
+ "id": 1415667,
+ "comment_id": 3235052544,
+ "created_at": "2025-08-28T21:53:00Z",
+ "repoId": 987670088,
+ "pullRequestNo": 921
}
]
}
\ No newline at end of file
From 72dc56689031b5d79225e36ef6d85c6ddfcd3760 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Fri, 29 Aug 2025 10:12:56 -0300
Subject: [PATCH 015/236] sec: show persistent shell path in permission dialog
(#916)
Currently it always shows the path from when the shell was started.
Signed-off-by: Carlos Alexandro Becker
---
internal/llm/tools/bash.go | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/internal/llm/tools/bash.go b/internal/llm/tools/bash.go
index ad8c799ac61485b3ea7d99b03643644dfa5ee319..6b55820632029e84f9381faa5ca2bd25734abeee 100644
--- a/internal/llm/tools/bash.go
+++ b/internal/llm/tools/bash.go
@@ -370,10 +370,11 @@ func (b *bashTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error)
return ToolResponse{}, fmt.Errorf("session ID and message ID are required for executing shell command")
}
if !isSafeReadOnly {
+ shell := shell.GetPersistentShell(b.workingDir)
p := b.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
- Path: b.workingDir,
+ Path: shell.GetWorkingDir(),
ToolCallID: call.ID,
ToolName: BashToolName,
Action: "execute",
From a2e89c82fb02965730366f347f91d702f0cd99f6 Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Fri, 29 Aug 2025 12:12:11 -0300
Subject: [PATCH 016/236] chore: `IsSubset` was moved to `x/exp/slice` (#923)
---
go.mod | 2 +-
go.sum | 4 +-
internal/shell/shell.go | 4 +-
internal/slicesext/slices.go | 17 ----
internal/slicesext/slices_test.go | 158 ------------------------------
5 files changed, 5 insertions(+), 180 deletions(-)
delete mode 100644 internal/slicesext/slices.go
delete mode 100644 internal/slicesext/slices_test.go
diff --git a/go.mod b/go.mod
index 83b796964abf2f2a39af6ca76da7d7a16163d11a..1bd1b27a7707b29632e9069368e3dfca100fbb2b 100644
--- a/go.mod
+++ b/go.mod
@@ -76,7 +76,7 @@ require (
github.com/charmbracelet/colorprofile v0.3.2 // indirect
github.com/charmbracelet/ultraviolet v0.0.0-20250813213450-50737e162af5
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a // indirect
- github.com/charmbracelet/x/exp/slice v0.0.0-20250611152503-f53cdd7e01ef
+ github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d
github.com/charmbracelet/x/term v0.2.1
github.com/charmbracelet/x/termios v0.1.1 // indirect
github.com/charmbracelet/x/windows v0.2.2 // indirect
diff --git a/go.sum b/go.sum
index df215cd1458bc6a49da822616e21847628c7730d..74cff4241beb8bf7551cc327422b8431ef95085a 100644
--- a/go.sum
+++ b/go.sum
@@ -102,8 +102,8 @@ github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3 h1:1
github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3/go.mod h1:T9jr8CzFpjhFVHjNjKwbAD7KwBNyFnj2pntAO7F2zw0=
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a h1:FsHEJ52OC4VuTzU8t+n5frMjLvpYWEznSr/u8tnkCYw=
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
-github.com/charmbracelet/x/exp/slice v0.0.0-20250611152503-f53cdd7e01ef h1:v7qwsZ2OxzlwvpKwz8dtZXp7fIJlcDEUOyFBNE4fz4Q=
-github.com/charmbracelet/x/exp/slice v0.0.0-20250611152503-f53cdd7e01ef/go.mod h1:vI5nDVMWi6veaYH+0Fmvpbe/+cv/iJfMntdh+N0+Tms=
+github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d h1:H2oh4WlSsXy8qwLd7I3eAvPd/X3S40aM9l+h47WF1eA=
+github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d/go.mod h1:vI5nDVMWi6veaYH+0Fmvpbe/+cv/iJfMntdh+N0+Tms=
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
github.com/charmbracelet/x/termios v0.1.1 h1:o3Q2bT8eqzGnGPOYheoYS8eEleT5ZVNYNy8JawjaNZY=
diff --git a/internal/shell/shell.go b/internal/shell/shell.go
index 618899d808b90163ac82c6970440c26c0110db76..ef3abf8d30d37490e452478abe38ef39efd8a7fa 100644
--- a/internal/shell/shell.go
+++ b/internal/shell/shell.go
@@ -20,7 +20,7 @@ import (
"strings"
"sync"
- "github.com/charmbracelet/crush/internal/slicesext"
+ "github.com/charmbracelet/x/exp/slice"
"mvdan.cc/sh/moreinterp/coreutils"
"mvdan.cc/sh/v3/expand"
"mvdan.cc/sh/v3/interp"
@@ -186,7 +186,7 @@ func ArgumentsBlocker(cmd string, args []string, flags []string) BlockFunc {
}
argsMatch := slices.Equal(argParts[:len(args)], args)
- flagsMatch := slicesext.IsSubset(flags, flagParts)
+ flagsMatch := slice.IsSubset(flags, flagParts)
return argsMatch && flagsMatch
}
diff --git a/internal/slicesext/slices.go b/internal/slicesext/slices.go
deleted file mode 100644
index 9d4e1a07d4439f9d686562e1b9b91894289726a8..0000000000000000000000000000000000000000
--- a/internal/slicesext/slices.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package slicesext
-
-func IsSubset[T comparable](a, b []T) bool {
- if len(a) > len(b) {
- return false
- }
- set := make(map[T]struct{}, len(b))
- for _, item := range b {
- set[item] = struct{}{}
- }
- for _, item := range a {
- if _, exists := set[item]; !exists {
- return false
- }
- }
- return true
-}
diff --git a/internal/slicesext/slices_test.go b/internal/slicesext/slices_test.go
deleted file mode 100644
index 3593209513f44e55a51f510094aeebf550f08f75..0000000000000000000000000000000000000000
--- a/internal/slicesext/slices_test.go
+++ /dev/null
@@ -1,158 +0,0 @@
-package slicesext
-
-import (
- "testing"
-
- "github.com/stretchr/testify/require"
-)
-
-func TestIsSubset(t *testing.T) {
- tests := []struct {
- name string
- a []string
- b []string
- expect bool
- }{
- // Basic subset cases
- {
- name: "empty subset of empty",
- a: []string{},
- b: []string{},
- expect: true,
- },
- {
- name: "empty subset of non-empty",
- a: []string{},
- b: []string{"a", "b", "c"},
- expect: true,
- },
- {
- name: "non-empty not subset of empty",
- a: []string{"a"},
- b: []string{},
- expect: false,
- },
- {
- name: "single element subset",
- a: []string{"b"},
- b: []string{"a", "b", "c"},
- expect: true,
- },
- {
- name: "single element not subset",
- a: []string{"d"},
- b: []string{"a", "b", "c"},
- expect: false,
- },
- {
- name: "multiple elements subset",
- a: []string{"a", "c"},
- b: []string{"a", "b", "c", "d"},
- expect: true,
- },
- {
- name: "multiple elements not subset",
- a: []string{"a", "e"},
- b: []string{"a", "b", "c", "d"},
- expect: false,
- },
- {
- name: "equal sets are subsets",
- a: []string{"a", "b", "c"},
- b: []string{"a", "b", "c"},
- expect: true,
- },
- {
- name: "larger set not subset of smaller",
- a: []string{"a", "b", "c", "d"},
- b: []string{"a", "b"},
- expect: false,
- },
-
- // Order independence
- {
- name: "subset with different order",
- a: []string{"c", "a"},
- b: []string{"b", "a", "d", "c"},
- expect: true,
- },
-
- // Duplicate handling
- {
- name: "duplicates in subset",
- a: []string{"a", "a", "b"},
- b: []string{"a", "b", "c"},
- expect: true,
- },
- {
- name: "duplicates in superset",
- a: []string{"a", "b"},
- b: []string{"a", "a", "b", "b", "c"},
- expect: true,
- },
- {
- name: "duplicates in both",
- a: []string{"a", "a", "b"},
- b: []string{"a", "a", "b", "b", "c"},
- expect: true,
- },
-
- // Real-world examples
- {
- name: "npm flags subset",
- a: []string{"-g"},
- b: []string{"-g", "--verbose", "--save-dev"},
- expect: true,
- },
- {
- name: "npm flags not subset",
- a: []string{"--global"},
- b: []string{"-g", "--verbose", "--save-dev"},
- expect: false,
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- result := IsSubset(tt.a, tt.b)
- require.Equal(t, tt.expect, result,
- "IsSubset(%v, %v) should be %v", tt.a, tt.b, tt.expect)
- })
- }
-}
-
-func TestIsSubsetWithInts(t *testing.T) {
- tests := []struct {
- name string
- a []int
- b []int
- expect bool
- }{
- {
- name: "int subset",
- a: []int{1, 3},
- b: []int{1, 2, 3, 4},
- expect: true,
- },
- {
- name: "int not subset",
- a: []int{1, 5},
- b: []int{1, 2, 3, 4},
- expect: false,
- },
- {
- name: "empty int subset",
- a: []int{},
- b: []int{1, 2, 3},
- expect: true,
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- result := IsSubset(tt.a, tt.b)
- require.Equal(t, tt.expect, result,
- "IsSubset(%v, %v) should be %v", tt.a, tt.b, tt.expect)
- })
- }
-}
From e76fd5488f9e72ea75784e2c4c90f9350a444b45 Mon Sep 17 00:00:00 2001
From: Manolo Santos
Date: Thu, 28 Aug 2025 23:11:33 +0200
Subject: [PATCH 017/236] fix: tool calls break the conversation if interrupted.
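When a generation is interrupted, tool calls can be left unfinished; replaying
them to the provider corrupts the next request, so the message converters now
skip them. A minimal sketch of the filtering step, assuming the message.ToolCall
shape used in the diffs below (keepFinished is an illustrative helper, not code
from this patch):

    // keepFinished drops tool calls that were interrupted before completion
    // so they are never sent back to the provider on the next request.
    func keepFinished(calls []message.ToolCall) []message.ToolCall {
        finished := make([]message.ToolCall, 0, len(calls))
        for _, call := range calls {
            if call.Finished {
                finished = append(finished, call)
            }
        }
        return finished
    }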
---
internal/llm/provider/anthropic.go | 3 +++
internal/llm/provider/gemini.go | 3 +++
internal/llm/provider/openai.go | 32 +++++++++++++++++++++---------
3 files changed, 29 insertions(+), 9 deletions(-)
diff --git a/internal/llm/provider/anthropic.go b/internal/llm/provider/anthropic.go
index 4448cd79d52782e2ec8a2410e03aa6f59e6ed16c..3fffd6b35fe6ee8b6a765e2e5b815ad36a5b6a55 100644
--- a/internal/llm/provider/anthropic.go
+++ b/internal/llm/provider/anthropic.go
@@ -151,6 +151,9 @@ func (a *anthropicClient) convertMessages(messages []message.Message) (anthropic
}
for _, toolCall := range msg.ToolCalls() {
+ if !toolCall.Finished {
+ continue
+ }
var inputMap map[string]any
err := json.Unmarshal([]byte(toolCall.Input), &inputMap)
if err != nil {
diff --git a/internal/llm/provider/gemini.go b/internal/llm/provider/gemini.go
index 21f4e65dfb400d449d3da050fe4c2436ffa66c0e..2e02bd088b57c9434d1d534204b664f3ef7443ed 100644
--- a/internal/llm/provider/gemini.go
+++ b/internal/llm/provider/gemini.go
@@ -81,6 +81,9 @@ func (g *geminiClient) convertMessages(messages []message.Message) []*genai.Cont
if len(msg.ToolCalls()) > 0 {
for _, call := range msg.ToolCalls() {
+ if !call.Finished {
+ continue
+ }
args, _ := parseJSONToMap(call.Input)
assistantParts = append(assistantParts, &genai.Part{
FunctionCall: &genai.FunctionCall{
diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index 4e88aea895120db101ae67ff95139fc28fdaf3a6..eb5a84867aecf0a76b30a7c022ccb14bf6a2139a 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -125,16 +125,25 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
Role: "assistant",
}
+ // Only include finished tool calls; interrupted tool calls must not be resent.
if len(msg.ToolCalls()) > 0 {
- assistantMsg.ToolCalls = make([]openai.ChatCompletionMessageToolCallParam, len(msg.ToolCalls()))
- for i, call := range msg.ToolCalls() {
- assistantMsg.ToolCalls[i] = openai.ChatCompletionMessageToolCallParam{
- ID: call.ID,
- Type: "function",
- Function: openai.ChatCompletionMessageToolCallFunctionParam{
- Name: call.Name,
- Arguments: call.Input,
- },
+ finished := make([]message.ToolCall, 0, len(msg.ToolCalls()))
+ for _, call := range msg.ToolCalls() {
+ if call.Finished {
+ finished = append(finished, call)
+ }
+ }
+ if len(finished) > 0 {
+ assistantMsg.ToolCalls = make([]openai.ChatCompletionMessageToolCallParam, len(finished))
+ for i, call := range finished {
+ assistantMsg.ToolCalls[i] = openai.ChatCompletionMessageToolCallParam{
+ ID: call.ID,
+ Type: "function",
+ Function: openai.ChatCompletionMessageToolCallFunctionParam{
+ Name: call.Name,
+ Arguments: call.Input,
+ },
+ }
}
}
}
@@ -151,6 +160,11 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
},
})
}
+ // Skip empty assistant messages (no content and no finished tool calls)
+ if msg.Content().String() == "" && len(assistantMsg.ToolCalls) == 0 {
+ continue
+ }
+
openaiMessages = append(openaiMessages, openai.ChatCompletionMessageParamUnion{
OfAssistant: &assistantMsg,
})
From d83ad6b434c04dbbafdbe0edcd305d59856cb97b Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Fri, 29 Aug 2025 13:54:09 -0300
Subject: [PATCH 018/236] chore(legal): @andersonjoseph has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index dd8f16ca300d058543540a6959554fa7e0286227..a7dcc471fbe8637ec323ef377e32b90bce67e768 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -535,6 +535,14 @@
"created_at": "2025-08-28T21:53:00Z",
"repoId": 987670088,
"pullRequestNo": 921
+ },
+ {
+ "name": "andersonjoseph",
+ "id": 22438127,
+ "comment_id": 3237655829,
+ "created_at": "2025-08-29T16:54:00Z",
+ "repoId": 987670088,
+ "pullRequestNo": 926
}
]
}
\ No newline at end of file
From 4d3b1d91ea61676c6a0444d3c0fd4863cae0ebca Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Sun, 31 Aug 2025 13:19:41 -0300
Subject: [PATCH 019/236] chore(legal): @tisDDM has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index a7dcc471fbe8637ec323ef377e32b90bce67e768..53bdce0574ce5467203bf1fb74c5e928f8699f7c 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -543,6 +543,14 @@
"created_at": "2025-08-29T16:54:00Z",
"repoId": 987670088,
"pullRequestNo": 926
+ },
+ {
+ "name": "tisDDM",
+ "id": 77615100,
+ "comment_id": 3240239275,
+ "created_at": "2025-08-31T15:58:52Z",
+ "repoId": 987670088,
+ "pullRequestNo": 944
}
]
}
\ No newline at end of file
From 1ab4d28ce357014b1974604cb3213be597f190cf Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Tue, 2 Sep 2025 14:55:11 -0300
Subject: [PATCH 020/236] chore(deps): update mvdan.cc/sh (#948)
Signed-off-by: Carlos Alexandro Becker
---
go.mod | 4 ++--
go.sum | 8 ++++----
2 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/go.mod b/go.mod
index 1bd1b27a7707b29632e9069368e3dfca100fbb2b..0293ab00eeec1fa64b891e07dc21a4780c4b47a5 100644
--- a/go.mod
+++ b/go.mod
@@ -44,7 +44,7 @@ require (
github.com/tidwall/sjson v1.2.5
github.com/zeebo/xxh3 v1.0.2
gopkg.in/natefinch/lumberjack.v2 v2.2.1
- mvdan.cc/sh/v3 v3.12.1-0.20250726150758-e256f53bade8
+ mvdan.cc/sh/v3 v3.12.1-0.20250902163504-3cf4fd5717a5
)
require (
@@ -152,5 +152,5 @@ require (
google.golang.org/protobuf v1.36.6 // indirect
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
- mvdan.cc/sh/moreinterp v0.0.0-20250807215248-5a1a658912aa
+ mvdan.cc/sh/moreinterp v0.0.0-20250902163504-3cf4fd5717a5
)
diff --git a/go.sum b/go.sum
index 74cff4241beb8bf7551cc327422b8431ef95085a..5f30a3d58a5bdcee47de2c4621d6b9a26e312c32 100644
--- a/go.sum
+++ b/go.sum
@@ -443,7 +443,7 @@ modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=
modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
modernc.org/sqlite v1.38.2 h1:Aclu7+tgjgcQVShZqim41Bbw9Cho0y/7WzYptXqkEek=
modernc.org/sqlite v1.38.2/go.mod h1:cPTJYSlgg3Sfg046yBShXENNtPrWrDX8bsbAQBzgQ5E=
-mvdan.cc/sh/moreinterp v0.0.0-20250807215248-5a1a658912aa h1:sRmA9AmA5+9CbK6a7N52q9W9jAeoBy1EJ7cncm+SLxw=
-mvdan.cc/sh/moreinterp v0.0.0-20250807215248-5a1a658912aa/go.mod h1:Of9PCedbLDYT8b3EyiYG64rNnx5nOp27OLCVdDrjJyo=
-mvdan.cc/sh/v3 v3.12.1-0.20250726150758-e256f53bade8 h1:yOKqXg3uKDx7VxqnrKRUzB+InP3whTBi7jeggyFkfX0=
-mvdan.cc/sh/v3 v3.12.1-0.20250726150758-e256f53bade8/go.mod h1:Se6Cj17eYSn+sNooLZiEUnNNmNxg0imoYlTu4CyaGyg=
+mvdan.cc/sh/moreinterp v0.0.0-20250902163504-3cf4fd5717a5 h1:mO2lyKtGwu4mGQ+Qqjx0+fd5UU5BXhX/rslFmxd5aco=
+mvdan.cc/sh/moreinterp v0.0.0-20250902163504-3cf4fd5717a5/go.mod h1:Of9PCedbLDYT8b3EyiYG64rNnx5nOp27OLCVdDrjJyo=
+mvdan.cc/sh/v3 v3.12.1-0.20250902163504-3cf4fd5717a5 h1:e7Z/Lgw/zMijvQBVrfh/vUDZ+9FpuSLrJDVGBuoJtuo=
+mvdan.cc/sh/v3 v3.12.1-0.20250902163504-3cf4fd5717a5/go.mod h1:P21wo2gLLe3426sP+CmANLBaixSEbRtPl35w3YlM6dg=
From c10748b7e14956861edba44203b052a1b5b5bdd7 Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Tue, 2 Sep 2025 15:06:20 -0300
Subject: [PATCH 021/236] chore(deps): update catwalk to v0.5.3 (#949)
---
go.mod | 2 +-
go.sum | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/go.mod b/go.mod
index 0293ab00eeec1fa64b891e07dc21a4780c4b47a5..e60c2fa5eb50811c258ed2e833c73083c6371465 100644
--- a/go.mod
+++ b/go.mod
@@ -14,7 +14,7 @@ require (
github.com/charlievieth/fastwalk v1.0.12
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250813213544-5cc219db8892
- github.com/charmbracelet/catwalk v0.4.12
+ github.com/charmbracelet/catwalk v0.5.3
github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0
diff --git a/go.sum b/go.sum
index 5f30a3d58a5bdcee47de2c4621d6b9a26e312c32..668cc533e5d8b33c7a21de01f0608cc075b18307 100644
--- a/go.sum
+++ b/go.sum
@@ -80,8 +80,8 @@ github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250813213544-5cc219db8892 h1:lqoYD2DrKhSdC9xCr59JMXtbbdR5/AZ6xfd/G8eOQJM=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250813213544-5cc219db8892/go.mod h1:TUpoECaG4/3CwFx5lTlXNpR87Yo7gOwGqucnHGfAm20=
-github.com/charmbracelet/catwalk v0.4.12 h1:HN7l/VVH+ecJbropJSoODeAawPuWk2mBApn99fs1MGM=
-github.com/charmbracelet/catwalk v0.4.12/go.mod h1:WnKgNPmQHuMyk7GtwAQwl+ezHusfH40IvzML2qwUGwc=
+github.com/charmbracelet/catwalk v0.5.3 h1:Hw9DlX8u79K9iLQJB4Bti9/rTzMvEpBjE/GyniWxHNY=
+github.com/charmbracelet/catwalk v0.5.3/go.mod h1:WnKgNPmQHuMyk7GtwAQwl+ezHusfH40IvzML2qwUGwc=
github.com/charmbracelet/colorprofile v0.3.2 h1:9J27WdztfJQVAQKX2WOlSSRB+5gaKqqITmrvb1uTIiI=
github.com/charmbracelet/colorprofile v0.3.2/go.mod h1:mTD5XzNeWHj8oqHb+S1bssQb7vIHbepiebQ2kPKVKbI=
github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674 h1:+Cz+VfxD5DO+JT1LlswXWhre0HYLj6l2HW8HVGfMuC0=
From 39de90f6e6f3f8a449ecda833d2ab652ea06d242 Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Fri, 5 Sep 2025 15:30:09 -0300
Subject: [PATCH 022/236] chore(legal): @shaitanu has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index 53bdce0574ce5467203bf1fb74c5e928f8699f7c..8f3354989a6fef7e5879558fa5f54b77d377b2ac 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -551,6 +551,14 @@
"created_at": "2025-08-31T15:58:52Z",
"repoId": 987670088,
"pullRequestNo": 944
+ },
+ {
+ "name": "shaitanu",
+ "id": 117723026,
+ "comment_id": 3259367914,
+ "created_at": "2025-09-05T18:29:58Z",
+ "repoId": 987670088,
+ "pullRequestNo": 968
}
]
}
\ No newline at end of file
From 60d85105ec8875b3a03ffec00eaa3621863408d2 Mon Sep 17 00:00:00 2001
From: Kujtim Hoxha
Date: Sat, 6 Sep 2025 23:09:06 +0200
Subject: [PATCH 023/236] fix: handle no content for gemini provider
---
internal/llm/agent/agent.go | 10 ++++++++--
internal/llm/provider/gemini.go | 9 +++++++++
2 files changed, 17 insertions(+), 2 deletions(-)
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index 0015e498f986c67dd4477a6fb35e8846c8442b9e..13b65cccc79ded8f1f7267063898216defb38908 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -395,7 +395,7 @@ func (a *agent) processGeneration(ctx context.Context, sessionID, content string
defer log.RecoverPanic("agent.Run", func() {
slog.Error("panic while generating title")
})
- titleErr := a.generateTitle(context.Background(), sessionID, content)
+ titleErr := a.generateTitle(ctx, sessionID, content)
if titleErr != nil && !errors.Is(titleErr, context.Canceled) && !errors.Is(titleErr, context.DeadlineExceeded) {
slog.Error("failed to generate title", "error", titleErr)
}
@@ -996,11 +996,17 @@ func (a *agent) UpdateModel() error {
return fmt.Errorf("provider %s not found in config", largeModelCfg.Provider)
}
+ var maxTitleTokens int64 = 40
+
+ // if the max output is too low for the gemini provider it won't return anything
+ if smallModelCfg.Provider == "gemini" {
+ maxTitleTokens = 1000
+ }
// Recreate title provider
titleOpts := []provider.ProviderClientOption{
provider.WithModel(config.SelectedModelTypeSmall),
provider.WithSystemMessage(prompt.GetPrompt(prompt.PromptTitle, smallModelProviderCfg.ID)),
- provider.WithMaxTokens(40),
+ provider.WithMaxTokens(maxTitleTokens),
}
newTitleProvider, err := provider.NewProvider(smallModelProviderCfg, titleOpts...)
if err != nil {
diff --git a/internal/llm/provider/gemini.go b/internal/llm/provider/gemini.go
index 2e02bd088b57c9434d1d534204b664f3ef7443ed..9d5164973a5ad86b4c0dee001e54b46b838b89e6 100644
--- a/internal/llm/provider/gemini.go
+++ b/internal/llm/provider/gemini.go
@@ -322,6 +322,7 @@ func (g *geminiClient) stream(ctx context.Context, messages []message.Message, t
for _, part := range lastMsg.Parts {
lastMsgParts = append(lastMsgParts, *part)
}
+
for resp, err := range chat.SendMessageStream(ctx, lastMsgParts...) {
if err != nil {
retry, after, retryErr := g.shouldRetry(attempts, err)
@@ -385,6 +386,9 @@ func (g *geminiClient) stream(ctx context.Context, messages []message.Message, t
}
}
}
+ } else {
+ // no content received
+ break
}
}
@@ -408,6 +412,11 @@ func (g *geminiClient) stream(ctx context.Context, messages []message.Message, t
},
}
return
+ } else {
+ eventChan <- ProviderEvent{
+ Type: EventError,
+ Error: errors.New("no content received"),
+ }
}
}
}()
From a24079e781fe41d4f725189112a143dc41b706bb Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 8 Sep 2025 10:00:48 +0000
Subject: [PATCH 024/236] chore(deps): bump actions/setup-go from 5 to 6 in the
all group (#991)
Bumps the all group with 1 update: [actions/setup-go](https://github.com/actions/setup-go).
Updates `actions/setup-go` from 5 to 6
- [Release notes](https://github.com/actions/setup-go/releases)
- [Commits](https://github.com/actions/setup-go/compare/v5...v6)
---
updated-dependencies:
- dependency-name: actions/setup-go
dependency-version: '6'
dependency-type: direct:production
update-type: version-update:semver-major
dependency-group: all
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
.github/workflows/schema-update.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/schema-update.yml b/.github/workflows/schema-update.yml
index b37b79c6e33b2ca60ce4030712dd23dc0f948f99..bc7e19b67f5021e8d3ff00342a062f2c6c000e86 100644
--- a/.github/workflows/schema-update.yml
+++ b/.github/workflows/schema-update.yml
@@ -13,7 +13,7 @@ jobs:
- uses: actions/checkout@v5
with:
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- - uses: actions/setup-go@v5
+ - uses: actions/setup-go@v6
with:
go-version-file: go.mod
- run: go run . schema > ./schema.json
From 92552fec7e04ada28f1e67e9050ac9b45bbe032e Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Mon, 8 Sep 2025 13:47:09 -0300
Subject: [PATCH 025/236] ci: disable `lint-sync` for now
---
.github/workflows/lint-sync.yml | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/.github/workflows/lint-sync.yml b/.github/workflows/lint-sync.yml
index ecf8580246f0165842891ee5f6e28992d15d68ff..df3578521df7640114709005a52866381a1555af 100644
--- a/.github/workflows/lint-sync.yml
+++ b/.github/workflows/lint-sync.yml
@@ -1,8 +1,8 @@
name: lint-sync
on:
- schedule:
- # every Sunday at midnight
- - cron: "0 0 * * 0"
+ # schedule:
+ # # every Sunday at midnight
+ # - cron: "0 0 * * 0"
workflow_dispatch: # allows manual triggering
permissions:
From 913ea55074c698d26ab2fa69c9298b00e53d69fc Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Mon, 8 Sep 2025 16:35:29 -0300
Subject: [PATCH 026/236] fix(openrouter): fix api key validation for
openrouter (#997)
`/models` is accessible to everyone on OpenRouter, without the need for
any authorization, so the `Authorization: Bearer *` header was effectively
always ignored by their API and an invalid key still passed the check.
This switches to a different, private API endpoint to validate the key for OpenRouter.
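A rough sketch of the idea using only the standard library; checkOpenRouterKey
is a hypothetical helper, not the function changed in this patch, and error
handling is simplified:

    // checkOpenRouterKey hits an endpoint that actually requires the key,
    // so an invalid key produces a non-200 response instead of being ignored.
    func checkOpenRouterKey(ctx context.Context, baseURL, apiKey string) error {
        req, err := http.NewRequestWithContext(ctx, http.MethodGet, baseURL+"/credits", nil)
        if err != nil {
            return err
        }
        req.Header.Set("Authorization", "Bearer "+apiKey)
        resp, err := http.DefaultClient.Do(req)
        if err != nil {
            return err
        }
        defer resp.Body.Close()
        if resp.StatusCode != http.StatusOK {
            return fmt.Errorf("openrouter key check failed: %s", resp.Status)
        }
        return nil
    }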
---
internal/config/config.go | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/internal/config/config.go b/internal/config/config.go
index 7fef3b11d9b08f60d1ee9554bed27fd142536f7a..4aae4b945298021f4fe899fefe0d1ee8f16089eb 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -460,7 +460,11 @@ func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
if baseURL == "" {
baseURL = "https://api.openai.com/v1"
}
- testURL = baseURL + "/models"
+ if c.Name == "OpenRouter" {
+ testURL = baseURL + "/credits"
+ } else {
+ testURL = baseURL + "/models"
+ }
headers["Authorization"] = "Bearer " + apiKey
case catwalk.TypeAnthropic:
baseURL, _ := resolver.ResolveValue(c.BaseURL)
From 65d8269dba1df14b856577cb0ab762659a207492 Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Mon, 8 Sep 2025 16:38:59 -0300
Subject: [PATCH 027/236] refactor: check for id instead of name
---
internal/config/config.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/internal/config/config.go b/internal/config/config.go
index 4aae4b945298021f4fe899fefe0d1ee8f16089eb..c126e7ff11a1f63e9c1ace21984888e76af71479 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -460,7 +460,7 @@ func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
if baseURL == "" {
baseURL = "https://api.openai.com/v1"
}
- if c.Name == "OpenRouter" {
+ if c.ID == "openrouter" {
testURL = baseURL + "/credits"
} else {
testURL = baseURL + "/models"
From a935301efc8ba8f4e46eb26d04a40df9ed7c1b62 Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Mon, 8 Sep 2025 17:13:44 -0300
Subject: [PATCH 028/236] chore(legal): @vadiminshakov has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index 8f3354989a6fef7e5879558fa5f54b77d377b2ac..72bc12dcf8a19c5cacdf7c6f6bf7740c87bdf718 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -559,6 +559,14 @@
"created_at": "2025-09-05T18:29:58Z",
"repoId": 987670088,
"pullRequestNo": 968
+ },
+ {
+ "name": "vadiminshakov",
+ "id": 26391516,
+ "comment_id": 3267808993,
+ "created_at": "2025-09-08T20:13:33Z",
+ "repoId": 987670088,
+ "pullRequestNo": 998
}
]
}
\ No newline at end of file
From c94c67d0983bd774285343969a13f35ba2685f93 Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Tue, 9 Sep 2025 07:30:58 -0300
Subject: [PATCH 029/236] chore(legal): @adriens has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index 72bc12dcf8a19c5cacdf7c6f6bf7740c87bdf718..44c09c1353894e405fb50a183223e40594ecfa32 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -567,6 +567,14 @@
"created_at": "2025-09-08T20:13:33Z",
"repoId": 987670088,
"pullRequestNo": 998
+ },
+ {
+ "name": "adriens",
+ "id": 5235127,
+ "comment_id": 3270041072,
+ "created_at": "2025-09-09T10:30:49Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1000
}
]
}
\ No newline at end of file
From 78540b012f6ace2a377a00daadf05176d0335ce8 Mon Sep 17 00:00:00 2001
From: bbrodriges
Date: Tue, 9 Sep 2025 16:47:53 +0300
Subject: [PATCH 030/236] security(sqlite): enable `secure_delete` pragma
(#966)
Co-authored-by: Nuno Cruces
---
internal/app/app.go | 15 ++++++++++----
internal/db/connect.go | 36 +++++++++++++++++----------------
internal/llm/agent/mcp-tools.go | 9 +++++++--
3 files changed, 37 insertions(+), 23 deletions(-)
diff --git a/internal/app/app.go b/internal/app/app.go
index 961ce5960e7d64e38c5d6548e881ed697f6283f9..cec42dc8610b4a6c72215766b7fd1c764381ef5a 100644
--- a/internal/app/app.go
+++ b/internal/app/app.go
@@ -50,7 +50,7 @@ type App struct {
// global context and cleanup functions
globalCtx context.Context
- cleanupFuncs []func()
+ cleanupFuncs []func() error
}
// New initializes a new applcation instance.
@@ -88,6 +88,9 @@ func New(ctx context.Context, conn *sql.DB, cfg *config.Config) (*App, error) {
// Initialize LSP clients in the background.
app.initLSPClients(ctx)
+ // cleanup database upon app shutdown
+ app.cleanupFuncs = append(app.cleanupFuncs, conn.Close)
+
// TODO: remove the concept of agent config, most likely.
if cfg.IsConfigured() {
if err := app.InitCoderAgent(); err != nil {
@@ -221,9 +224,10 @@ func (app *App) setupEvents() {
setupSubscriber(ctx, app.serviceEventsWG, "history", app.History.Subscribe, app.events)
setupSubscriber(ctx, app.serviceEventsWG, "mcp", agent.SubscribeMCPEvents, app.events)
setupSubscriber(ctx, app.serviceEventsWG, "lsp", SubscribeLSPEvents, app.events)
- cleanupFunc := func() {
+ cleanupFunc := func() error {
cancel()
app.serviceEventsWG.Wait()
+ return nil
}
app.cleanupFuncs = append(app.cleanupFuncs, cleanupFunc)
}
@@ -297,10 +301,11 @@ func (app *App) Subscribe(program *tea.Program) {
app.tuiWG.Add(1)
tuiCtx, tuiCancel := context.WithCancel(app.globalCtx)
- app.cleanupFuncs = append(app.cleanupFuncs, func() {
+ app.cleanupFuncs = append(app.cleanupFuncs, func() error {
slog.Debug("Cancelling TUI message handler")
tuiCancel()
app.tuiWG.Wait()
+ return nil
})
defer app.tuiWG.Done()
@@ -350,7 +355,9 @@ func (app *App) Shutdown() {
// Call call cleanup functions.
for _, cleanup := range app.cleanupFuncs {
if cleanup != nil {
- cleanup()
+ if err := cleanup(); err != nil {
+ slog.Error("Failed to cleanup app properly on shutdown", "error", err)
+ }
}
}
}
diff --git a/internal/db/connect.go b/internal/db/connect.go
index 110c3e0f8805b218c0c71dc11ee284edc0a23fa0..bfe768c7ae9a399afd61a9d0692841fbacbe164c 100644
--- a/internal/db/connect.go
+++ b/internal/db/connect.go
@@ -7,7 +7,8 @@ import (
"log/slog"
"path/filepath"
- _ "github.com/ncruces/go-sqlite3/driver"
+ "github.com/ncruces/go-sqlite3"
+ "github.com/ncruces/go-sqlite3/driver"
_ "github.com/ncruces/go-sqlite3/embed"
"github.com/pressly/goose/v3"
@@ -18,17 +19,6 @@ func Connect(ctx context.Context, dataDir string) (*sql.DB, error) {
return nil, fmt.Errorf("data.dir is not set")
}
dbPath := filepath.Join(dataDir, "crush.db")
- // Open the SQLite database
- db, err := sql.Open("sqlite3", dbPath)
- if err != nil {
- return nil, fmt.Errorf("failed to open database: %w", err)
- }
-
- // Verify connection
- if err = db.PingContext(ctx); err != nil {
- db.Close()
- return nil, fmt.Errorf("failed to connect to database: %w", err)
- }
// Set pragmas for better performance
pragmas := []string{
@@ -37,14 +27,25 @@ func Connect(ctx context.Context, dataDir string) (*sql.DB, error) {
"PRAGMA page_size = 4096;",
"PRAGMA cache_size = -8000;",
"PRAGMA synchronous = NORMAL;",
+ "PRAGMA secure_delete = ON;",
}
- for _, pragma := range pragmas {
- if _, err = db.ExecContext(ctx, pragma); err != nil {
- slog.Error("Failed to set pragma", pragma, err)
- } else {
- slog.Debug("Set pragma", "pragma", pragma)
+ db, err := driver.Open(dbPath, func(c *sqlite3.Conn) error {
+ for _, pragma := range pragmas {
+ if err := c.Exec(pragma); err != nil {
+ return fmt.Errorf("failed to set pragma `%s`: %w", pragma, err)
+ }
}
+ return nil
+ })
+ if err != nil {
+ return nil, fmt.Errorf("failed to open database: %w", err)
+ }
+
+ // Verify connection
+ if err = db.PingContext(ctx); err != nil {
+ db.Close()
+ return nil, fmt.Errorf("failed to connect to database: %w", err)
}
goose.SetBaseFS(FS)
@@ -58,5 +59,6 @@ func Connect(ctx context.Context, dataDir string) (*sql.DB, error) {
slog.Error("Failed to apply migrations", "error", err)
return nil, fmt.Errorf("failed to apply migrations: %w", err)
}
+
return db, nil
}
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index 0f6d2d0ab31ec34df16c9837335425e1f3b195bb..bb50231da028e714c783f50cc7ebd8a1f4b595db 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -4,6 +4,7 @@ import (
"cmp"
"context"
"encoding/json"
+ "errors"
"fmt"
"log/slog"
"maps"
@@ -253,11 +254,15 @@ func updateMCPState(name string, state MCPState, err error, client *client.Clien
}
// CloseMCPClients closes all MCP clients. This should be called during application shutdown.
-func CloseMCPClients() {
+func CloseMCPClients() error {
+ var errs []error
for c := range mcpClients.Seq() {
- _ = c.Close()
+ if err := c.Close(); err != nil {
+ errs = append(errs, err)
+ }
}
mcpBroker.Shutdown()
+ return errors.Join(errs...)
}
var mcpInitRequest = mcp.InitializeRequest{
From 442867dc2a9fe440804d50c2af5e961e5acc23cc Mon Sep 17 00:00:00 2001
From: Ayman Bagabas
Date: Tue, 9 Sep 2025 17:14:33 -0400
Subject: [PATCH 031/236] chore: bump bubbletea/ultraviolet to enable bracketed
paste on windows (#1003)
This trades enhanced keyboard support for bracketed paste on Windows, due to limitations in how the Windows Console API and VT input mode interact with bracketed paste.
---
go.mod | 8 ++++----
go.sum | 16 ++++++++--------
2 files changed, 12 insertions(+), 12 deletions(-)
diff --git a/go.mod b/go.mod
index e60c2fa5eb50811c258ed2e833c73083c6371465..e639f2aac4c1cf535b05000cdd2c62ce733a16ee 100644
--- a/go.mod
+++ b/go.mod
@@ -13,7 +13,7 @@ require (
github.com/bmatcuk/doublestar/v4 v4.9.1
github.com/charlievieth/fastwalk v1.0.12
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2
- github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250813213544-5cc219db8892
+ github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250909203628-59b04aaa4288
github.com/charmbracelet/catwalk v0.5.3
github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018
@@ -74,7 +74,7 @@ require (
github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/buger/jsonparser v1.1.1 // indirect
github.com/charmbracelet/colorprofile v0.3.2 // indirect
- github.com/charmbracelet/ultraviolet v0.0.0-20250813213450-50737e162af5
+ github.com/charmbracelet/ultraviolet v0.0.0-20250909162800-526253ea8b6f
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a // indirect
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d
github.com/charmbracelet/x/term v0.2.1
@@ -140,8 +140,8 @@ require (
golang.org/x/image v0.26.0 // indirect
golang.org/x/net v0.42.0 // indirect
golang.org/x/oauth2 v0.30.0 // indirect
- golang.org/x/sync v0.16.0 // indirect
- golang.org/x/sys v0.35.0
+ golang.org/x/sync v0.17.0 // indirect
+ golang.org/x/sys v0.36.0
golang.org/x/term v0.34.0 // indirect
golang.org/x/text v0.28.0
golang.org/x/time v0.8.0 // indirect
diff --git a/go.sum b/go.sum
index 668cc533e5d8b33c7a21de01f0608cc075b18307..6f26c4f2c2a2365576f01ba375605c17f0b0b169 100644
--- a/go.sum
+++ b/go.sum
@@ -78,8 +78,8 @@ github.com/charlievieth/fastwalk v1.0.12 h1:pwfxe1LajixViQqo7EFLXU2+mQxb6OaO0CeN
github.com/charlievieth/fastwalk v1.0.12/go.mod h1:yGy1zbxog41ZVMcKA/i8ojXLFsuayX5VvwhQVoj9PBI=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2 h1:973OHYuq2Jx9deyuPwe/6lsuQrDCatOsjP8uCd02URE=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw=
-github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250813213544-5cc219db8892 h1:lqoYD2DrKhSdC9xCr59JMXtbbdR5/AZ6xfd/G8eOQJM=
-github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250813213544-5cc219db8892/go.mod h1:TUpoECaG4/3CwFx5lTlXNpR87Yo7gOwGqucnHGfAm20=
+github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250909203628-59b04aaa4288 h1:zm9Z+XvpSTBBd0lOJNSgUxVZC6iG7m18LW9/WcmZIyM=
+github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250909203628-59b04aaa4288/go.mod h1:0EdaOyHQmphcSV/SOxv4PjWD/Qg2XSe6l7dRKsCwxlM=
github.com/charmbracelet/catwalk v0.5.3 h1:Hw9DlX8u79K9iLQJB4Bti9/rTzMvEpBjE/GyniWxHNY=
github.com/charmbracelet/catwalk v0.5.3/go.mod h1:WnKgNPmQHuMyk7GtwAQwl+ezHusfH40IvzML2qwUGwc=
github.com/charmbracelet/colorprofile v0.3.2 h1:9J27WdztfJQVAQKX2WOlSSRB+5gaKqqITmrvb1uTIiI=
@@ -92,8 +92,8 @@ github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0/go.mod h1:XIuqKpZTUXtVyeyiN1k9Tc/U7EzfaDnVc34feFHfBws=
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706 h1:WkwO6Ks3mSIGnGuSdKl9qDSyfbYK50z2wc2gGMggegE=
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706/go.mod h1:mjJGp00cxcfvD5xdCa+bso251Jt4owrQvuimJtVmEmM=
-github.com/charmbracelet/ultraviolet v0.0.0-20250813213450-50737e162af5 h1:7FlxuSTw5paY5Km8AK1WwfSVjAIOW4UiZI6Okva83pY=
-github.com/charmbracelet/ultraviolet v0.0.0-20250813213450-50737e162af5/go.mod h1:uQXXTlOPWiN05pLfSdajBj5FaaszPUrrr9qRFmmQ79M=
+github.com/charmbracelet/ultraviolet v0.0.0-20250909162800-526253ea8b6f h1:YQjUX3ku52kf3hkWYXHbvgnZtfMarE231UzOCT4nIx0=
+github.com/charmbracelet/ultraviolet v0.0.0-20250909162800-526253ea8b6f/go.mod h1:V21rZtvULxJyG8tUsRC8caTBvKNHOuRJVxH+G6ghH0Y=
github.com/charmbracelet/x/ansi v0.10.1 h1:rL3Koar5XvX0pHGfovN03f5cxLbCF2YvLeyz7D2jVDQ=
github.com/charmbracelet/x/ansi v0.10.1/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a h1:zYSNtEJM9jwHbJts2k+Hroj+xQwsW1yxc4Wopdv7KaI=
@@ -359,8 +359,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
-golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
-golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
+golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
+golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -376,8 +376,8 @@ golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI=
-golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
+golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k=
+golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
From df2e64ead1cc499c222ac57ed9bab0424dfac62f Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 10 Sep 2025 08:03:38 -0300
Subject: [PATCH 032/236] refactor(fsext): improve hierarchical ignore handling
and consolidate file exclusion logic (#999)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
* refactor(fsext): improve hierarchical ignore handling and consolidate file exclusion logic
- Refactor FastGlobWalker to use directoryLister for consistent ignore handling
- Add ShouldExcludeFile function for unified file exclusion checking
- Add WalkDirectories function for directory traversal with ignore support (a usage sketch for both follows below)
- Improve directory pattern matching by checking both with and without trailing slash
- Add comprehensive tests for hierarchical ignore behavior and common patterns
- Remove direct dependency on go-gitignore in favor of existing directoryLister implementation
💖 Generated with Crush
Co-Authored-By: Crush
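A short usage sketch of the two new helpers; their signatures are the ones added
to internal/fsext below, but listDirs is a hypothetical wrapper, not code from
this patch:

    // listDirs collects the directories under root that WalkDirectories visits,
    // i.e. those not excluded by hierarchical .gitignore/.crushignore rules or
    // common ignore patterns.
    func listDirs(root string) ([]string, error) {
        var dirs []string
        err := fsext.WalkDirectories(root, func(path string, d os.DirEntry, err error) error {
            if err != nil {
                return err
            }
            dirs = append(dirs, path)
            return nil
        })
        return dirs, err
    }

    // Single-file check, e.g. before handing a path to another tool:
    //   if fsext.ShouldExcludeFile(root, path) { /* skip it */ }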
* fix: improvements
Signed-off-by: Carlos Alexandro Becker
* chore: t.Context()
Signed-off-by: Carlos Alexandro Becker
* fix: tests
Signed-off-by: Carlos Alexandro Becker
---------
Signed-off-by: Carlos Alexandro Becker
Co-authored-by: Crush
---
internal/fsext/fileutil.go | 93 +++++++++++++--------------
internal/fsext/ignore_test.go | 96 ++++++++++++++++++++++++++++
internal/fsext/ls.go | 17 ++++-
internal/llm/provider/openai_test.go | 2 +-
internal/llm/tools/grep.go | 26 ++++++--
internal/llm/tools/grep_test.go | 61 ++++++++++--------
internal/shell/command_block_test.go | 3 +-
internal/shell/shell_test.go | 4 +-
8 files changed, 213 insertions(+), 89 deletions(-)
diff --git a/internal/fsext/fileutil.go b/internal/fsext/fileutil.go
index ee5fff66fb66e152319ea40c6abab4950a276a2f..e83cfc915219320f34cd4f813ac253be6b2c5053 100644
--- a/internal/fsext/fileutil.go
+++ b/internal/fsext/fileutil.go
@@ -11,8 +11,6 @@ import (
"github.com/bmatcuk/doublestar/v4"
"github.com/charlievieth/fastwalk"
"github.com/charmbracelet/crush/internal/home"
-
- ignore "github.com/sabhiram/go-gitignore"
)
type FileInfo struct {
@@ -58,60 +56,22 @@ func SkipHidden(path string) bool {
}
// FastGlobWalker provides gitignore-aware file walking with fastwalk
+// It uses hierarchical ignore checking like git does, checking .gitignore/.crushignore
+// files in each directory from the root to the target path.
type FastGlobWalker struct {
- gitignore *ignore.GitIgnore
- crushignore *ignore.GitIgnore
- rootPath string
+ directoryLister *directoryLister
}
func NewFastGlobWalker(searchPath string) *FastGlobWalker {
- walker := &FastGlobWalker{
- rootPath: searchPath,
- }
-
- // Load gitignore if it exists
- gitignorePath := filepath.Join(searchPath, ".gitignore")
- if _, err := os.Stat(gitignorePath); err == nil {
- if gi, err := ignore.CompileIgnoreFile(gitignorePath); err == nil {
- walker.gitignore = gi
- }
+ return &FastGlobWalker{
+ directoryLister: NewDirectoryLister(searchPath),
}
-
- // Load crushignore if it exists
- crushignorePath := filepath.Join(searchPath, ".crushignore")
- if _, err := os.Stat(crushignorePath); err == nil {
- if ci, err := ignore.CompileIgnoreFile(crushignorePath); err == nil {
- walker.crushignore = ci
- }
- }
-
- return walker
}
-// ShouldSkip checks if a path should be skipped based on gitignore, crushignore, and hidden file rules
+// ShouldSkip checks if a path should be skipped based on hierarchical gitignore,
+// crushignore, and hidden file rules
func (w *FastGlobWalker) ShouldSkip(path string) bool {
- if SkipHidden(path) {
- return true
- }
-
- relPath, err := filepath.Rel(w.rootPath, path)
- if err != nil {
- return false
- }
-
- if w.gitignore != nil {
- if w.gitignore.MatchesPath(relPath) {
- return true
- }
- }
-
- if w.crushignore != nil {
- if w.crushignore.MatchesPath(relPath) {
- return true
- }
- }
-
- return false
+ return w.directoryLister.shouldIgnore(path, nil)
}
func GlobWithDoubleStar(pattern, searchPath string, limit int) ([]string, bool, error) {
@@ -182,6 +142,43 @@ func GlobWithDoubleStar(pattern, searchPath string, limit int) ([]string, bool,
return results, truncated, nil
}
+// ShouldExcludeFile checks if a file should be excluded from processing
+// based on common patterns and ignore rules
+func ShouldExcludeFile(rootPath, filePath string) bool {
+ return NewDirectoryLister(rootPath).
+ shouldIgnore(filePath, nil)
+}
+
+// WalkDirectories walks a directory tree and calls the provided function for each directory,
+// respecting hierarchical .gitignore/.crushignore files like git does.
+func WalkDirectories(rootPath string, fn func(path string, d os.DirEntry, err error) error) error {
+ dl := NewDirectoryLister(rootPath)
+
+ conf := fastwalk.Config{
+ Follow: true,
+ ToSlash: fastwalk.DefaultToSlash(),
+ Sort: fastwalk.SortDirsFirst,
+ }
+
+ return fastwalk.Walk(&conf, rootPath, func(path string, d os.DirEntry, err error) error {
+ if err != nil {
+ return fn(path, d, err)
+ }
+
+ // Only process directories
+ if !d.IsDir() {
+ return nil
+ }
+
+ // Check if directory should be ignored
+ if dl.shouldIgnore(path, nil) {
+ return filepath.SkipDir
+ }
+
+ return fn(path, d, err)
+ })
+}
+
func PrettyPath(path string) string {
return home.Short(path)
}
diff --git a/internal/fsext/ignore_test.go b/internal/fsext/ignore_test.go
index c2490a062d2e55fc96dda78c597fc867465f032e..1b517ec0408fe69726bf4fa4bbb95c2a206e548c 100644
--- a/internal/fsext/ignore_test.go
+++ b/internal/fsext/ignore_test.go
@@ -2,6 +2,7 @@ package fsext
import (
"os"
+ "path/filepath"
"testing"
"github.com/stretchr/testify/require"
@@ -30,3 +31,98 @@ func TestCrushIgnore(t *testing.T) {
require.False(t, dl.shouldIgnore("test1.txt", nil), ".txt files should not be ignored")
require.True(t, dl.shouldIgnore("test3.tmp", nil), ".tmp files should be ignored by common patterns")
}
+
+func TestShouldExcludeFile(t *testing.T) {
+ t.Parallel()
+
+ // Create a temporary directory structure for testing
+ tempDir := t.TempDir()
+
+ // Create directories that should be ignored
+ nodeModules := filepath.Join(tempDir, "node_modules")
+ target := filepath.Join(tempDir, "target")
+ customIgnored := filepath.Join(tempDir, "custom_ignored")
+ normalDir := filepath.Join(tempDir, "src")
+
+ for _, dir := range []string{nodeModules, target, customIgnored, normalDir} {
+ if err := os.MkdirAll(dir, 0o755); err != nil {
+ t.Fatalf("Failed to create directory %s: %v", dir, err)
+ }
+ }
+
+ // Create .gitignore file
+ gitignoreContent := "node_modules/\ntarget/\n"
+ if err := os.WriteFile(filepath.Join(tempDir, ".gitignore"), []byte(gitignoreContent), 0o644); err != nil {
+ t.Fatalf("Failed to create .gitignore: %v", err)
+ }
+
+ // Create .crushignore file
+ crushignoreContent := "custom_ignored/\n"
+ if err := os.WriteFile(filepath.Join(tempDir, ".crushignore"), []byte(crushignoreContent), 0o644); err != nil {
+ t.Fatalf("Failed to create .crushignore: %v", err)
+ }
+
+ // Test that ignored directories are properly ignored
+ require.True(t, ShouldExcludeFile(tempDir, nodeModules), "Expected node_modules to be ignored by .gitignore")
+ require.True(t, ShouldExcludeFile(tempDir, target), "Expected target to be ignored by .gitignore")
+ require.True(t, ShouldExcludeFile(tempDir, customIgnored), "Expected custom_ignored to be ignored by .crushignore")
+
+ // Test that normal directories are not ignored
+ require.False(t, ShouldExcludeFile(tempDir, normalDir), "Expected src directory to not be ignored")
+
+ // Test that the workspace root itself is not ignored
+ require.False(t, ShouldExcludeFile(tempDir, tempDir), "Expected workspace root to not be ignored")
+}
+
+func TestShouldExcludeFileHierarchical(t *testing.T) {
+ t.Parallel()
+
+ // Create a nested directory structure for testing hierarchical ignore
+ tempDir := t.TempDir()
+
+ // Create nested directories
+ subDir := filepath.Join(tempDir, "subdir")
+ nestedNormal := filepath.Join(subDir, "normal_nested")
+
+ for _, dir := range []string{subDir, nestedNormal} {
+ if err := os.MkdirAll(dir, 0o755); err != nil {
+ t.Fatalf("Failed to create directory %s: %v", dir, err)
+ }
+ }
+
+ // Create .crushignore in subdir that ignores normal_nested
+ subCrushignore := "normal_nested/\n"
+ if err := os.WriteFile(filepath.Join(subDir, ".crushignore"), []byte(subCrushignore), 0o644); err != nil {
+ t.Fatalf("Failed to create subdir .crushignore: %v", err)
+ }
+
+ // Test hierarchical ignore behavior - this should work because the .crushignore is in the parent directory
+ require.True(t, ShouldExcludeFile(tempDir, nestedNormal), "Expected normal_nested to be ignored by subdir .crushignore")
+ require.False(t, ShouldExcludeFile(tempDir, subDir), "Expected subdir itself to not be ignored")
+}
+
+func TestShouldExcludeFileCommonPatterns(t *testing.T) {
+ t.Parallel()
+
+ tempDir := t.TempDir()
+
+ // Create directories that should be ignored by common patterns
+ commonIgnored := []string{
+ filepath.Join(tempDir, ".git"),
+ filepath.Join(tempDir, "node_modules"),
+ filepath.Join(tempDir, "__pycache__"),
+ filepath.Join(tempDir, "target"),
+ filepath.Join(tempDir, ".vscode"),
+ }
+
+ for _, dir := range commonIgnored {
+ if err := os.MkdirAll(dir, 0o755); err != nil {
+ t.Fatalf("Failed to create directory %s: %v", dir, err)
+ }
+ }
+
+ // Test that common patterns are ignored even without explicit ignore files
+ for _, dir := range commonIgnored {
+ require.True(t, ShouldExcludeFile(tempDir, dir), "Expected %s to be ignored by common patterns", filepath.Base(dir))
+ }
+}
diff --git a/internal/fsext/ls.go b/internal/fsext/ls.go
index 884c5b150e64cce3da3d1e3f2e08355a53361272..2c46416f28a2777ddc9092883686c8a3461a9f7d 100644
--- a/internal/fsext/ls.go
+++ b/internal/fsext/ls.go
@@ -141,8 +141,16 @@ func (dl *directoryLister) shouldIgnore(path string, ignorePatterns []string) bo
return true
}
- if dl.getIgnore(filepath.Dir(path)).MatchesPath(relPath) {
- slog.Debug("ignoring dir pattern", "path", relPath, "dir", filepath.Dir(path))
+ parentDir := filepath.Dir(path)
+ ignoreParser := dl.getIgnore(parentDir)
+ if ignoreParser.MatchesPath(relPath) {
+ slog.Debug("ignoring dir pattern", "path", relPath, "dir", parentDir)
+ return true
+ }
+
+ // For directories, also check with trailing slash (gitignore convention)
+ if ignoreParser.MatchesPath(relPath + "/") {
+ slog.Debug("ignoring dir pattern with slash", "path", relPath+"/", "dir", parentDir)
return true
}
@@ -160,11 +168,14 @@ func (dl *directoryLister) shouldIgnore(path string, ignorePatterns []string) bo
func (dl *directoryLister) checkParentIgnores(path string) bool {
parent := filepath.Dir(filepath.Dir(path))
- for parent != dl.rootPath && parent != "." && path != "." {
+ for parent != "." && path != "." {
if dl.getIgnore(parent).MatchesPath(path) {
slog.Debug("ingoring parent dir pattern", "path", path, "dir", parent)
return true
}
+ if parent == dl.rootPath {
+ break
+ }
parent = filepath.Dir(parent)
}
return false
diff --git a/internal/llm/provider/openai_test.go b/internal/llm/provider/openai_test.go
index db2edbb7e9829af0c07ada532ee1d0cefb51463b..8088ba22b4cd49b26130cd3812e8705e8dfe1cba 100644
--- a/internal/llm/provider/openai_test.go
+++ b/internal/llm/provider/openai_test.go
@@ -75,7 +75,7 @@ func TestOpenAIClientStreamChoices(t *testing.T) {
},
}
- ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+ ctx, cancel := context.WithTimeout(t.Context(), 5*time.Second)
defer cancel()
eventsChan := client.stream(ctx, messages, nil)
diff --git a/internal/llm/tools/grep.go b/internal/llm/tools/grep.go
index 4d0fbd75e1000e446523eae36c756da530b309ea..1160fc287088f960d15fa1bf847eb13f77e84b92 100644
--- a/internal/llm/tools/grep.go
+++ b/internal/llm/tools/grep.go
@@ -279,11 +279,13 @@ func searchWithRipgrep(ctx context.Context, pattern, path, include string) ([]gr
return nil, fmt.Errorf("ripgrep not found in $PATH")
}
- cmd.Args = append(
- cmd.Args,
- "--ignore-file", filepath.Join(path, ".gitignore"),
- "--ignore-file", filepath.Join(path, ".crushignore"),
- )
+ // Only add ignore files if they exist
+ for _, ignoreFile := range []string{".gitignore", ".crushignore"} {
+ ignorePath := filepath.Join(path, ignoreFile)
+ if _, err := os.Stat(ignorePath); err == nil {
+ cmd.Args = append(cmd.Args, "--ignore-file", ignorePath)
+ }
+ }
output, err := cmd.Output()
if err != nil {
@@ -357,14 +359,24 @@ func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error
}
if info.IsDir() {
- return nil // Skip directories
+ // Check if directory should be skipped
+ if walker.ShouldSkip(path) {
+ return filepath.SkipDir
+ }
+ return nil // Continue into directory
}
- // Use walker's shouldSkip method instead of just SkipHidden
+ // Use walker's shouldSkip method for files
if walker.ShouldSkip(path) {
return nil
}
+ // Skip hidden files (starting with a dot) to match ripgrep's default behavior
+ base := filepath.Base(path)
+ if base != "." && strings.HasPrefix(base, ".") {
+ return nil
+ }
+
if includePattern != nil && !includePattern.MatchString(path) {
return nil
}
diff --git a/internal/llm/tools/grep_test.go b/internal/llm/tools/grep_test.go
index cb16a61020cb4102e147da91b6627d9e7cdddec5..53c96b22df444adfba59c6b13995a104411a57be 100644
--- a/internal/llm/tools/grep_test.go
+++ b/internal/llm/tools/grep_test.go
@@ -1,8 +1,6 @@
package tools
import (
- "context"
- "encoding/json"
"os"
"path/filepath"
"regexp"
@@ -59,6 +57,7 @@ func TestGlobToRegexCaching(t *testing.T) {
}
func TestGrepWithIgnoreFiles(t *testing.T) {
+ t.Parallel()
tempDir := t.TempDir()
// Create test files
@@ -84,32 +83,42 @@ func TestGrepWithIgnoreFiles(t *testing.T) {
crushignoreContent := "node_modules/\n"
require.NoError(t, os.WriteFile(filepath.Join(tempDir, ".crushignore"), []byte(crushignoreContent), 0o644))
- // Create grep tool
- grepTool := NewGrepTool(tempDir)
+ // Test both implementations
+ for name, fn := range map[string]func(pattern, path, include string) ([]grepMatch, error){
+ "regex": searchFilesWithRegex,
+ "rg": func(pattern, path, include string) ([]grepMatch, error) {
+ return searchWithRipgrep(t.Context(), pattern, path, include)
+ },
+ } {
+ t.Run(name, func(t *testing.T) {
+ t.Parallel()
+
+ if name == "rg" && getRg() == "" {
+ t.Skip("rg is not in $PATH")
+ }
+
+ matches, err := fn("hello world", tempDir, "")
+ require.NoError(t, err)
- // Create grep parameters
- params := GrepParams{
- Pattern: "hello world",
- Path: tempDir,
+ // Convert matches to a set of file paths for easier testing
+ foundFiles := make(map[string]bool)
+ for _, match := range matches {
+ foundFiles[filepath.Base(match.path)] = true
+ }
+
+ // Should find file1.txt and file2.txt
+ require.True(t, foundFiles["file1.txt"], "Should find file1.txt")
+ require.True(t, foundFiles["file2.txt"], "Should find file2.txt")
+
+ // Should NOT find ignored files
+ require.False(t, foundFiles["file3.txt"], "Should not find file3.txt (ignored by .gitignore)")
+ require.False(t, foundFiles["lib.js"], "Should not find lib.js (ignored by .crushignore)")
+ require.False(t, foundFiles["secret.key"], "Should not find secret.key (ignored by .gitignore)")
+
+ // Should find exactly 2 matches
+ require.Equal(t, 2, len(matches), "Should find exactly 2 matches")
+ })
}
- paramsJSON, err := json.Marshal(params)
- require.NoError(t, err)
-
- // Run grep
- call := ToolCall{Input: string(paramsJSON)}
- response, err := grepTool.Run(context.Background(), call)
- require.NoError(t, err)
-
- // Check results - should only find file1.txt and file2.txt
- // ignored/file3.txt should be ignored by .gitignore
- // node_modules/lib.js should be ignored by .crushignore
- // secret.key should be ignored by .gitignore
- result := response.Content
- require.Contains(t, result, "file1.txt")
- require.Contains(t, result, "file2.txt")
- require.NotContains(t, result, "file3.txt")
- require.NotContains(t, result, "lib.js")
- require.NotContains(t, result, "secret.key")
}
func TestSearchImplementations(t *testing.T) {
diff --git a/internal/shell/command_block_test.go b/internal/shell/command_block_test.go
index 22e91b14189d00bd87a1ea14767b64b4e102ae88..7cfdf6afe5f8065afb90d866dfed41ba51b95cb5 100644
--- a/internal/shell/command_block_test.go
+++ b/internal/shell/command_block_test.go
@@ -1,7 +1,6 @@
package shell
import (
- "context"
"strings"
"testing"
@@ -92,7 +91,7 @@ func TestCommandBlocking(t *testing.T) {
BlockFuncs: tt.blockFuncs,
})
- _, _, err := shell.Exec(context.Background(), tt.command)
+ _, _, err := shell.Exec(t.Context(), tt.command)
if tt.shouldBlock {
if err == nil {
diff --git a/internal/shell/shell_test.go b/internal/shell/shell_test.go
index 66586b7f41c92486f7a8977d8ab34909de187c28..ae53fb1c3ecc2c1fe3760566b122337eb0a2782f 100644
--- a/internal/shell/shell_test.go
+++ b/internal/shell/shell_test.go
@@ -16,7 +16,7 @@ func BenchmarkShellQuickCommands(b *testing.B) {
b.ReportAllocs()
for b.Loop() {
- _, _, err := shell.Exec(context.Background(), "echo test")
+ _, _, err := shell.Exec(b.Context(), "echo test")
exitCode := ExitCode(err)
if err != nil || exitCode != 0 {
b.Fatalf("Command failed: %v, exit code: %d", err, exitCode)
@@ -100,7 +100,7 @@ func TestRunContinuity(t *testing.T) {
func TestCrossPlatformExecution(t *testing.T) {
shell := NewShell(&Options{WorkingDir: "."})
- ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+ ctx, cancel := context.WithTimeout(t.Context(), 5*time.Second)
defer cancel()
// Test a simple command that should work on all platforms
From fc9cdc415ccfd687a13dc8ded9b71ec73170c108 Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Wed, 10 Sep 2025 11:55:34 -0300
Subject: [PATCH 033/236] chore(legal): @SubodhSenpai has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index 44c09c1353894e405fb50a183223e40594ecfa32..e500dc1399f4c77abb8fa295cee1a1039126794f 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -575,6 +575,14 @@
"created_at": "2025-09-09T10:30:49Z",
"repoId": 987670088,
"pullRequestNo": 1000
+ },
+ {
+ "name": "SubodhSenpai",
+ "id": 116248387,
+ "comment_id": 3275351636,
+ "created_at": "2025-09-10T14:55:25Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1008
}
]
}
\ No newline at end of file
From 869511335b317f80f9fc64df76596dd08fe049b8 Mon Sep 17 00:00:00 2001
From: Ayman Bagabas
Date: Wed, 10 Sep 2025 11:59:15 -0400
Subject: [PATCH 034/236] fix: esc key not being recognized
---
go.mod | 4 ++--
go.sum | 8 ++++----
2 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/go.mod b/go.mod
index e639f2aac4c1cf535b05000cdd2c62ce733a16ee..6998ed6302a752faaa92d136e54703beb7e6c1b4 100644
--- a/go.mod
+++ b/go.mod
@@ -13,7 +13,7 @@ require (
github.com/bmatcuk/doublestar/v4 v4.9.1
github.com/charlievieth/fastwalk v1.0.12
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2
- github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250909203628-59b04aaa4288
+ github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e
github.com/charmbracelet/catwalk v0.5.3
github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018
@@ -74,7 +74,7 @@ require (
github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/buger/jsonparser v1.1.1 // indirect
github.com/charmbracelet/colorprofile v0.3.2 // indirect
- github.com/charmbracelet/ultraviolet v0.0.0-20250909162800-526253ea8b6f
+ github.com/charmbracelet/ultraviolet v0.0.0-20250910155420-aa0094762299
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a // indirect
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d
github.com/charmbracelet/x/term v0.2.1
diff --git a/go.sum b/go.sum
index 6f26c4f2c2a2365576f01ba375605c17f0b0b169..5f04e5fb6161c875a12fdf689cae3dd5ee348b67 100644
--- a/go.sum
+++ b/go.sum
@@ -78,8 +78,8 @@ github.com/charlievieth/fastwalk v1.0.12 h1:pwfxe1LajixViQqo7EFLXU2+mQxb6OaO0CeN
github.com/charlievieth/fastwalk v1.0.12/go.mod h1:yGy1zbxog41ZVMcKA/i8ojXLFsuayX5VvwhQVoj9PBI=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2 h1:973OHYuq2Jx9deyuPwe/6lsuQrDCatOsjP8uCd02URE=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw=
-github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250909203628-59b04aaa4288 h1:zm9Z+XvpSTBBd0lOJNSgUxVZC6iG7m18LW9/WcmZIyM=
-github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250909203628-59b04aaa4288/go.mod h1:0EdaOyHQmphcSV/SOxv4PjWD/Qg2XSe6l7dRKsCwxlM=
+github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e h1:4BBnKWFwJ5FLyhw/ijFxKE04i9rubr8WIPR1kjO57iA=
+github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e/go.mod h1:F7AfLKYQqpM3NNBVs7ctW417tavhvoh9SBjsgtwpzbY=
github.com/charmbracelet/catwalk v0.5.3 h1:Hw9DlX8u79K9iLQJB4Bti9/rTzMvEpBjE/GyniWxHNY=
github.com/charmbracelet/catwalk v0.5.3/go.mod h1:WnKgNPmQHuMyk7GtwAQwl+ezHusfH40IvzML2qwUGwc=
github.com/charmbracelet/colorprofile v0.3.2 h1:9J27WdztfJQVAQKX2WOlSSRB+5gaKqqITmrvb1uTIiI=
@@ -92,8 +92,8 @@ github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0/go.mod h1:XIuqKpZTUXtVyeyiN1k9Tc/U7EzfaDnVc34feFHfBws=
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706 h1:WkwO6Ks3mSIGnGuSdKl9qDSyfbYK50z2wc2gGMggegE=
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706/go.mod h1:mjJGp00cxcfvD5xdCa+bso251Jt4owrQvuimJtVmEmM=
-github.com/charmbracelet/ultraviolet v0.0.0-20250909162800-526253ea8b6f h1:YQjUX3ku52kf3hkWYXHbvgnZtfMarE231UzOCT4nIx0=
-github.com/charmbracelet/ultraviolet v0.0.0-20250909162800-526253ea8b6f/go.mod h1:V21rZtvULxJyG8tUsRC8caTBvKNHOuRJVxH+G6ghH0Y=
+github.com/charmbracelet/ultraviolet v0.0.0-20250910155420-aa0094762299 h1:vpIIy7W1Bv84GUhi3Z5oRyZZRAtdTd9kI3+TnLZrnZE=
+github.com/charmbracelet/ultraviolet v0.0.0-20250910155420-aa0094762299/go.mod h1:V21rZtvULxJyG8tUsRC8caTBvKNHOuRJVxH+G6ghH0Y=
github.com/charmbracelet/x/ansi v0.10.1 h1:rL3Koar5XvX0pHGfovN03f5cxLbCF2YvLeyz7D2jVDQ=
github.com/charmbracelet/x/ansi v0.10.1/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a h1:zYSNtEJM9jwHbJts2k+Hroj+xQwsW1yxc4Wopdv7KaI=
From 46a3a3771def1f842c7beae16d5b014def868cf1 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 10 Sep 2025 14:27:18 -0300
Subject: [PATCH 035/236] feat: optimize LSP file watcher and ignore files
(#959)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
* refactor: centralize file watching with single fsnotify.Watcher
Refactored the LSP watcher system to use a single shared fsnotify.Watcher
instance instead of one per LSP client, eliminating all file watching
duplication and significantly improving resource efficiency.
Key changes:
- Added GlobalWatcher singleton managing a single fsnotify.Watcher (sketched below)
- Centralized all file system event processing and distribution
- Eliminated duplicate directory and file watching across clients
- Implemented global debouncing with per-client event filtering
- Maintained full backward compatibility with existing LSP integration
Benefits:
- Single watcher instance regardless of LSP client count
- Each directory/file watched exactly once
- Centralized event processing eliminates duplicate operations
- Significant reduction in file descriptors and memory usage
- Linear resource growth with unique files, not client count
💖 Generated with Crush
Co-Authored-By: Crush
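A minimal sketch of the shared, directory-only watcher described above, assuming
fsnotify's standard API (NewWatcher/Add/Events/Errors); the names are illustrative,
and the real GlobalWatcher's ignore checks, debouncing, and per-client fan-out are
omitted:

    // watchWorkspace registers every directory under root with the shared
    // watcher (the real implementation also skips ignored directories).
    // Files are not added individually: fsnotify reports events for files
    // inside watched directories, and Add is a no-op for already-watched
    // paths, so repeated calls are safe.
    func watchWorkspace(w *fsnotify.Watcher, root string) error {
        return filepath.WalkDir(root, func(path string, d os.DirEntry, err error) error {
            if err != nil || !d.IsDir() {
                return err
            }
            return w.Add(path)
        })
    }

    // run drains the single event stream and distributes events to clients.
    func run(ctx context.Context, w *fsnotify.Watcher) {
        for {
            select {
            case <-ctx.Done():
                return
            case ev := <-w.Events:
                if ev.Op&fsnotify.Create != 0 {
                    // a newly created directory must be added to the watcher
                }
                // debounce and fan the event out to interested LSP clients here
            case err := <-w.Errors:
                _ = err // log and keep going
            }
        }
    }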
* docs: clarify directory-only watching strategy
Enhanced documentation and comments to clearly explain that the file
watcher implementation only watches directories, not individual files.
This approach is more efficient as fsnotify automatically provides
events for all files within watched directories.
Key clarifications:
- Added comprehensive documentation explaining directory-only approach
- Clarified that fsnotify automatically covers files in watched directories
- Enhanced comments explaining new directory detection and handling
- Added test to verify only directories are watched, never individual files
- Improved code organization and readability
Benefits of directory-only watching:
- Significantly fewer file descriptors used
- Automatic coverage of new files created in watched directories
- Better performance with large codebases
- Simplified deduplication logic
💖 Generated with Crush
Co-Authored-By: Crush
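To make the directory-only claim concrete, a small standalone sketch (the path and the program itself are hypothetical): only the directory is added to the watcher, yet events still arrive for files created or written inside it.

package main

import (
    "fmt"
    "log"

    "github.com/fsnotify/fsnotify"
)

func main() {
    w, err := fsnotify.NewWatcher()
    if err != nil {
        log.Fatal(err)
    }
    defer w.Close()

    // Watch a directory only; individual files are never added.
    if err := w.Add("/tmp/project"); err != nil {
        log.Fatal(err)
    }

    for {
        select {
        case ev, ok := <-w.Events:
            if !ok {
                return
            }
            // Creating /tmp/project/main.go shows up here as a Create event,
            // even though main.go itself was never registered.
            fmt.Println(ev.Op, ev.Name)
        case err, ok := <-w.Errors:
            if !ok {
                return
            }
            log.Println("watch error:", err)
        }
    }
}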
* refactor: remove unnecessary directory tracking, rely on fsnotify deduplication
Simplified the GlobalWatcher by removing manual directory tracking since
fsnotify handles deduplication internally. According to fsnotify docs:
"A path can only be watched once; watching it more than once is a no-op"
Key improvements:
- Removed watchedDirs map and associated mutex (no longer needed)
- Simplified addDirectoryToWatcher method to directly call fsnotify
- Updated tests to verify fsnotify deduplication behavior
- Reduced memory usage and code complexity
- Maintained all functionality while relying on fsnotify's built-in deduplication
Benefits:
- Less memory usage (no directory tracking map)
- Simpler code with fewer mutexes and less complexity
- Relies on well-tested fsnotify deduplication instead of custom logic
- Better performance due to reduced synchronization overhead
💖 Generated with Crush
Co-Authored-By: Crush
* refactor: remove workspace path tracking, embrace full idempotency
Removed unnecessary workspace path tracking since directory walking
and fsnotify.Add() calls are idempotent. Multiple WatchWorkspace calls
with the same path are now safe and simple.
Key improvements:
- Removed workspacePaths map and workspacesMu mutex
- Simplified WatchWorkspace to be fully idempotent
- Reduced GlobalWatcher struct size and complexity
- Updated tests to verify idempotent behavior instead of deduplication
- Embraced "simple and idempotent" over "complex and optimized"
Benefits:
- Even less memory usage (no workspace tracking)
- Simpler code with fewer mutexes (down to 2 from original 4)
- Fully idempotent operations - safe to call multiple times
- Better maintainability with less state to manage
- Relies entirely on fsnotify's proven deduplication
Philosophy: let fsnotify handle what it's designed to handle, and keep our
code simple and idempotent rather than trying to micro-optimize.
💖 Generated with Crush
Co-Authored-By: Crush
* refactor: remove redundant file type validation in file opening
Removed duplicate file extension checking since HandlesFile() already
validates that the LSP client handles the file type. This eliminates
redundant hardcoded extension checks and potential inconsistencies.
Key improvements:
- Removed shouldOpen extension validation logic
- Simplified file opening to trust HandlesFile() validation
- Eliminated hardcoded extension lists that could become stale
- Reduced code duplication between global_watcher.go and watcher.go
- More consistent behavior across different file opening paths
Benefits:
- Single source of truth for file type handling (LSP client config)
- Less code to maintain and keep in sync
- More flexible - supports any file types configured for LSP clients
- Eliminates potential bugs from hardcoded extension mismatches
- Cleaner, more maintainable code
The file type validation now happens exactly once at the right place:
when checking if a client HandlesFile(), not again during file opening.
💖 Generated with Crush
Co-Authored-By: Crush
* feat: add hierarchical .gitignore/.crushignore support to LSP file watcher
Implements proper hierarchical ignore file support that checks for .gitignore
and .crushignore files in each directory from the target path up to the
workspace root, following Git's ignore semantics.
Key improvements:
- Hierarchical ignore checking: walks directory tree from workspace root to target
- Supports both .gitignore and .crushignore patterns
- Handles trailing slash patterns correctly (e.g., "node_modules/" matches directories)
- Uses go-gitignore library for proper pattern matching
- Maintains workspace root tracking for multi-workspace support
- Comprehensive test coverage for ignore functionality
This ensures the LSP file watcher respects ignore patterns at all directory
levels, not just the workspace root, providing consistent behavior with Git
and other tools that support hierarchical ignore files.
💖 Generated with Crush
Co-Authored-By: Crush
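A rough sketch of the hierarchical check under stated assumptions: it uses the sabhiram/go-gitignore package (one possible "go-gitignore" library; this commit does not name which one), consults .gitignore and .crushignore at every level from the workspace root down to the file's directory, and, unlike full Git semantics, does not handle negation patterns. Function and parameter names are illustrative, not the fsext implementation.

package ignoresketch

import (
    "path/filepath"
    "strings"

    ignore "github.com/sabhiram/go-gitignore"
)

// isIgnored reports whether path (absolute, inside root) matches an ignore
// pattern in any .gitignore or .crushignore between root and path's directory.
func isIgnored(root, path string) bool {
    rel, err := filepath.Rel(root, path)
    if err != nil || strings.HasPrefix(rel, "..") {
        return false // outside the workspace
    }

    // Collect root plus every intermediate directory down to path's parent.
    dirs := []string{root}
    cur := root
    for _, seg := range strings.Split(filepath.Dir(rel), string(filepath.Separator)) {
        if seg == "." || seg == "" {
            break
        }
        cur = filepath.Join(cur, seg)
        dirs = append(dirs, cur)
    }

    for _, dir := range dirs {
        for _, name := range []string{".gitignore", ".crushignore"} {
            gi, err := ignore.CompileIgnoreFile(filepath.Join(dir, name))
            if err != nil {
                continue // no ignore file at this level
            }
            relToDir, err := filepath.Rel(dir, path)
            if err != nil {
                continue
            }
            // Patterns in an ignore file apply relative to its own directory.
            if gi.MatchesPath(relToDir) {
                return true
            }
        }
    }
    return false
}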
* chore: small improvement
Signed-off-by: Carlos Alexandro Becker
* refactor: simplify global watcher to single workspace
Remove multi-workspace concept that was never used in practice.
All LSP clients watch the same single workspace directory, so the
complexity of tracking multiple workspace roots was unnecessary.
Changes:
- Replace workspaceRoots map with single workspaceRoot string
- Remove unnecessary mutex protection (workspace set once at startup)
- Simplify shouldIgnoreDirectory logic
- Update tests to match simplified structure
💖 Generated with Crush
Co-Authored-By: Crush
* refactor: major simplification of file watcher logic
Remove unnecessary complexity and consolidate duplicate code:
- Remove unnecessary watcherMu mutex (watcher set once at init)
- Consolidate duplicate file opening logic between GlobalWatcher and WorkspaceWatcher
- Simplify AddRegistrations by removing complex workspace scanning
- Replace custom glob matching with proven doublestar library
- Remove unused shouldExcludeDir function
- Streamline file preloading to only handle high-priority files
Benefits:
- ~200 lines of code removed
- Better reliability using doublestar for pattern matching
- Improved performance with event-driven approach vs bulk scanning
- Single source of truth for file operations
- Reduced memory usage and fewer goroutines
💖 Generated with Crush
Co-Authored-By: Crush
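A quick illustration of the doublestar swap: patterns with ** and brace alternatives, similar to what LSP servers commonly register, go straight through doublestar.Match instead of hand-rolled string handling. The paths and the example pattern below are hypothetical.

package main

import (
    "fmt"

    "github.com/bmatcuk/doublestar/v4"
)

func main() {
    // A registration pattern similar to what an LSP server might send.
    pattern := "**/*.{go,mod,sum}"
    for _, path := range []string{
        "main.go",
        "internal/lsp/watcher/watcher.go",
        "go.sum",
        "README.md",
    } {
        ok, err := doublestar.Match(pattern, path)
        if err != nil {
            fmt.Println("bad pattern:", err)
            return
        }
        fmt.Printf("%-40s matches=%v\n", path, ok)
    }
}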
* refactor: more cleanup
Signed-off-by: Carlos Alexandro Becker
* refactor: use csync
Signed-off-by: Carlos Alexandro Becker
* refactor: renaming some methods/structs
Signed-off-by: Carlos Alexandro Becker
* refactor: simplify
Signed-off-by: Carlos Alexandro Becker
* fix: errs/logs
Signed-off-by: Carlos Alexandro Becker
* refactor: simplify LSP watcher architecture and improve organization
- Rename WorkspaceWatcher to Client for clarity
- Add Start() function for one-time global watcher setup
- Extract ignore file logic to separate ignore.go module
- Add thread-safe csync.String type with comprehensive tests
- Simplify startup flow by initializing watcher once in app.go
- Improve naming consistency (getGlobalWatcher → instance, etc.)
💖 Generated with Crush
Co-Authored-By: Crush
* chore: remove unused csync strings utilities
💖 Generated with Crush
Co-Authored-By: Crush
Signed-off-by: Carlos Alexandro Becker
* docs: add semantic commit guidelines to development guide
💖 Generated with Crush
Co-Authored-By: Crush
* fix: exclude .git directories from LSP file watching
Explicitly exclude .git directories from file system watching to improve
performance and avoid unnecessary events from Git operations.
💖 Generated with Crush
Co-Authored-By: Crush
* refactor: use fsext
Signed-off-by: Carlos Alexandro Becker
* fix: grep
Signed-off-by: Carlos Alexandro Becker
* merge
Signed-off-by: Carlos Alexandro Becker
* Apply suggestion from @Copilot
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
---------
Signed-off-by: Carlos Alexandro Becker
Co-authored-by: Crush
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
---
CRUSH.md | 4 +
internal/app/app.go | 9 +
internal/app/lsp.go | 6 +-
internal/lsp/watcher/global_watcher.go | 364 ++++++++++++++
internal/lsp/watcher/global_watcher_test.go | 297 ++++++++++++
internal/lsp/watcher/watcher.go | 505 ++------------------
6 files changed, 719 insertions(+), 466 deletions(-)
create mode 100644 internal/lsp/watcher/global_watcher.go
create mode 100644 internal/lsp/watcher/global_watcher_test.go
diff --git a/CRUSH.md b/CRUSH.md
index 5a3104b6685fb5e246c77d416d4a12adeda91734..102ad43ca5758beee6515ab9da4054ddc92b9a9f 100644
--- a/CRUSH.md
+++ b/CRUSH.md
@@ -59,3 +59,7 @@ func TestYourFunction(t *testing.T) {
- If `goimports` is not available, use `gofmt`.
- You can also use `task fmt` to run `gofumpt -w .` on the entire project,
as long as `gofumpt` is on the `PATH`.
+
+## Committing
+
+- ALWAYS use semantic commits (`fix:`, `feat:`, `chore:`, `refactor:`, `docs:`, `sec:`, etc).
diff --git a/internal/app/app.go b/internal/app/app.go
index cec42dc8610b4a6c72215766b7fd1c764381ef5a..21ddcd25eff1c9aeebb9d6700f9340ab0932e7ab 100644
--- a/internal/app/app.go
+++ b/internal/app/app.go
@@ -21,6 +21,7 @@ import (
"github.com/charmbracelet/crush/internal/pubsub"
"github.com/charmbracelet/crush/internal/lsp"
+ "github.com/charmbracelet/crush/internal/lsp/watcher"
"github.com/charmbracelet/crush/internal/message"
"github.com/charmbracelet/crush/internal/permission"
"github.com/charmbracelet/crush/internal/session"
@@ -85,6 +86,11 @@ func New(ctx context.Context, conn *sql.DB, cfg *config.Config) (*App, error) {
app.setupEvents()
+ // Start the global watcher
+ if err := watcher.Start(); err != nil {
+ return nil, fmt.Errorf("app: %w", err)
+ }
+
// Initialize LSP clients in the background.
app.initLSPClients(ctx)
@@ -352,6 +358,9 @@ func (app *App) Shutdown() {
cancel()
}
+ // Shutdown the global watcher
+ watcher.Shutdown()
+
// Call call cleanup functions.
for _, cleanup := range app.cleanupFuncs {
if cleanup != nil {
diff --git a/internal/app/lsp.go b/internal/app/lsp.go
index b1f35dedc02c9ae842a8e0d2d52b51eaf38bd2ee..8a9b06c1e784770371bc4000a2101af11aa44d64 100644
--- a/internal/app/lsp.go
+++ b/internal/app/lsp.go
@@ -71,7 +71,7 @@ func (app *App) createAndStartLSPClient(ctx context.Context, name string, config
watchCtx, cancelFunc := context.WithCancel(ctx)
// Create the workspace watcher.
- workspaceWatcher := watcher.NewWorkspaceWatcher(name, lspClient)
+ workspaceWatcher := watcher.New(name, lspClient)
// Store the cancel function to be called during cleanup.
app.watcherCancelFuncs.Append(cancelFunc)
@@ -87,14 +87,14 @@ func (app *App) createAndStartLSPClient(ctx context.Context, name string, config
}
// runWorkspaceWatcher executes the workspace watcher for an LSP client.
-func (app *App) runWorkspaceWatcher(ctx context.Context, name string, workspaceWatcher *watcher.WorkspaceWatcher) {
+func (app *App) runWorkspaceWatcher(ctx context.Context, name string, workspaceWatcher *watcher.Client) {
defer app.lspWatcherWG.Done()
defer log.RecoverPanic("LSP-"+name, func() {
// Try to restart the client.
app.restartLSPClient(ctx, name)
})
- workspaceWatcher.WatchWorkspace(ctx, app.config.WorkingDir())
+ workspaceWatcher.Watch(ctx, app.config.WorkingDir())
slog.Info("Workspace watcher stopped", "client", name)
}
diff --git a/internal/lsp/watcher/global_watcher.go b/internal/lsp/watcher/global_watcher.go
new file mode 100644
index 0000000000000000000000000000000000000000..29b19f316ba0f654ae779526b5926b1fe9785819
--- /dev/null
+++ b/internal/lsp/watcher/global_watcher.go
@@ -0,0 +1,364 @@
+package watcher
+
+import (
+ "context"
+ "fmt"
+ "log/slog"
+ "os"
+ "sync"
+ "sync/atomic"
+ "time"
+
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/csync"
+ "github.com/charmbracelet/crush/internal/fsext"
+ "github.com/charmbracelet/crush/internal/lsp/protocol"
+ "github.com/fsnotify/fsnotify"
+)
+
+// global manages a single fsnotify.Watcher instance shared across all LSP clients.
+//
+// IMPORTANT: This implementation only watches directories, not individual files.
+// The fsnotify library automatically provides events for all files within watched
+// directories, making this approach much more efficient than watching individual files.
+//
+// Key benefits of directory-only watching:
+// - Significantly fewer file descriptors used
+// - Automatic coverage of new files created in watched directories
+// - Better performance with large codebases
+// - fsnotify handles deduplication internally (no need to track watched dirs)
+type global struct {
+ watcher *fsnotify.Watcher
+
+ // Map of workspace watchers by client name
+ watchers *csync.Map[string, *Client]
+
+ // Single workspace root directory for ignore checking
+ root string
+
+ started atomic.Bool
+
+ // Debouncing for file events (shared across all clients)
+ debounceTime time.Duration
+ debounceMap *csync.Map[string, *time.Timer]
+
+ // Context for shutdown
+ ctx context.Context
+ cancel context.CancelFunc
+
+ // Wait group for cleanup
+ wg sync.WaitGroup
+}
+
+// instance returns the singleton global watcher instance
+var instance = sync.OnceValue(func() *global {
+ ctx, cancel := context.WithCancel(context.Background())
+ gw := &global{
+ watchers: csync.NewMap[string, *Client](),
+ debounceTime: 300 * time.Millisecond,
+ debounceMap: csync.NewMap[string, *time.Timer](),
+ ctx: ctx,
+ cancel: cancel,
+ }
+
+ // Initialize the fsnotify watcher
+ watcher, err := fsnotify.NewWatcher()
+ if err != nil {
+ slog.Error("lsp watcher: Failed to create global file watcher", "error", err)
+ return gw
+ }
+
+ gw.watcher = watcher
+
+ return gw
+})
+
+// register registers a workspace watcher with the global watcher
+func (gw *global) register(name string, watcher *Client) {
+ gw.watchers.Set(name, watcher)
+ slog.Debug("lsp watcher: Registered workspace watcher", "name", name)
+}
+
+// unregister removes a workspace watcher from the global watcher
+func (gw *global) unregister(name string) {
+ gw.watchers.Del(name)
+ slog.Debug("lsp watcher: Unregistered workspace watcher", "name", name)
+}
+
+// Start walks the given path and sets up the watcher on it.
+//
+// Note: We only watch directories, not individual files. fsnotify automatically provides
+// events for all files within watched directories. Multiple calls with the same workspace
+// are safe since fsnotify handles directory deduplication internally.
+func Start() error {
+ gw := instance()
+
+ // technically workspace root is always the same...
+ if gw.started.Load() {
+ slog.Debug("lsp watcher: watcher already set up, skipping")
+ return nil
+ }
+
+ cfg := config.Get()
+ root := cfg.WorkingDir()
+ slog.Debug("lsp watcher: set workspace directory to global watcher", "path", root)
+
+ // Store the workspace root for hierarchical ignore checking
+ gw.root = root
+ gw.started.Store(true)
+
+ // Start the event processing goroutine now that we're initialized
+ gw.wg.Add(1)
+ go gw.processEvents()
+
+ // Walk the workspace and add only directories to the watcher
+ // fsnotify will automatically provide events for all files within these directories
+ // Multiple calls with the same directories are safe (fsnotify deduplicates)
+ err := fsext.WalkDirectories(root, func(path string, d os.DirEntry, err error) error {
+ if err != nil {
+ return err
+ }
+
+ // Add directory to watcher (fsnotify handles deduplication automatically)
+ if err := gw.addDirectoryToWatcher(path); err != nil {
+ slog.Error("lsp watcher: Error watching directory", "path", path, "error", err)
+ }
+
+ return nil
+ })
+ if err != nil {
+ return fmt.Errorf("lsp watcher: error walking workspace %s: %w", root, err)
+ }
+
+ return nil
+}
+
+// addDirectoryToWatcher adds a directory to the fsnotify watcher.
+// fsnotify handles deduplication internally, so we don't need to track watched directories.
+func (gw *global) addDirectoryToWatcher(dirPath string) error {
+ if gw.watcher == nil {
+ return fmt.Errorf("lsp watcher: global watcher not initialized")
+ }
+
+ // Add directory to fsnotify watcher - fsnotify handles deduplication
+ // "A path can only be watched once; watching it more than once is a no-op"
+ err := gw.watcher.Add(dirPath)
+ if err != nil {
+ return fmt.Errorf("lsp watcher: failed to watch directory %s: %w", dirPath, err)
+ }
+
+ slog.Debug("lsp watcher: watching directory", "path", dirPath)
+ return nil
+}
+
+// processEvents processes file system events and handles them centrally.
+// Since we only watch directories, we automatically get events for all files
+// within those directories. When new directories are created, we add them
+// to the watcher to ensure complete coverage.
+func (gw *global) processEvents() {
+ defer gw.wg.Done()
+ cfg := config.Get()
+
+ if gw.watcher == nil || !gw.started.Load() {
+ slog.Error("lsp watcher: Global watcher not initialized")
+ return
+ }
+
+ for {
+ select {
+ case <-gw.ctx.Done():
+ return
+
+ case event, ok := <-gw.watcher.Events:
+ if !ok {
+ return
+ }
+
+ // Handle directory creation globally (only once)
+ // When new directories are created, we need to add them to the watcher
+ // to ensure we get events for files created within them
+ if event.Op&fsnotify.Create != 0 {
+ if info, err := os.Stat(event.Name); err == nil && info.IsDir() {
+ if !fsext.ShouldExcludeFile(gw.root, event.Name) {
+ if err := gw.addDirectoryToWatcher(event.Name); err != nil {
+ slog.Error("lsp watcher: Error adding new directory to watcher", "path", event.Name, "error", err)
+ }
+ } else if cfg != nil && cfg.Options.DebugLSP {
+ slog.Debug("lsp watcher: Skipping ignored new directory", "path", event.Name)
+ }
+ }
+ }
+
+ if cfg != nil && cfg.Options.DebugLSP {
+ slog.Debug("lsp watcher: Global watcher received event", "path", event.Name, "op", event.Op.String())
+ }
+
+ // Process the event centrally
+ gw.handleFileEvent(event)
+
+ case err, ok := <-gw.watcher.Errors:
+ if !ok {
+ return
+ }
+ slog.Error("lsp watcher: Global watcher error", "error", err)
+ }
+ }
+}
+
+// handleFileEvent processes a file system event and distributes notifications to relevant clients
+func (gw *global) handleFileEvent(event fsnotify.Event) {
+ cfg := config.Get()
+ uri := string(protocol.URIFromPath(event.Name))
+
+ // Handle file creation for all relevant clients (only once)
+ if event.Op&fsnotify.Create != 0 {
+ if info, err := os.Stat(event.Name); err == nil && !info.IsDir() {
+ if !fsext.ShouldExcludeFile(gw.root, event.Name) {
+ gw.openMatchingFileForClients(event.Name)
+ }
+ }
+ }
+
+ // Process the event for each relevant client
+ for client, watcher := range gw.watchers.Seq2() {
+ if !watcher.client.HandlesFile(event.Name) {
+ continue // client doesn't handle this filetype
+ }
+
+ // Debug logging per client
+ if cfg.Options.DebugLSP {
+ matched, kind := watcher.isPathWatched(event.Name)
+ slog.Debug("lsp watcher: File event for client",
+ "path", event.Name,
+ "operation", event.Op.String(),
+ "watched", matched,
+ "kind", kind,
+ "client", client,
+ )
+ }
+
+ // Check if this path should be watched according to server registrations
+ if watched, watchKind := watcher.isPathWatched(event.Name); watched {
+ switch {
+ case event.Op&fsnotify.Write != 0:
+ if watchKind&protocol.WatchChange != 0 {
+ gw.debounceHandleFileEventForClient(watcher, uri, protocol.FileChangeType(protocol.Changed))
+ }
+ case event.Op&fsnotify.Create != 0:
+ // File creation was already handled globally above
+ // Just send the notification if needed
+ info, err := os.Stat(event.Name)
+ if err != nil {
+ if !os.IsNotExist(err) {
+ slog.Debug("lsp watcher: Error getting file info", "path", event.Name, "error", err)
+ }
+ continue
+ }
+ if !info.IsDir() && watchKind&protocol.WatchCreate != 0 {
+ gw.debounceHandleFileEventForClient(watcher, uri, protocol.FileChangeType(protocol.Created))
+ }
+ case event.Op&fsnotify.Remove != 0:
+ if watchKind&protocol.WatchDelete != 0 {
+ gw.handleFileEventForClient(watcher, uri, protocol.FileChangeType(protocol.Deleted))
+ }
+ case event.Op&fsnotify.Rename != 0:
+ // For renames, first delete
+ if watchKind&protocol.WatchDelete != 0 {
+ gw.handleFileEventForClient(watcher, uri, protocol.FileChangeType(protocol.Deleted))
+ }
+
+ // Then check if the new file exists and create an event
+ if info, err := os.Stat(event.Name); err == nil && !info.IsDir() {
+ if watchKind&protocol.WatchCreate != 0 {
+ gw.debounceHandleFileEventForClient(watcher, uri, protocol.FileChangeType(protocol.Created))
+ }
+ }
+ }
+ }
+ }
+}
+
+// openMatchingFileForClients opens a newly created file for all clients that handle it (only once per file)
+func (gw *global) openMatchingFileForClients(path string) {
+ // Skip directories
+ info, err := os.Stat(path)
+ if err != nil || info.IsDir() {
+ return
+ }
+
+ // Skip excluded files
+ if fsext.ShouldExcludeFile(gw.root, path) {
+ return
+ }
+
+ // Open the file for each client that handles it and has matching patterns
+ for _, watcher := range gw.watchers.Seq2() {
+ if watcher.client.HandlesFile(path) {
+ watcher.openMatchingFile(gw.ctx, path)
+ }
+ }
+}
+
+// debounceHandleFileEventForClient handles file events with debouncing for a specific client
+func (gw *global) debounceHandleFileEventForClient(watcher *Client, uri string, changeType protocol.FileChangeType) {
+ // Create a unique key based on URI, change type, and client name
+ key := fmt.Sprintf("%s:%d:%s", uri, changeType, watcher.name)
+
+ // Cancel existing timer if any
+ if timer, exists := gw.debounceMap.Get(key); exists {
+ timer.Stop()
+ }
+
+ // Create new timer
+ gw.debounceMap.Set(key, time.AfterFunc(gw.debounceTime, func() {
+ gw.handleFileEventForClient(watcher, uri, changeType)
+
+ // Cleanup timer after execution
+ gw.debounceMap.Del(key)
+ }))
+}
+
+// handleFileEventForClient sends file change notifications to a specific client
+func (gw *global) handleFileEventForClient(watcher *Client, uri string, changeType protocol.FileChangeType) {
+ // If the file is open and it's a change event, use didChange notification
+ filePath, err := protocol.DocumentURI(uri).Path()
+ if err != nil {
+ slog.Error("lsp watcher: Error converting URI to path", "uri", uri, "error", err)
+ return
+ }
+
+ if changeType == protocol.FileChangeType(protocol.Deleted) {
+ watcher.client.ClearDiagnosticsForURI(protocol.DocumentURI(uri))
+ } else if changeType == protocol.FileChangeType(protocol.Changed) && watcher.client.IsFileOpen(filePath) {
+ err := watcher.client.NotifyChange(gw.ctx, filePath)
+ if err != nil {
+ slog.Error("lsp watcher: Error notifying change", "error", err)
+ }
+ return
+ }
+
+ // Notify LSP server about the file event using didChangeWatchedFiles
+ if err := watcher.notifyFileEvent(gw.ctx, uri, changeType); err != nil {
+ slog.Error("lsp watcher: Error notifying LSP server about file event", "error", err)
+ }
+}
+
+// shutdown gracefully shuts down the global watcher
+func (gw *global) shutdown() {
+ if gw.cancel != nil {
+ gw.cancel()
+ }
+
+ if gw.watcher != nil {
+ gw.watcher.Close()
+ gw.watcher = nil
+ }
+
+ gw.wg.Wait()
+ slog.Debug("lsp watcher: Global watcher shutdown complete")
+}
+
+// Shutdown shuts down the singleton global watcher
+func Shutdown() {
+ instance().shutdown()
+}
diff --git a/internal/lsp/watcher/global_watcher_test.go b/internal/lsp/watcher/global_watcher_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..09124cd6a570b9b46b003b06b5f76dcbcbef22ff
--- /dev/null
+++ b/internal/lsp/watcher/global_watcher_test.go
@@ -0,0 +1,297 @@
+package watcher
+
+import (
+ "context"
+ "os"
+ "path/filepath"
+ "testing"
+ "time"
+
+ "github.com/charmbracelet/crush/internal/csync"
+ "github.com/fsnotify/fsnotify"
+)
+
+func TestGlobalWatcher(t *testing.T) {
+ t.Parallel()
+
+ // Test that we can get the global watcher instance
+ gw1 := instance()
+ if gw1 == nil {
+ t.Fatal("Expected global watcher instance, got nil")
+ }
+
+ // Test that subsequent calls return the same instance (singleton)
+ gw2 := instance()
+ if gw1 != gw2 {
+ t.Fatal("Expected same global watcher instance, got different instances")
+ }
+
+ // Test registration and unregistration
+ mockWatcher := &Client{
+ name: "test-watcher",
+ }
+
+ gw1.register("test", mockWatcher)
+
+ // Check that it was registered
+ registered, _ := gw1.watchers.Get("test")
+
+ if registered != mockWatcher {
+ t.Fatal("Expected workspace watcher to be registered")
+ }
+
+ // Test unregistration
+ gw1.unregister("test")
+
+ unregistered, _ := gw1.watchers.Get("test")
+
+ if unregistered != nil {
+ t.Fatal("Expected workspace watcher to be unregistered")
+ }
+}
+
+func TestGlobalWatcherWorkspaceIdempotent(t *testing.T) {
+ t.Parallel()
+
+ // Create a temporary directory for testing
+ tempDir := t.TempDir()
+
+ // Create a new global watcher instance for this test
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ // Create a real fsnotify watcher for testing
+ watcher, err := fsnotify.NewWatcher()
+ if err != nil {
+ t.Fatalf("Failed to create fsnotify watcher: %v", err)
+ }
+ defer watcher.Close()
+
+ gw := &global{
+ watcher: watcher,
+ watchers: csync.NewMap[string, *Client](),
+ debounceTime: 300 * time.Millisecond,
+ debounceMap: csync.NewMap[string, *time.Timer](),
+ ctx: ctx,
+ cancel: cancel,
+ }
+
+ // Test that watching the same workspace multiple times is safe (idempotent)
+ err1 := gw.addDirectoryToWatcher(tempDir)
+ if err1 != nil {
+ t.Fatalf("First addDirectoryToWatcher call failed: %v", err1)
+ }
+
+ err2 := gw.addDirectoryToWatcher(tempDir)
+ if err2 != nil {
+ t.Fatalf("Second addDirectoryToWatcher call failed: %v", err2)
+ }
+
+ err3 := gw.addDirectoryToWatcher(tempDir)
+ if err3 != nil {
+ t.Fatalf("Third addDirectoryToWatcher call failed: %v", err3)
+ }
+
+ // All calls should succeed - fsnotify handles deduplication internally
+ // This test verifies that multiple WatchWorkspace calls are safe
+}
+
+func TestGlobalWatcherOnlyWatchesDirectories(t *testing.T) {
+ t.Parallel()
+
+ // Create a temporary directory structure for testing
+ tempDir := t.TempDir()
+ subDir := filepath.Join(tempDir, "subdir")
+ if err := os.Mkdir(subDir, 0o755); err != nil {
+ t.Fatalf("Failed to create subdirectory: %v", err)
+ }
+
+ // Create some files
+ file1 := filepath.Join(tempDir, "file1.txt")
+ file2 := filepath.Join(subDir, "file2.txt")
+ if err := os.WriteFile(file1, []byte("content1"), 0o644); err != nil {
+ t.Fatalf("Failed to create file1: %v", err)
+ }
+ if err := os.WriteFile(file2, []byte("content2"), 0o644); err != nil {
+ t.Fatalf("Failed to create file2: %v", err)
+ }
+
+ // Create a new global watcher instance for this test
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ // Create a real fsnotify watcher for testing
+ watcher, err := fsnotify.NewWatcher()
+ if err != nil {
+ t.Fatalf("Failed to create fsnotify watcher: %v", err)
+ }
+ defer watcher.Close()
+
+ gw := &global{
+ watcher: watcher,
+ watchers: csync.NewMap[string, *Client](),
+ debounceTime: 300 * time.Millisecond,
+ debounceMap: csync.NewMap[string, *time.Timer](),
+ ctx: ctx,
+ cancel: cancel,
+ }
+
+ // Watch the workspace
+ err = gw.addDirectoryToWatcher(tempDir)
+ if err != nil {
+ t.Fatalf("addDirectoryToWatcher failed: %v", err)
+ }
+
+ // Verify that our expected directories exist and can be watched
+ expectedDirs := []string{tempDir, subDir}
+
+ for _, expectedDir := range expectedDirs {
+ info, err := os.Stat(expectedDir)
+ if err != nil {
+ t.Fatalf("Expected directory %s doesn't exist: %v", expectedDir, err)
+ }
+ if !info.IsDir() {
+ t.Fatalf("Expected %s to be a directory, but it's not", expectedDir)
+ }
+
+ // Try to add it again - fsnotify should handle this gracefully
+ err = gw.addDirectoryToWatcher(expectedDir)
+ if err != nil {
+ t.Fatalf("Failed to add directory %s to watcher: %v", expectedDir, err)
+ }
+ }
+
+ // Verify that files exist but we don't try to watch them directly
+ testFiles := []string{file1, file2}
+ for _, file := range testFiles {
+ info, err := os.Stat(file)
+ if err != nil {
+ t.Fatalf("Test file %s doesn't exist: %v", file, err)
+ }
+ if info.IsDir() {
+ t.Fatalf("Expected %s to be a file, but it's a directory", file)
+ }
+ }
+}
+
+func TestFsnotifyDeduplication(t *testing.T) {
+ t.Parallel()
+
+ // Create a temporary directory for testing
+ tempDir := t.TempDir()
+
+ // Create a real fsnotify watcher
+ watcher, err := fsnotify.NewWatcher()
+ if err != nil {
+ t.Fatalf("Failed to create fsnotify watcher: %v", err)
+ }
+ defer watcher.Close()
+
+ // Add the same directory multiple times
+ err1 := watcher.Add(tempDir)
+ if err1 != nil {
+ t.Fatalf("First Add failed: %v", err1)
+ }
+
+ err2 := watcher.Add(tempDir)
+ if err2 != nil {
+ t.Fatalf("Second Add failed: %v", err2)
+ }
+
+ err3 := watcher.Add(tempDir)
+ if err3 != nil {
+ t.Fatalf("Third Add failed: %v", err3)
+ }
+
+ // All should succeed - fsnotify handles deduplication internally
+ // This test verifies the fsnotify behavior we're relying on
+}
+
+func TestGlobalWatcherRespectsIgnoreFiles(t *testing.T) {
+ t.Parallel()
+
+ // Create a temporary directory structure for testing
+ tempDir := t.TempDir()
+
+ // Create directories that should be ignored
+ nodeModules := filepath.Join(tempDir, "node_modules")
+ target := filepath.Join(tempDir, "target")
+ customIgnored := filepath.Join(tempDir, "custom_ignored")
+ normalDir := filepath.Join(tempDir, "src")
+
+ for _, dir := range []string{nodeModules, target, customIgnored, normalDir} {
+ if err := os.MkdirAll(dir, 0o755); err != nil {
+ t.Fatalf("Failed to create directory %s: %v", dir, err)
+ }
+ }
+
+ // Create .gitignore file
+ gitignoreContent := "node_modules/\ntarget/\n"
+ if err := os.WriteFile(filepath.Join(tempDir, ".gitignore"), []byte(gitignoreContent), 0o644); err != nil {
+ t.Fatalf("Failed to create .gitignore: %v", err)
+ }
+
+ // Create .crushignore file
+ crushignoreContent := "custom_ignored/\n"
+ if err := os.WriteFile(filepath.Join(tempDir, ".crushignore"), []byte(crushignoreContent), 0o644); err != nil {
+ t.Fatalf("Failed to create .crushignore: %v", err)
+ }
+
+ // Create a new global watcher instance for this test
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ // Create a real fsnotify watcher for testing
+ watcher, err := fsnotify.NewWatcher()
+ if err != nil {
+ t.Fatalf("Failed to create fsnotify watcher: %v", err)
+ }
+ defer watcher.Close()
+
+ gw := &global{
+ watcher: watcher,
+ watchers: csync.NewMap[string, *Client](),
+ debounceTime: 300 * time.Millisecond,
+ debounceMap: csync.NewMap[string, *time.Timer](),
+ ctx: ctx,
+ cancel: cancel,
+ }
+
+ // Watch the workspace
+ err = gw.addDirectoryToWatcher(tempDir)
+ if err != nil {
+ t.Fatalf("addDirectoryToWatcher failed: %v", err)
+ }
+
+ // This test verifies that the watcher can successfully add directories to fsnotify
+ // The actual ignore logic is tested in the fsext package
+ // Here we just verify that the watcher integration works
+}
+
+func TestGlobalWatcherShutdown(t *testing.T) {
+ t.Parallel()
+
+ // Create a new context for this test
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ // Create a temporary global watcher for testing
+ gw := &global{
+ watchers: csync.NewMap[string, *Client](),
+ debounceTime: 300 * time.Millisecond,
+ debounceMap: csync.NewMap[string, *time.Timer](),
+ ctx: ctx,
+ cancel: cancel,
+ }
+
+ // Test shutdown doesn't panic
+ gw.shutdown()
+
+ // Verify context was cancelled
+ select {
+ case <-gw.ctx.Done():
+ // Expected
+ case <-time.After(100 * time.Millisecond):
+ t.Fatal("Expected context to be cancelled after shutdown")
+ }
+}
diff --git a/internal/lsp/watcher/watcher.go b/internal/lsp/watcher/watcher.go
index ad03099ae9a2b1e516fdcab820052c1ca858bd2a..18b790349a10f0827f45f8ccb9fb6968980a9d4e 100644
--- a/internal/lsp/watcher/watcher.go
+++ b/internal/lsp/watcher/watcher.go
@@ -7,7 +7,6 @@ import (
"os"
"path/filepath"
"strings"
- "sync"
"time"
"github.com/bmatcuk/doublestar/v4"
@@ -16,58 +15,39 @@ import (
"github.com/charmbracelet/crush/internal/lsp"
"github.com/charmbracelet/crush/internal/lsp/protocol"
- "github.com/fsnotify/fsnotify"
)
-// WorkspaceWatcher manages LSP file watching
-type WorkspaceWatcher struct {
+// Client manages LSP file watching for a specific client
+// It now delegates actual file watching to the GlobalWatcher
+type Client struct {
client *lsp.Client
name string
workspacePath string
- debounceTime time.Duration
- debounceMap *csync.Map[string, *time.Timer]
-
// File watchers registered by the server
- registrations []protocol.FileSystemWatcher
- registrationMu sync.RWMutex
+ registrations *csync.Slice[protocol.FileSystemWatcher]
}
-func init() {
- // Ensure the watcher is initialized with a reasonable file limit
- if _, err := Ulimit(); err != nil {
- slog.Error("Error setting file limit", "error", err)
- }
-}
-
-// NewWorkspaceWatcher creates a new workspace watcher
-func NewWorkspaceWatcher(name string, client *lsp.Client) *WorkspaceWatcher {
- return &WorkspaceWatcher{
+// New creates a new workspace watcher for the given client.
+func New(name string, client *lsp.Client) *Client {
+ return &Client{
name: name,
client: client,
- debounceTime: 300 * time.Millisecond,
- debounceMap: csync.NewMap[string, *time.Timer](),
- registrations: []protocol.FileSystemWatcher{},
+ registrations: csync.NewSlice[protocol.FileSystemWatcher](),
}
}
-// AddRegistrations adds file watchers to track
-func (w *WorkspaceWatcher) AddRegistrations(ctx context.Context, id string, watchers []protocol.FileSystemWatcher) {
+// register adds file watchers to track
+func (w *Client) register(ctx context.Context, id string, watchers []protocol.FileSystemWatcher) {
cfg := config.Get()
- slog.Debug("Adding file watcher registrations")
- w.registrationMu.Lock()
- defer w.registrationMu.Unlock()
-
- // Add new watchers
- w.registrations = append(w.registrations, watchers...)
+ w.registrations.Append(watchers...)
- // Print detailed registration information for debugging
if cfg.Options.DebugLSP {
slog.Debug("Adding file watcher registrations",
"id", id,
"watchers", len(watchers),
- "total", len(w.registrations),
+ "total", w.registrations.Len(),
)
for i, watcher := range watchers {
@@ -103,116 +83,29 @@ func (w *WorkspaceWatcher) AddRegistrations(ctx context.Context, id string, watc
}
}
- // Determine server type for specialized handling
- serverName := w.name
- slog.Debug("Server type detected", "serverName", serverName)
-
- // Check if this server has sent file watchers
- hasFileWatchers := len(watchers) > 0
-
- // For servers that need file preloading, we'll use a smart approach
- if shouldPreloadFiles(serverName) || !hasFileWatchers {
+ // For servers that need file preloading, open high-priority files only
+ if shouldPreloadFiles(w.name) {
go func() {
- startTime := time.Now()
- filesOpened := 0
-
- // Determine max files to open based on server type
- maxFilesToOpen := 50 // Default conservative limit
-
- switch serverName {
- case "typescript", "typescript-language-server", "tsserver", "vtsls":
- // TypeScript servers benefit from seeing more files
- maxFilesToOpen = 100
- case "java", "jdtls":
- // Java servers need to see many files for project model
- maxFilesToOpen = 200
- }
-
- // First, open high-priority files
- highPriorityFilesOpened := w.openHighPriorityFiles(ctx, serverName)
- filesOpened += highPriorityFilesOpened
-
+ highPriorityFilesOpened := w.openHighPriorityFiles(ctx, w.name)
if cfg.Options.DebugLSP {
slog.Debug("Opened high-priority files",
"count", highPriorityFilesOpened,
- "serverName", serverName)
- }
-
- // If we've already opened enough high-priority files, we might not need more
- if filesOpened >= maxFilesToOpen {
- if cfg.Options.DebugLSP {
- slog.Debug("Reached file limit with high-priority files",
- "filesOpened", filesOpened,
- "maxFiles", maxFilesToOpen)
- }
- return
- }
-
- // For the remaining slots, walk the directory and open matching files
-
- err := filepath.WalkDir(w.workspacePath, func(path string, d os.DirEntry, err error) error {
- if err != nil {
- return err
- }
-
- // Skip directories that should be excluded
- if d.IsDir() {
- if path != w.workspacePath && shouldExcludeDir(path) {
- if cfg.Options.DebugLSP {
- slog.Debug("Skipping excluded directory", "path", path)
- }
- return filepath.SkipDir
- }
- } else {
- // Process files, but limit the total number
- if filesOpened < maxFilesToOpen {
- // Only process if it's not already open (high-priority files were opened earlier)
- if !w.client.IsFileOpen(path) {
- w.openMatchingFile(ctx, path)
- filesOpened++
-
- // Add a small delay after every 10 files to prevent overwhelming the server
- if filesOpened%10 == 0 {
- time.Sleep(50 * time.Millisecond)
- }
- }
- } else {
- // We've reached our limit, stop walking
- return filepath.SkipAll
- }
- }
-
- return nil
- })
-
- elapsedTime := time.Since(startTime)
- if cfg.Options.DebugLSP {
- slog.Debug("Limited workspace scan complete",
- "filesOpened", filesOpened,
- "maxFiles", maxFilesToOpen,
- "elapsedTime", elapsedTime.Seconds(),
- "workspacePath", w.workspacePath,
- )
- }
-
- if err != nil && cfg.Options.DebugLSP {
- slog.Debug("Error scanning workspace for files to open", "error", err)
+ "serverName", w.name)
}
}()
- } else if cfg.Options.DebugLSP {
- slog.Debug("Using on-demand file loading for server", "server", serverName)
}
}
// openHighPriorityFiles opens important files for the server type
// Returns the number of files opened
-func (w *WorkspaceWatcher) openHighPriorityFiles(ctx context.Context, serverName string) int {
+func (w *Client) openHighPriorityFiles(ctx context.Context, serverName string) int {
cfg := config.Get()
filesOpened := 0
// Define patterns for high-priority files based on server type
var patterns []string
+ // TODO: move this to LSP config
switch serverName {
case "typescript", "typescript-language-server", "tsserver", "vtsls":
patterns = []string{
@@ -329,160 +222,35 @@ func (w *WorkspaceWatcher) openHighPriorityFiles(ctx context.Context, serverName
return filesOpened
}
-// WatchWorkspace sets up file watching for a workspace
-func (w *WorkspaceWatcher) WatchWorkspace(ctx context.Context, workspacePath string) {
- cfg := config.Get()
+// Watch sets up file watching for a workspace using the global watcher
+func (w *Client) Watch(ctx context.Context, workspacePath string) {
w.workspacePath = workspacePath
slog.Debug("Starting workspace watcher", "workspacePath", workspacePath, "serverName", w.name)
+ // Register this workspace watcher with the global watcher
+ instance().register(w.name, w)
+ defer instance().unregister(w.name)
+
// Register handler for file watcher registrations from the server
lsp.RegisterFileWatchHandler(func(id string, watchers []protocol.FileSystemWatcher) {
- w.AddRegistrations(ctx, id, watchers)
+ w.register(ctx, id, watchers)
})
- watcher, err := fsnotify.NewWatcher()
- if err != nil {
- slog.Error("Error creating watcher", "error", err)
- }
- defer watcher.Close()
-
- // Watch the workspace recursively
- err = filepath.WalkDir(workspacePath, func(path string, d os.DirEntry, err error) error {
- if err != nil {
- return err
- }
-
- // Skip excluded directories (except workspace root)
- if d.IsDir() && path != workspacePath {
- if shouldExcludeDir(path) {
- if cfg.Options.DebugLSP {
- slog.Debug("Skipping excluded directory", "path", path)
- }
- return filepath.SkipDir
- }
- }
-
- // Add directories to watcher
- if d.IsDir() {
- err = watcher.Add(path)
- if err != nil {
- slog.Error("Error watching path", "path", path, "error", err)
- }
- }
-
- return nil
- })
- if err != nil {
- slog.Error("Error walking workspace", "error", err)
- }
-
- // Event loop
- for {
- select {
- case <-ctx.Done():
- return
- case event, ok := <-watcher.Events:
- if !ok {
- return
- }
-
- if !w.client.HandlesFile(event.Name) {
- continue // client doesn't handle this filetype
- }
-
- uri := string(protocol.URIFromPath(event.Name))
-
- // Add new directories to the watcher
- if event.Op&fsnotify.Create != 0 {
- if info, err := os.Stat(event.Name); err == nil {
- if info.IsDir() {
- // Skip excluded directories
- if !shouldExcludeDir(event.Name) {
- if err := watcher.Add(event.Name); err != nil {
- slog.Error("Error adding directory to watcher", "path", event.Name, "error", err)
- }
- }
- } else {
- // For newly created files
- if !shouldExcludeFile(event.Name) {
- w.openMatchingFile(ctx, event.Name)
- }
- }
- }
- }
-
- // Debug logging
- if cfg.Options.DebugLSP {
- matched, kind := w.isPathWatched(event.Name)
- slog.Debug("File event",
- "path", event.Name,
- "operation", event.Op.String(),
- "watched", matched,
- "kind", kind,
- )
- }
-
- // Check if this path should be watched according to server registrations
- if watched, watchKind := w.isPathWatched(event.Name); watched {
- switch {
- case event.Op&fsnotify.Write != 0:
- if watchKind&protocol.WatchChange != 0 {
- w.debounceHandleFileEvent(ctx, uri, protocol.FileChangeType(protocol.Changed))
- }
- case event.Op&fsnotify.Create != 0:
- // Already handled earlier in the event loop
- // Just send the notification if needed
- info, err := os.Stat(event.Name)
- if err != nil {
- if !os.IsNotExist(err) {
- // Only log if it's not a "file not found" error
- slog.Debug("Error getting file info", "path", event.Name, "error", err)
- }
- continue
- }
- if !info.IsDir() && watchKind&protocol.WatchCreate != 0 {
- w.debounceHandleFileEvent(ctx, uri, protocol.FileChangeType(protocol.Created))
- }
- case event.Op&fsnotify.Remove != 0:
- if watchKind&protocol.WatchDelete != 0 {
- w.handleFileEvent(ctx, uri, protocol.FileChangeType(protocol.Deleted))
- }
- case event.Op&fsnotify.Rename != 0:
- // For renames, first delete
- if watchKind&protocol.WatchDelete != 0 {
- w.handleFileEvent(ctx, uri, protocol.FileChangeType(protocol.Deleted))
- }
-
- // Then check if the new file exists and create an event
- if info, err := os.Stat(event.Name); err == nil && !info.IsDir() {
- if watchKind&protocol.WatchCreate != 0 {
- w.debounceHandleFileEvent(ctx, uri, protocol.FileChangeType(protocol.Created))
- }
- }
- }
- }
- case err, ok := <-watcher.Errors:
- if !ok {
- return
- }
- slog.Error("Error watching file", "error", err)
- }
- }
+ // Wait for context cancellation
+ <-ctx.Done()
+ slog.Debug("Workspace watcher stopped", "name", w.name)
}
// isPathWatched checks if a path should be watched based on server registrations
-func (w *WorkspaceWatcher) isPathWatched(path string) (bool, protocol.WatchKind) {
- w.registrationMu.RLock()
- defer w.registrationMu.RUnlock()
-
- // If no explicit registrations, watch everything
- if len(w.registrations) == 0 {
+// If no explicit registrations, watch everything
+func (w *Client) isPathWatched(path string) (bool, protocol.WatchKind) {
+ if w.registrations.Len() == 0 {
return true, protocol.WatchKind(protocol.WatchChange | protocol.WatchCreate | protocol.WatchDelete)
}
// Check each registration
- for _, reg := range w.registrations {
+ for reg := range w.registrations.Seq() {
isMatch := w.matchesPattern(path, reg.GlobPattern)
if isMatch {
kind := protocol.WatchKind(protocol.WatchChange | protocol.WatchCreate | protocol.WatchDelete)
@@ -496,110 +264,19 @@ func (w *WorkspaceWatcher) isPathWatched(path string) (bool, protocol.WatchKind)
return false, 0
}
-// matchesGlob handles advanced glob patterns including ** and alternatives
+// matchesGlob handles glob patterns using the doublestar library
func matchesGlob(pattern, path string) bool {
- // Handle file extension patterns with braces like *.{go,mod,sum}
- if strings.Contains(pattern, "{") && strings.Contains(pattern, "}") {
- // Extract extensions from pattern like "*.{go,mod,sum}"
- parts := strings.SplitN(pattern, "{", 2)
- if len(parts) == 2 {
- prefix := parts[0]
- extPart := strings.SplitN(parts[1], "}", 2)
- if len(extPart) == 2 {
- extensions := strings.Split(extPart[0], ",")
- suffix := extPart[1]
-
- // Check if the path matches any of the extensions
- for _, ext := range extensions {
- extPattern := prefix + ext + suffix
- isMatch := matchesSimpleGlob(extPattern, path)
- if isMatch {
- return true
- }
- }
- return false
- }
- }
- }
-
- return matchesSimpleGlob(pattern, path)
-}
-
-// matchesSimpleGlob handles glob patterns with ** wildcards
-func matchesSimpleGlob(pattern, path string) bool {
- // Handle special case for **/*.ext pattern (common in LSP)
- if after, ok := strings.CutPrefix(pattern, "**/"); ok {
- rest := after
-
- // If the rest is a simple file extension pattern like *.go
- if strings.HasPrefix(rest, "*.") {
- ext := strings.TrimPrefix(rest, "*")
- isMatch := strings.HasSuffix(path, ext)
- return isMatch
- }
-
- // Otherwise, try to check if the path ends with the rest part
- isMatch := strings.HasSuffix(path, rest)
-
- // If it matches directly, great!
- if isMatch {
- return true
- }
-
- // Otherwise, check if any path component matches
- pathComponents := strings.Split(path, "/")
- for i := range pathComponents {
- subPath := strings.Join(pathComponents[i:], "/")
- if strings.HasSuffix(subPath, rest) {
- return true
- }
- }
-
- return false
- }
-
- // Handle other ** wildcard pattern cases
- if strings.Contains(pattern, "**") {
- parts := strings.Split(pattern, "**")
-
- // Validate the path starts with the first part
- if !strings.HasPrefix(path, parts[0]) && parts[0] != "" {
- return false
- }
-
- // For patterns like "**/*.go", just check the suffix
- if len(parts) == 2 && parts[0] == "" {
- isMatch := strings.HasSuffix(path, parts[1])
- return isMatch
- }
-
- // For other patterns, handle middle part
- remaining := strings.TrimPrefix(path, parts[0])
- if len(parts) == 2 {
- isMatch := strings.HasSuffix(remaining, parts[1])
- return isMatch
- }
- }
-
- // Handle simple * wildcard for file extension patterns (*.go, *.sum, etc)
- if strings.HasPrefix(pattern, "*.") {
- ext := strings.TrimPrefix(pattern, "*")
- isMatch := strings.HasSuffix(path, ext)
- return isMatch
- }
-
- // Fall back to simple matching for simpler patterns
- matched, err := filepath.Match(pattern, path)
+ // Use doublestar for all glob matching - it handles ** and other complex patterns
+ matched, err := doublestar.Match(pattern, path)
if err != nil {
slog.Error("Error matching pattern", "pattern", pattern, "path", path, "error", err)
return false
}
-
return matched
}
// matchesPattern checks if a path matches the glob pattern
-func (w *WorkspaceWatcher) matchesPattern(path string, pattern protocol.GlobPattern) bool {
+func (w *Client) matchesPattern(path string, pattern protocol.GlobPattern) bool {
patternInfo, err := pattern.AsPattern()
if err != nil {
slog.Error("Error parsing pattern", "pattern", pattern, "error", err)
@@ -637,53 +314,8 @@ func (w *WorkspaceWatcher) matchesPattern(path string, pattern protocol.GlobPatt
return isMatch
}
-// debounceHandleFileEvent handles file events with debouncing to reduce notifications
-func (w *WorkspaceWatcher) debounceHandleFileEvent(ctx context.Context, uri string, changeType protocol.FileChangeType) {
- // Create a unique key based on URI and change type
- key := fmt.Sprintf("%s:%d", uri, changeType)
-
- // Cancel existing timer if any
- if timer, exists := w.debounceMap.Get(key); exists {
- timer.Stop()
- }
-
- // Create new timer
- w.debounceMap.Set(key, time.AfterFunc(w.debounceTime, func() {
- w.handleFileEvent(ctx, uri, changeType)
-
- // Cleanup timer after execution
- w.debounceMap.Del(key)
- }))
-}
-
-// handleFileEvent sends file change notifications
-func (w *WorkspaceWatcher) handleFileEvent(ctx context.Context, uri string, changeType protocol.FileChangeType) {
- // If the file is open and it's a change event, use didChange notification
- filePath, err := protocol.DocumentURI(uri).Path()
- if err != nil {
- // XXX: Do we want to return here, or send the error up the stack?
- slog.Error("Error converting URI to path", "uri", uri, "error", err)
- return
- }
-
- if changeType == protocol.FileChangeType(protocol.Deleted) {
- w.client.ClearDiagnosticsForURI(protocol.DocumentURI(uri))
- } else if changeType == protocol.FileChangeType(protocol.Changed) && w.client.IsFileOpen(filePath) {
- err := w.client.NotifyChange(ctx, filePath)
- if err != nil {
- slog.Error("Error notifying change", "error", err)
- }
- return
- }
-
- // Notify LSP server about the file event using didChangeWatchedFiles
- if err := w.notifyFileEvent(ctx, uri, changeType); err != nil {
- slog.Error("Error notifying LSP server about file event", "error", err)
- }
-}
-
// notifyFileEvent sends a didChangeWatchedFiles notification for a file event
-func (w *WorkspaceWatcher) notifyFileEvent(ctx context.Context, uri string, changeType protocol.FileChangeType) error {
+func (w *Client) notifyFileEvent(ctx context.Context, uri string, changeType protocol.FileChangeType) error {
cfg := config.Get()
if cfg.Options.DebugLSP {
slog.Debug("Notifying file event",
@@ -724,21 +356,6 @@ func shouldPreloadFiles(serverName string) bool {
// Common patterns for directories and files to exclude
// TODO: make configurable
var (
- excludedDirNames = map[string]bool{
- ".git": true,
- "node_modules": true,
- "dist": true,
- "build": true,
- "out": true,
- "bin": true,
- ".idea": true,
- ".vscode": true,
- ".cache": true,
- "coverage": true,
- "target": true, // Rust build output
- "vendor": true, // Go vendor directory
- }
-
excludedFileExtensions = map[string]bool{
".swp": true,
".swo": true,
@@ -780,23 +397,6 @@ var (
maxFileSize int64 = 5 * 1024 * 1024
)
-// shouldExcludeDir returns true if the directory should be excluded from watching/opening
-func shouldExcludeDir(dirPath string) bool {
- dirName := filepath.Base(dirPath)
-
- // Skip dot directories
- if strings.HasPrefix(dirName, ".") {
- return true
- }
-
- // Skip common excluded directories
- if excludedDirNames[dirName] {
- return true
- }
-
- return false
-}
-
// shouldExcludeFile returns true if the file should be excluded from opening
func shouldExcludeFile(filePath string) bool {
fileName := filepath.Base(filePath)
@@ -838,7 +438,7 @@ func shouldExcludeFile(filePath string) bool {
}
// openMatchingFile opens a file if it matches any of the registered patterns
-func (w *WorkspaceWatcher) openMatchingFile(ctx context.Context, path string) {
+func (w *Client) openMatchingFile(ctx context.Context, path string) {
cfg := config.Get()
// Skip directories
info, err := os.Stat(path)
@@ -885,31 +485,10 @@ func (w *WorkspaceWatcher) openMatchingFile(ctx context.Context, path string) {
return
}
- // Check file extension for common source files
- ext := strings.ToLower(filepath.Ext(path))
-
- // Only preload source files for the specific language
- var shouldOpen bool
- switch serverName {
- case "typescript", "typescript-language-server", "tsserver", "vtsls":
- shouldOpen = ext == ".ts" || ext == ".js" || ext == ".tsx" || ext == ".jsx"
- case "gopls":
- shouldOpen = ext == ".go"
- case "rust-analyzer":
- shouldOpen = ext == ".rs"
- case "python", "pyright", "pylsp":
- shouldOpen = ext == ".py"
- case "clangd":
- shouldOpen = ext == ".c" || ext == ".cpp" || ext == ".h" || ext == ".hpp"
- case "java", "jdtls":
- shouldOpen = ext == ".java"
- }
-
- if shouldOpen {
- // Don't need to check if it's already open - the client.OpenFile handles that
- if err := w.client.OpenFile(ctx, path); err != nil && cfg.Options.DebugLSP {
- slog.Error("Error opening file", "path", path, "error", err)
- }
+ // File type is already validated by HandlesFile() and isPathWatched() checks earlier,
+ // so we know this client handles this file type. Just open it.
+ if err := w.client.OpenFile(ctx, path); err != nil && cfg.Options.DebugLSP {
+ slog.Error("Error opening file", "path", path, "error", err)
}
}
From d78ed25f1faa7e449303590c795692e27b93d3f5 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 10 Sep 2025 16:04:42 -0300
Subject: [PATCH 036/236] fix: add back init setting ulimit
This was removed in a merge conflict in #1011.
Signed-off-by: Carlos Alexandro Becker
---
internal/lsp/watcher/watcher.go | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/internal/lsp/watcher/watcher.go b/internal/lsp/watcher/watcher.go
index 18b790349a10f0827f45f8ccb9fb6968980a9d4e..139d144e1e5c65c11962e73201b42b15cd09f98a 100644
--- a/internal/lsp/watcher/watcher.go
+++ b/internal/lsp/watcher/watcher.go
@@ -28,6 +28,13 @@ type Client struct {
registrations *csync.Slice[protocol.FileSystemWatcher]
}
+func init() {
+ // Ensure the watcher is initialized with a reasonable file limit
+ if _, err := Ulimit(); err != nil {
+ slog.Error("Error setting file limit", "error", err)
+ }
+}
+
// New creates a new workspace watcher for the given client.
func New(name string, client *lsp.Client) *Client {
return &Client{
From 65fe42ea59c0a606e32334e344b49d46dbf13041 Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Thu, 11 Sep 2025 06:32:01 -0300
Subject: [PATCH 038/236] chore(legal): @tauraamui has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index e500dc1399f4c77abb8fa295cee1a1039126794f..42913f1e28960628c7baf72d7800ac0eaf349ea2 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -583,6 +583,14 @@
"created_at": "2025-09-10T14:55:25Z",
"repoId": 987670088,
"pullRequestNo": 1008
+ },
+ {
+ "name": "tauraamui",
+ "id": 3159648,
+ "comment_id": 3279503814,
+ "created_at": "2025-09-11T09:31:52Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1016
}
]
}
\ No newline at end of file
From 5e366e15c2f490d9e412a221ea1d955d87cd5599 Mon Sep 17 00:00:00 2001
From: Vadim Inshakov
Date: Tue, 9 Sep 2025 00:38:18 +0500
Subject: [PATCH 039/236] no need to check cancelled ctx
---
internal/llm/agent/agent.go | 5 +----
1 file changed, 1 insertion(+), 4 deletions(-)
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index 13b65cccc79ded8f1f7267063898216defb38908..b1d9dd8c98420aa963f6e6fa5df8fbefc5a632b2 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -374,10 +374,7 @@ func (a *agent) Run(ctx context.Context, sessionID string, content string, attac
a.activeRequests.Del(sessionID)
cancel()
a.Publish(pubsub.CreatedEvent, result)
- select {
- case events <- result:
- case <-genCtx.Done():
- }
+ events <- result
close(events)
}()
return events, nil
From c4a976e7db184a8e0d12c0bd07a521f64f32e1c6 Mon Sep 17 00:00:00 2001
From: Vadim Inshakov
Date: Tue, 9 Sep 2025 01:03:10 +0500
Subject: [PATCH 040/236] add buffer size
---
internal/llm/agent/agent.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index b1d9dd8c98420aa963f6e6fa5df8fbefc5a632b2..02ff02c2df5c85b688b892971472e22fa4aed0b7 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -343,7 +343,7 @@ func (a *agent) Run(ctx context.Context, sessionID string, content string, attac
if !a.Model().SupportsImages && attachments != nil {
attachments = nil
}
- events := make(chan AgentEvent)
+ events := make(chan AgentEvent, 1)
if a.IsSessionBusy(sessionID) {
existing, ok := a.promptQueue.Get(sessionID)
if !ok {
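Taken together with the previous commit, which dropped the select on genCtx.Done() around the final send, the one-slot buffer is what keeps that send from blocking if the caller has already stopped receiving. A minimal, hypothetical sketch of the resulting pattern (AgentEvent here is a stand-in type and doWork a placeholder):

package main

import "fmt"

// AgentEvent stands in for the real event type; doWork is a placeholder.
type AgentEvent struct{ Msg string }

func doWork() AgentEvent { return AgentEvent{Msg: "done"} }

// run mirrors the shape of agent.Run after these two commits: the events
// channel has a buffer of 1, so the goroutine can deliver its final result
// and exit even if the receiver has gone away.
func run() <-chan AgentEvent {
    events := make(chan AgentEvent, 1)
    go func() {
        events <- doWork() // never blocks: the buffer holds the final result
        close(events)
    }()
    return events
}

func main() {
    for ev := range run() {
        fmt.Println(ev.Msg)
    }
}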
From 49d86a70ae6d9dbc0195bd220b3090b73c3f9b6f Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Thu, 11 Sep 2025 11:53:14 -0300
Subject: [PATCH 041/236] chore(readme): fix build badge url
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index c54727d841b72fdfe7d430afbf4d67761398fcd1..907e182527690bb67ad4ed89b9d9f501d93c16ab 100644
--- a/README.md
+++ b/README.md
@@ -3,7 +3,7 @@

-
+
Your new coding bestie, now available in your favourite terminal.
Your tools, your code, and your workflows, wired into your LLM of choice.
From a4da218e56cfb87695723f72f2cb0eb336919a51 Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Thu, 11 Sep 2025 13:51:32 -0300
Subject: [PATCH 042/236] fix: ensure it's possible to quit (`ctrl+c`) even
when a dialog is open (#1007)
---
internal/tui/tui.go | 18 ++++++++++--------
1 file changed, 10 insertions(+), 8 deletions(-)
diff --git a/internal/tui/tui.go b/internal/tui/tui.go
index 1c987722b58b883862b71c582ed3800afc946de7..0986aca31dcd779ca6fe611e1d71eff8ad6908e9 100644
--- a/internal/tui/tui.go
+++ b/internal/tui/tui.go
@@ -406,6 +406,16 @@ func (a *appModel) handleWindowResize(width, height int) tea.Cmd {
// handleKeyPressMsg processes keyboard input and routes to appropriate handlers.
func (a *appModel) handleKeyPressMsg(msg tea.KeyPressMsg) tea.Cmd {
+ // Check this first as the user should be able to quit no matter what.
+ if key.Matches(msg, a.keyMap.Quit) {
+ if a.dialog.ActiveDialogID() == quit.QuitDialogID {
+ return tea.Quit
+ }
+ return util.CmdHandler(dialogs.OpenDialogMsg{
+ Model: quit.NewQuitDialog(),
+ })
+ }
+
if a.completions.Open() {
// completions
keyMap := a.completions.KeyMap()
@@ -430,14 +440,6 @@ func (a *appModel) handleKeyPressMsg(msg tea.KeyPressMsg) tea.Cmd {
a.showingFullHelp = !a.showingFullHelp
return a.handleWindowResize(a.wWidth, a.wHeight)
// dialogs
- case key.Matches(msg, a.keyMap.Quit):
- if a.dialog.ActiveDialogID() == quit.QuitDialogID {
- return tea.Quit
- }
- return util.CmdHandler(dialogs.OpenDialogMsg{
- Model: quit.NewQuitDialog(),
- })
-
case key.Matches(msg, a.keyMap.Commands):
// if the app is not configured show no commands
if !a.isConfigured {
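Moving the quit check ahead of the completions and dialog routing is what guarantees `ctrl+c` can never be shadowed by another handler. A rough, dependency-free sketch of the pattern, with simplified fields standing in for the real key map and dialog manager:

```go
package main

import "fmt"

type model struct {
	quitDialogOpen  bool
	completionsOpen bool
}

// handleKey routes a key press; the quit binding is checked before any
// completion- or dialog-specific handling so it always wins.
func (m *model) handleKey(key string) string {
	if key == "ctrl+c" { // stands in for key.Matches(msg, keyMap.Quit)
		if m.quitDialogOpen {
			return "quit"
		}
		m.quitDialogOpen = true
		return "open quit dialog"
	}
	if m.completionsOpen {
		return "handled by completions"
	}
	return "default handling"
}

func main() {
	m := &model{completionsOpen: true}
	fmt.Println(m.handleKey("ctrl+c")) // open quit dialog, even with completions open
	fmt.Println(m.handleKey("ctrl+c")) // quit
}
```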
From 1f255fcf4af3ca2ec8331d578e0a68a13365f4be Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Thu, 11 Sep 2025 14:38:06 -0300
Subject: [PATCH 043/236] chore(legal): @kim0 has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index 42913f1e28960628c7baf72d7800ac0eaf349ea2..d893fbfe2cd3f16295b636031d97c3cd5a163c51 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -591,6 +591,14 @@
"created_at": "2025-09-11T09:31:52Z",
"repoId": 987670088,
"pullRequestNo": 1016
+ },
+ {
+ "name": "kim0",
+ "id": 59667,
+ "comment_id": 3282025022,
+ "created_at": "2025-09-11T17:37:57Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1017
}
]
}
\ No newline at end of file
From 6da6c7d1cf173cfd1a7c020175bd49b0615b4b6c Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Tue, 9 Sep 2025 13:01:29 +0200
Subject: [PATCH 045/236] fix: allow searching by provider name
---
internal/tui/exp/list/filterable.go | 8 +--
internal/tui/exp/list/filterable_group.go | 65 ++++++++++++++++-------
2 files changed, 48 insertions(+), 25 deletions(-)
diff --git a/internal/tui/exp/list/filterable.go b/internal/tui/exp/list/filterable.go
index 2566ccb947e3826598988f1cc7ae297452e75bb1..e639786db5777aaeda237e959dffe36d9c6a7583 100644
--- a/internal/tui/exp/list/filterable.go
+++ b/internal/tui/exp/list/filterable.go
@@ -15,12 +15,8 @@ import (
"github.com/sahilm/fuzzy"
)
-var (
- // Pre-compiled regex for checking if a string contains alphabetic characters.
- alphaRegex = regexp.MustCompile(`[a-zA-Z]`)
- // Pre-compiled regex for checking if a string is alphanumeric.
- alphanumericRegex = regexp.MustCompile(`^[a-zA-Z0-9]*$`)
-)
+// Pre-compiled regex for checking if a string is alphanumeric.
+var alphanumericRegex = regexp.MustCompile(`^[a-zA-Z0-9]*$`)
type FilterableItem interface {
Item
diff --git a/internal/tui/exp/list/filterable_group.go b/internal/tui/exp/list/filterable_group.go
index 9f0efc49162004cebfbc57c2c5ce96319895574e..0e960fc9088be19f821e345a28cd03802573611b 100644
--- a/internal/tui/exp/list/filterable_group.go
+++ b/internal/tui/exp/list/filterable_group.go
@@ -198,30 +198,57 @@ func (f *filterableGroupList[T]) Filter(query string) tea.Cmd {
var newGroups []Group[T]
for _, g := range f.groups {
- words := make([]string, len(g.Items))
- for i, item := range g.Items {
- words[i] = strings.ToLower(item.FilterValue())
+ // Check if group name matches the query
+ // Extract the group name from the section - we'll use the section's view content
+ // as a fallback since ItemSection doesn't implement FilterableItem
+ var groupName string
+ if section, ok := g.Section.(*itemSectionModel); ok {
+ groupName = strings.ToLower(section.title)
+ } else {
+ // Fallback to using the section's ID or view content
+ groupName = strings.ToLower(g.Section.ID())
}
-
- matches := fuzzy.Find(query, words)
-
- sort.SliceStable(matches, func(i, j int) bool {
- return matches[i].Score > matches[j].Score
- })
-
- var matchedItems []T
- for _, match := range matches {
- item := g.Items[match.Index]
- if i, ok := any(item).(HasMatchIndexes); ok {
- i.MatchIndexes(match.MatchedIndexes)
+ groupMatches := fuzzy.Find(query, []string{groupName})
+
+ if len(groupMatches) > 0 && groupMatches[0].Score > 0 {
+ // If group name matches, include all items from this group
+ // Clear any existing match indexes for items since the group matched
+ for _, item := range g.Items {
+ if i, ok := any(item).(HasMatchIndexes); ok {
+ i.MatchIndexes(make([]int, 0))
+ }
}
- matchedItems = append(matchedItems, item)
- }
- if len(matchedItems) > 0 {
newGroups = append(newGroups, Group[T]{
Section: g.Section,
- Items: matchedItems,
+ Items: g.Items,
})
+ } else {
+ // Group name doesn't match, check individual items
+ words := make([]string, len(g.Items))
+ for i, item := range g.Items {
+ words[i] = strings.ToLower(item.FilterValue())
+ }
+
+ matches := fuzzy.Find(query, words)
+
+ sort.SliceStable(matches, func(i, j int) bool {
+ return matches[i].Score > matches[j].Score
+ })
+
+ var matchedItems []T
+ for _, match := range matches {
+ item := g.Items[match.Index]
+ if i, ok := any(item).(HasMatchIndexes); ok {
+ i.MatchIndexes(match.MatchedIndexes)
+ }
+ matchedItems = append(matchedItems, item)
+ }
+ if len(matchedItems) > 0 {
+ newGroups = append(newGroups, Group[T]{
+ Section: g.Section,
+ Items: matchedItems,
+ })
+ }
}
}
cmds = append(cmds, f.groupedList.SetGroups(newGroups))
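The gist of the change is that a group whose section title fuzzily matches the query keeps all of its items, and only otherwise are the items themselves filtered. A compressed sketch using github.com/sahilm/fuzzy (the matcher the list already uses); the group and item types here are simplified stand-ins:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/sahilm/fuzzy"
)

type group struct {
	name  string
	items []string
}

// filter keeps every item of a group whose name matches the query,
// and otherwise keeps only the items that match on their own.
func filter(groups []group, query string) []group {
	query = strings.ToLower(query)
	var out []group
	for _, g := range groups {
		if m := fuzzy.Find(query, []string{strings.ToLower(g.name)}); len(m) > 0 && m[0].Score > 0 {
			out = append(out, g) // group name matched: include everything
			continue
		}
		lowered := make([]string, len(g.items))
		for i, it := range g.items {
			lowered[i] = strings.ToLower(it)
		}
		var kept []string
		for _, m := range fuzzy.Find(query, lowered) {
			kept = append(kept, g.items[m.Index])
		}
		if len(kept) > 0 {
			out = append(out, group{name: g.name, items: kept})
		}
	}
	return out
}

func main() {
	groups := []group{
		{name: "OpenAI", items: []string{"gpt-4o", "o3-mini"}},
		{name: "Anthropic", items: []string{"claude-sonnet"}},
	}
	fmt.Println(filter(groups, "anthropic")) // whole Anthropic group
	fmt.Println(filter(groups, "gpt"))       // only the matching OpenAI item
}
```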
From 15fe8dfd84c2a6ee398158acbc4af3f7e002c27a Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Wed, 10 Sep 2025 13:14:35 +0200
Subject: [PATCH 046/236] fix: allow multi word search
---
internal/tui/exp/list/filterable_group.go | 159 ++++++++++++++++------
1 file changed, 116 insertions(+), 43 deletions(-)
diff --git a/internal/tui/exp/list/filterable_group.go b/internal/tui/exp/list/filterable_group.go
index 0e960fc9088be19f821e345a28cd03802573611b..10298b92041e6a1cfb3ad1ae4a5ca9f1c38b98d5 100644
--- a/internal/tui/exp/list/filterable_group.go
+++ b/internal/tui/exp/list/filterable_group.go
@@ -180,7 +180,12 @@ func (f *filterableGroupList[T]) inputHeight() int {
return lipgloss.Height(f.inputStyle.Render(f.input.View()))
}
-func (f *filterableGroupList[T]) Filter(query string) tea.Cmd {
+type groupMatch[T FilterableItem] struct {
+ group Group[T]
+ score int
+}
+
+func (f *filterableGroupList[T]) clearItemState() []tea.Cmd {
var cmds []tea.Cmd
for _, item := range slices.Collect(f.items.Seq()) {
if i, ok := any(item).(layout.Focusable); ok {
@@ -190,67 +195,135 @@ func (f *filterableGroupList[T]) Filter(query string) tea.Cmd {
i.MatchIndexes(make([]int, 0))
}
}
+ return cmds
+}
- f.selectedItem = ""
+func (f *filterableGroupList[T]) getGroupName(g Group[T]) string {
+ if section, ok := g.Section.(*itemSectionModel); ok {
+ return strings.ToLower(section.title)
+ }
+ return strings.ToLower(g.Section.ID())
+}
+
+func (f *filterableGroupList[T]) setMatchIndexes(item T, indexes []int) {
+ if i, ok := any(item).(HasMatchIndexes); ok {
+ i.MatchIndexes(indexes)
+ }
+}
+
+func (f *filterableGroupList[T]) findMatchingGroups(firstWord string) []groupMatch[T] {
+ var matchedGroups []groupMatch[T]
+ for _, g := range f.groups {
+ groupName := f.getGroupName(g)
+ matches := fuzzy.Find(firstWord, []string{groupName})
+ if len(matches) > 0 && matches[0].Score > 0 {
+ matchedGroups = append(matchedGroups, groupMatch[T]{
+ group: g,
+ score: matches[0].Score,
+ })
+ }
+ }
+ // Sort by score (higher scores first - exact matches will have higher scores)
+ sort.SliceStable(matchedGroups, func(i, j int) bool {
+ return matchedGroups[i].score > matchedGroups[j].score
+ })
+ return matchedGroups
+}
+
+func (f *filterableGroupList[T]) filterItemsInGroup(group Group[T], query string) []T {
if query == "" {
- return f.groupedList.SetGroups(f.groups)
+ // No query, return all items with cleared match indexes
+ var items []T
+ for _, item := range group.Items {
+ f.setMatchIndexes(item, make([]int, 0))
+ items = append(items, item)
+ }
+ return items
}
+ // Build search words
+ words := make([]string, len(group.Items))
+ for i, item := range group.Items {
+ words[i] = strings.ToLower(item.FilterValue())
+ }
+
+ // Perform fuzzy search
+ matches := fuzzy.Find(query, words)
+ sort.SliceStable(matches, func(i, j int) bool {
+ return matches[i].Score > matches[j].Score
+ })
+
+ if len(matches) > 0 {
+ // Found matches, return only those with highlights
+ var matchedItems []T
+ for _, match := range matches {
+ item := group.Items[match.Index]
+ f.setMatchIndexes(item, match.MatchedIndexes)
+ matchedItems = append(matchedItems, item)
+ }
+ return matchedItems
+ }
+
+ // No matches, return all items without highlights
+ var allItems []T
+ for _, item := range group.Items {
+ f.setMatchIndexes(item, make([]int, 0))
+ allItems = append(allItems, item)
+ }
+ return allItems
+}
+
+func (f *filterableGroupList[T]) searchAllGroups(query string) []Group[T] {
var newGroups []Group[T]
for _, g := range f.groups {
- // Check if group name matches the query
- // Extract the group name from the section - we'll use the section's view content
- // as a fallback since ItemSection doesn't implement FilterableItem
- var groupName string
- if section, ok := g.Section.(*itemSectionModel); ok {
- groupName = strings.ToLower(section.title)
- } else {
- // Fallback to using the section's ID or view content
- groupName = strings.ToLower(g.Section.ID())
- }
- groupMatches := fuzzy.Find(query, []string{groupName})
-
- if len(groupMatches) > 0 && groupMatches[0].Score > 0 {
- // If group name matches, include all items from this group
- // Clear any existing match indexes for items since the group matched
- for _, item := range g.Items {
- if i, ok := any(item).(HasMatchIndexes); ok {
- i.MatchIndexes(make([]int, 0))
- }
- }
+ matchedItems := f.filterItemsInGroup(g, query)
+ if len(matchedItems) > 0 {
newGroups = append(newGroups, Group[T]{
Section: g.Section,
- Items: g.Items,
+ Items: matchedItems,
})
- } else {
- // Group name doesn't match, check individual items
- words := make([]string, len(g.Items))
- for i, item := range g.Items {
- words[i] = strings.ToLower(item.FilterValue())
- }
+ }
+ }
+ return newGroups
+}
- matches := fuzzy.Find(query, words)
+func (f *filterableGroupList[T]) Filter(query string) tea.Cmd {
+ cmds := f.clearItemState()
+ f.selectedItem = ""
- sort.SliceStable(matches, func(i, j int) bool {
- return matches[i].Score > matches[j].Score
- })
+ if query == "" {
+ return f.groupedList.SetGroups(f.groups)
+ }
- var matchedItems []T
- for _, match := range matches {
- item := g.Items[match.Index]
- if i, ok := any(item).(HasMatchIndexes); ok {
- i.MatchIndexes(match.MatchedIndexes)
- }
- matchedItems = append(matchedItems, item)
- }
+ lowerQuery := strings.ToLower(query)
+ queryWords := strings.Fields(lowerQuery)
+ firstWord := queryWords[0]
+
+ // Find groups that match the first word
+ matchedGroups := f.findMatchingGroups(firstWord)
+
+ var newGroups []Group[T]
+ if len(matchedGroups) > 0 {
+ // Filter within matched groups using remaining words
+ remainingQuery := ""
+ if len(queryWords) > 1 {
+ remainingQuery = strings.Join(queryWords[1:], " ")
+ }
+
+ for _, matchedGroup := range matchedGroups {
+ matchedItems := f.filterItemsInGroup(matchedGroup.group, remainingQuery)
if len(matchedItems) > 0 {
newGroups = append(newGroups, Group[T]{
- Section: g.Section,
+ Section: matchedGroup.group.Section,
Items: matchedItems,
})
}
}
+ } else {
+ // No group matches, search all groups
+ newGroups = f.searchAllGroups(lowerQuery)
}
+
cmds = append(cmds, f.groupedList.SetGroups(newGroups))
return tea.Batch(cmds...)
}
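The multi-word behaviour splits the query: the first word is matched against group names, and the remaining words (if any) are re-joined and matched against items inside the groups that hit. A small sketch of just that splitting step, independent of the list component:

```go
package main

import (
	"fmt"
	"strings"
)

// splitQuery returns the word used for group matching and the remainder
// used for item matching, mirroring the "first word = group" convention.
func splitQuery(query string) (groupWord, itemQuery string) {
	words := strings.Fields(strings.ToLower(query))
	if len(words) == 0 {
		return "", ""
	}
	groupWord = words[0]
	if len(words) > 1 {
		itemQuery = strings.Join(words[1:], " ")
	}
	return groupWord, itemQuery
}

func main() {
	g, rest := splitQuery("openai gpt 4o")
	fmt.Printf("group=%q items=%q\n", g, rest) // group="openai" items="gpt 4o"
}
```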
From fb5e0584a75a443ee7f76a52281f7e72a353e5d3 Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Thu, 11 Sep 2025 12:21:22 +0200
Subject: [PATCH 047/236] fix: fix the group search
---
internal/tui/exp/list/filterable_group.go | 23 ++++++++++++++++-------
internal/tui/exp/list/items.go | 4 +++-
2 files changed, 19 insertions(+), 8 deletions(-)
diff --git a/internal/tui/exp/list/filterable_group.go b/internal/tui/exp/list/filterable_group.go
index 10298b92041e6a1cfb3ad1ae4a5ca9f1c38b98d5..57aa3da3b19420a5635e35419d2865ea033eb27f 100644
--- a/internal/tui/exp/list/filterable_group.go
+++ b/internal/tui/exp/list/filterable_group.go
@@ -264,13 +264,7 @@ func (f *filterableGroupList[T]) filterItemsInGroup(group Group[T], query string
return matchedItems
}
- // No matches, return all items without highlights
- var allItems []T
- for _, item := range group.Items {
- f.setMatchIndexes(item, make([]int, 0))
- allItems = append(allItems, item)
- }
- return allItems
+ return []T{}
}
func (f *filterableGroupList[T]) searchAllGroups(query string) []Group[T] {
@@ -319,6 +313,21 @@ func (f *filterableGroupList[T]) Filter(query string) tea.Cmd {
})
}
}
+
+ // add any matching items from other groups
+ allGroups := f.searchAllGroups(lowerQuery)
+ for _, g := range allGroups {
+ exists := false
+ for _, existing := range newGroups {
+ if existing.Section.ID() == g.Section.ID() {
+ exists = true
+ break
+ }
+ }
+ if !exists {
+ newGroups = append(newGroups, g)
+ }
+ }
} else {
// No group matches, search all groups
newGroups = f.searchAllGroups(lowerQuery)
diff --git a/internal/tui/exp/list/items.go b/internal/tui/exp/list/items.go
index b55c1dd723a245aa47ffdeb6a9ffb72fd9d8e27b..9e7259dc10a61c95e970d9f1fc93b0d61d7a65a8 100644
--- a/internal/tui/exp/list/items.go
+++ b/internal/tui/exp/list/items.go
@@ -327,18 +327,20 @@ type itemSectionModel struct {
width int
title string
inx int
+ id string
info string
}
// ID implements ItemSection.
func (m *itemSectionModel) ID() string {
- return uuid.NewString()
+ return m.id
}
func NewItemSection(title string) ItemSection {
return &itemSectionModel{
title: title,
inx: -1,
+ id: uuid.NewString(),
}
}
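The items.go hunk is the load-bearing part of this fix: ID() previously returned uuid.NewString() on every call, so two calls on the same section never compared equal and the de-duplication loop above could not work. Generating the ID once at construction and returning the stored value is the usual fix; a minimal sketch:

```go
package main

import (
	"fmt"

	"github.com/google/uuid"
)

type section struct {
	title string
	id    string
}

// newSection assigns the identifier once, so ID() is stable for the
// lifetime of the section and safe to use in equality checks.
func newSection(title string) *section {
	return &section{title: title, id: uuid.NewString()}
}

func (s *section) ID() string { return s.id }

func main() {
	s := newSection("Anthropic")
	fmt.Println(s.ID() == s.ID()) // true; a per-call uuid.NewString() would print false
}
```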
From ad50cf423769b7738d53d1cf057f6999f6f64bfb Mon Sep 17 00:00:00 2001
From: Adam Stringer
Date: Fri, 12 Sep 2025 10:36:14 +0100
Subject: [PATCH 048/236] feat(config): define disabled tools option which
 filters out tools from agents' access (#1016)
* test(config): ensure all tools are included if no disabled defined
* test(config): verify explicitly disabled tools are gone
* refactor(config): share manual slice iteration and selection logic
* refactor(config): move static slice of readonly tools into helper
* test(config): ensure allowed tools for task is at most in the read only
---
internal/config/config.go | 55 ++++++++++++++++++++++++++++-----
internal/config/load_test.go | 39 +++++++++++++++++++++++
internal/lsp/protocol/tsjson.go | 9 +++---
3 files changed, 91 insertions(+), 12 deletions(-)
diff --git a/internal/config/config.go b/internal/config/config.go
index c126e7ff11a1f63e9c1ace21984888e76af71479..4e42a56e361c81feca31cd95bd778d14c312cd20 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -143,6 +143,7 @@ type Options struct {
DebugLSP bool `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
DisableAutoSummarize bool `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
DataDirectory string `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
+ DisabledTools []string `json:"disabled_tools" jsonschema:"description=Tools to disable"`
}
type MCPs map[string]MCPConfig
@@ -415,7 +416,51 @@ func (c *Config) SetProviderAPIKey(providerID, apiKey string) error {
return nil
}
+func allToolNames() []string {
+ return []string{
+ "bash",
+ "download",
+ "edit",
+ "multiedit",
+ "fetch",
+ "glob",
+ "grep",
+ "ls",
+ "sourcegraph",
+ "view",
+ "write",
+ }
+}
+
+func resolveAllowedTools(allTools []string, disabledTools []string) []string {
+ if disabledTools == nil {
+ return allTools
+ }
+ // filter out disabled tools (exclude mode)
+ return filterSlice(allTools, disabledTools, false)
+}
+
+func resolveReadOnlyTools(tools []string) []string {
+ readOnlyTools := []string{"glob", "grep", "ls", "sourcegraph", "view"}
+	// keep only the allowed tools that are also read-only (include mode)
+ return filterSlice(tools, readOnlyTools, true)
+}
+
+func filterSlice(data []string, mask []string, include bool) []string {
+ filtered := []string{}
+ for _, s := range data {
+ // if include is true, we include items that ARE in the mask
+ // if include is false, we include items that are NOT in the mask
+ if include == slices.Contains(mask, s) {
+ filtered = append(filtered, s)
+ }
+ }
+ return filtered
+}
+
func (c *Config) SetupAgents() {
+ allowedTools := resolveAllowedTools(allToolNames(), c.Options.DisabledTools)
+
agents := map[string]Agent{
"coder": {
ID: "coder",
@@ -423,7 +468,7 @@ func (c *Config) SetupAgents() {
Description: "An agent that helps with executing coding tasks.",
Model: SelectedModelTypeLarge,
ContextPaths: c.Options.ContextPaths,
- // All tools allowed
+ AllowedTools: allowedTools,
},
"task": {
ID: "task",
@@ -431,13 +476,7 @@ func (c *Config) SetupAgents() {
Description: "An agent that helps with searching for context and finding implementation details.",
Model: SelectedModelTypeLarge,
ContextPaths: c.Options.ContextPaths,
- AllowedTools: []string{
- "glob",
- "grep",
- "ls",
- "sourcegraph",
- "view",
- },
+ AllowedTools: resolveReadOnlyTools(allowedTools),
// NO MCPs or LSPs by default
AllowedMCP: map[string][]string{},
AllowedLSP: []string{},
diff --git a/internal/config/load_test.go b/internal/config/load_test.go
index a83ab2b94fa29ade149b968c700f22b34b4e86fd..e0ce94f3995fb64cc8f66348723a4e6c62a0ea2b 100644
--- a/internal/config/load_test.go
+++ b/internal/config/load_test.go
@@ -11,6 +11,7 @@ import (
"github.com/charmbracelet/catwalk/pkg/catwalk"
"github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/env"
+ "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -453,6 +454,44 @@ func TestConfig_IsConfigured(t *testing.T) {
})
}
+func TestConfig_setupAgentsWithNoDisabledTools(t *testing.T) {
+ cfg := &Config{
+ Options: &Options{
+ DisabledTools: []string{},
+ },
+ }
+
+ cfg.SetupAgents()
+ coderAgent, ok := cfg.Agents["coder"]
+ require.True(t, ok)
+ assert.Equal(t, allToolNames(), coderAgent.AllowedTools)
+
+ taskAgent, ok := cfg.Agents["task"]
+ require.True(t, ok)
+ assert.Equal(t, []string{"glob", "grep", "ls", "sourcegraph", "view"}, taskAgent.AllowedTools)
+}
+
+func TestConfig_setupAgentsWithDisabledTools(t *testing.T) {
+ cfg := &Config{
+ Options: &Options{
+ DisabledTools: []string{
+ "edit",
+ "download",
+ "grep",
+ },
+ },
+ }
+
+ cfg.SetupAgents()
+ coderAgent, ok := cfg.Agents["coder"]
+ require.True(t, ok)
+ assert.Equal(t, []string{"bash", "multiedit", "fetch", "glob", "ls", "sourcegraph", "view", "write"}, coderAgent.AllowedTools)
+
+ taskAgent, ok := cfg.Agents["task"]
+ require.True(t, ok)
+ assert.Equal(t, []string{"glob", "ls", "sourcegraph", "view"}, taskAgent.AllowedTools)
+}
+
func TestConfig_configureProvidersWithDisabledProvider(t *testing.T) {
knownProviders := []catwalk.Provider{
{
diff --git a/internal/lsp/protocol/tsjson.go b/internal/lsp/protocol/tsjson.go
index 24eb515c0482f6259a1ebdfb997e26877f1b9dde..3cf7275245a5dc532c52e03024652fceda6e713a 100644
--- a/internal/lsp/protocol/tsjson.go
+++ b/internal/lsp/protocol/tsjson.go
@@ -10,10 +10,11 @@ package protocol
// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.6-next.9/protocol/metaModel.json
// LSP metaData.version = 3.17.0.
-import "bytes"
-import "encoding/json"
-
-import "fmt"
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+)
// UnmarshalError indicates that a JSON value did not conform to
// one of the expected cases of an LSP union type.
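filterSlice does double duty here: with include=true it keeps only the items present in the mask, and with include=false it keeps only the items absent from it, which is why the single comparison `include == slices.Contains(mask, s)` covers both modes. A standalone copy of the helper to make the two modes explicit:

```go
package main

import (
	"fmt"
	"slices"
)

// filterSlice keeps items of data that are (include=true) or are not
// (include=false) present in mask.
func filterSlice(data, mask []string, include bool) []string {
	filtered := []string{}
	for _, s := range data {
		if include == slices.Contains(mask, s) {
			filtered = append(filtered, s)
		}
	}
	return filtered
}

func main() {
	all := []string{"bash", "edit", "grep", "view"}
	fmt.Println(filterSlice(all, []string{"edit"}, false))        // exclude mode: [bash grep view]
	fmt.Println(filterSlice(all, []string{"grep", "view"}, true)) // include mode: [grep view]
}
```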
From 08a039dc342991f6549fa4b517f06012bb7c6aa1 Mon Sep 17 00:00:00 2001
From: actions-user
Date: Fri, 12 Sep 2025 09:36:40 +0000
Subject: [PATCH 049/236] chore: auto-update generated files
---
schema.json | 12 +++++++++++-
1 file changed, 11 insertions(+), 1 deletion(-)
diff --git a/schema.json b/schema.json
index 6762b3c5b77ec8ef701b8b7add2a4826e9e9823c..060f9738884da739a186898d859ac5618c35b5b8 100644
--- a/schema.json
+++ b/schema.json
@@ -271,10 +271,20 @@
"examples": [
".crush"
]
+ },
+ "disabled_tools": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array",
+ "description": "Tools to disable"
}
},
"additionalProperties": false,
- "type": "object"
+ "type": "object",
+ "required": [
+ "disabled_tools"
+ ]
},
"Permissions": {
"properties": {
From 861db845ebe9491ea66b08047ca95470dcb4e078 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Fri, 12 Sep 2025 10:53:41 -0300
Subject: [PATCH 050/236] fix: improve group filtering (#1024)
---
internal/tui/exp/list/filterable_group.go | 112 ++++++----------------
1 file changed, 27 insertions(+), 85 deletions(-)
diff --git a/internal/tui/exp/list/filterable_group.go b/internal/tui/exp/list/filterable_group.go
index 57aa3da3b19420a5635e35419d2865ea033eb27f..6e9a5dc7eaad66d32ec34baf7e41d35ab3233048 100644
--- a/internal/tui/exp/list/filterable_group.go
+++ b/internal/tui/exp/list/filterable_group.go
@@ -180,11 +180,6 @@ func (f *filterableGroupList[T]) inputHeight() int {
return lipgloss.Height(f.inputStyle.Render(f.input.View()))
}
-type groupMatch[T FilterableItem] struct {
- group Group[T]
- score int
-}
-
func (f *filterableGroupList[T]) clearItemState() []tea.Cmd {
var cmds []tea.Cmd
for _, item := range slices.Collect(f.items.Seq()) {
@@ -211,25 +206,6 @@ func (f *filterableGroupList[T]) setMatchIndexes(item T, indexes []int) {
}
}
-func (f *filterableGroupList[T]) findMatchingGroups(firstWord string) []groupMatch[T] {
- var matchedGroups []groupMatch[T]
- for _, g := range f.groups {
- groupName := f.getGroupName(g)
- matches := fuzzy.Find(firstWord, []string{groupName})
- if len(matches) > 0 && matches[0].Score > 0 {
- matchedGroups = append(matchedGroups, groupMatch[T]{
- group: g,
- score: matches[0].Score,
- })
- }
- }
- // Sort by score (higher scores first - exact matches will have higher scores)
- sort.SliceStable(matchedGroups, func(i, j int) bool {
- return matchedGroups[i].score > matchedGroups[j].score
- })
- return matchedGroups
-}
-
func (f *filterableGroupList[T]) filterItemsInGroup(group Group[T], query string) []T {
if query == "" {
// No query, return all items with cleared match indexes
@@ -241,24 +217,31 @@ func (f *filterableGroupList[T]) filterItemsInGroup(group Group[T], query string
return items
}
- // Build search words
- words := make([]string, len(group.Items))
+ name := f.getGroupName(group) + " "
+
+ names := make([]string, len(group.Items))
for i, item := range group.Items {
- words[i] = strings.ToLower(item.FilterValue())
+ names[i] = strings.ToLower(name + item.FilterValue())
}
- // Perform fuzzy search
- matches := fuzzy.Find(query, words)
+ matches := fuzzy.Find(query, names)
sort.SliceStable(matches, func(i, j int) bool {
return matches[i].Score > matches[j].Score
})
if len(matches) > 0 {
- // Found matches, return only those with highlights
var matchedItems []T
for _, match := range matches {
item := group.Items[match.Index]
- f.setMatchIndexes(item, match.MatchedIndexes)
+ var idxs []int
+ for _, idx := range match.MatchedIndexes {
+				// skip highlights that fall on the group-name prefix; re-base the rest onto the item text
+ if idx < len(name) {
+ continue
+ }
+ idxs = append(idxs, idx-len(name))
+ }
+ f.setMatchIndexes(item, idxs)
matchedItems = append(matchedItems, item)
}
return matchedItems
@@ -267,20 +250,6 @@ func (f *filterableGroupList[T]) filterItemsInGroup(group Group[T], query string
return []T{}
}
-func (f *filterableGroupList[T]) searchAllGroups(query string) []Group[T] {
- var newGroups []Group[T]
- for _, g := range f.groups {
- matchedItems := f.filterItemsInGroup(g, query)
- if len(matchedItems) > 0 {
- newGroups = append(newGroups, Group[T]{
- Section: g.Section,
- Items: matchedItems,
- })
- }
- }
- return newGroups
-}
-
func (f *filterableGroupList[T]) Filter(query string) tea.Cmd {
cmds := f.clearItemState()
f.selectedItem = ""
@@ -289,51 +258,24 @@ func (f *filterableGroupList[T]) Filter(query string) tea.Cmd {
return f.groupedList.SetGroups(f.groups)
}
- lowerQuery := strings.ToLower(query)
- queryWords := strings.Fields(lowerQuery)
- firstWord := queryWords[0]
+ query = strings.ToLower(strings.ReplaceAll(query, " ", ""))
- // Find groups that match the first word
- matchedGroups := f.findMatchingGroups(firstWord)
-
- var newGroups []Group[T]
- if len(matchedGroups) > 0 {
- // Filter within matched groups using remaining words
- remainingQuery := ""
- if len(queryWords) > 1 {
- remainingQuery = strings.Join(queryWords[1:], " ")
- }
-
- for _, matchedGroup := range matchedGroups {
- matchedItems := f.filterItemsInGroup(matchedGroup.group, remainingQuery)
- if len(matchedItems) > 0 {
- newGroups = append(newGroups, Group[T]{
- Section: matchedGroup.group.Section,
- Items: matchedItems,
- })
- }
+ var result []Group[T]
+ for _, g := range f.groups {
+ if matches := fuzzy.Find(query, []string{f.getGroupName(g)}); len(matches) > 0 && matches[0].Score > 0 {
+ result = append(result, g)
+ continue
}
-
- // add any matching items from other groups
- allGroups := f.searchAllGroups(lowerQuery)
- for _, g := range allGroups {
- exists := false
- for _, existing := range newGroups {
- if existing.Section.ID() == g.Section.ID() {
- exists = true
- break
- }
- }
- if !exists {
- newGroups = append(newGroups, g)
- }
+ matchedItems := f.filterItemsInGroup(g, query)
+ if len(matchedItems) > 0 {
+ result = append(result, Group[T]{
+ Section: g.Section,
+ Items: matchedItems,
+ })
}
- } else {
- // No group matches, search all groups
- newGroups = f.searchAllGroups(lowerQuery)
}
- cmds = append(cmds, f.groupedList.SetGroups(newGroups))
+ cmds = append(cmds, f.groupedList.SetGroups(result))
return tea.Batch(cmds...)
}
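The simplification works by prefixing each item's filter value with its group name plus a space, running a single fuzzy pass, and then subtracting the prefix length from the matched indexes so only highlights inside the item text survive (the query also has its spaces stripped first). A cut-down sketch of that index adjustment, assuming the same sahilm/fuzzy matcher:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/sahilm/fuzzy"
)

// matchWithGroupPrefix fuzzily matches query against "group item" and
// returns the matched indexes re-based onto the item text only.
func matchWithGroupPrefix(groupName, item, query string) (bool, []int) {
	prefix := strings.ToLower(groupName) + " "
	target := prefix + strings.ToLower(item)
	matches := fuzzy.Find(strings.ToLower(query), []string{target})
	if len(matches) == 0 {
		return false, nil
	}
	var idxs []int
	for _, idx := range matches[0].MatchedIndexes {
		if idx < len(prefix) {
			continue // highlight fell on the group name, drop it
		}
		idxs = append(idxs, idx-len(prefix))
	}
	return true, idxs
}

func main() {
	ok, idxs := matchWithGroupPrefix("OpenAI", "gpt-4o", "openaigpt")
	fmt.Println(ok, idxs) // true, with indexes relative to "gpt-4o"
}
```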
From 6766386feddc6ce363ae51f22d1f2ba37c15c5bb Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Wed, 10 Sep 2025 18:46:40 +0200
Subject: [PATCH 051/236] fix: handle providers that do not send unique tool
call IDs
---
internal/llm/provider/openai.go | 10 ++++++++++
1 file changed, 10 insertions(+)
diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index eb5a84867aecf0a76b30a7c022ccb14bf6a2139a..1a780a74f79d0fe3c41db06145b8a9b79c72802c 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -340,6 +340,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
toolCalls := make([]message.ToolCall, 0)
msgToolCalls := make(map[int64]openai.ChatCompletionMessageToolCall)
toolMap := make(map[string]openai.ChatCompletionMessageToolCall)
+ toolCallIDMap := make(map[string]string)
for openaiStream.Next() {
chunk := openaiStream.Current()
// Kujtim: this is an issue with openrouter qwen, its sending -1 for the tool index
@@ -367,6 +368,15 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
currentContent += choice.Delta.Content
} else if len(choice.Delta.ToolCalls) > 0 {
toolCall := choice.Delta.ToolCalls[0]
+ if strings.HasPrefix(toolCall.ID, "functions.") {
+ exID, ok := toolCallIDMap[toolCall.ID]
+ if !ok {
+ newID := uuid.NewString()
+ toolCallIDMap[toolCall.ID] = newID
+ toolCall.ID = newID
+ }
+ toolCall.ID = exID
+ }
newToolCall := false
if existingToolCall, ok := msgToolCalls[toolCall.Index]; ok { // tool call exists
if toolCall.ID != "" && toolCall.ID != existingToolCall.ID {
From 49a1e214596d7ba7b209be16ba707f2cf315e3af Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Wed, 10 Sep 2025 19:19:06 +0200
Subject: [PATCH 052/236] fix: add else
---
internal/llm/provider/openai.go | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index 1a780a74f79d0fe3c41db06145b8a9b79c72802c..bfdeda2bc1cd0738b3370a2132d4945ad64d9fb5 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -374,8 +374,9 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
newID := uuid.NewString()
toolCallIDMap[toolCall.ID] = newID
toolCall.ID = newID
+ } else {
+ toolCall.ID = exID
}
- toolCall.ID = exID
}
newToolCall := false
if existingToolCall, ok := msgToolCalls[toolCall.Index]; ok { // tool call exists
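Taken together, patches 051 and 052 remap synthetic IDs like "functions.x" that some providers reuse across tool calls: the first occurrence mints a UUID and stores it, and later chunks carrying the same provider ID are rewritten to the stored UUID (the missing else in 051 immediately overwrote the freshly minted ID with the zero-value lookup result, which 052 corrects). A standalone sketch of that mapping:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/google/uuid"
)

// remapToolCallID rewrites non-unique provider tool-call IDs (here the
// "functions." prefix seen in the patch) to locally unique ones.
func remapToolCallID(seen map[string]string, id string) string {
	if !strings.HasPrefix(id, "functions.") {
		return id
	}
	if mapped, ok := seen[id]; ok {
		return mapped
	}
	newID := uuid.NewString()
	seen[id] = newID
	return newID
}

func main() {
	seen := map[string]string{}
	a := remapToolCallID(seen, "functions.search")
	b := remapToolCallID(seen, "functions.search")
	fmt.Println(a == b, a != "functions.search") // true true: stable and unique
}
```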
From 8e0d9b5b9aaed5841d44457858716de08405917e Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Thu, 11 Sep 2025 12:53:26 +0200
Subject: [PATCH 053/236] fix: agent tool not working when switching models
---
internal/app/app.go | 1 -
internal/llm/agent/agent.go | 52 +++++++++++++++++++++------------
internal/llm/agent/mcp-tools.go | 4 +--
3 files changed, 36 insertions(+), 21 deletions(-)
diff --git a/internal/app/app.go b/internal/app/app.go
index 21ddcd25eff1c9aeebb9d6700f9340ab0932e7ab..39e94feecb1f738fc46ff3012edac415c3759266 100644
--- a/internal/app/app.go
+++ b/internal/app/app.go
@@ -278,7 +278,6 @@ func (app *App) InitCoderAgent() error {
}
var err error
app.CoderAgent, err = agent.NewAgent(
- app.globalCtx,
coderAgentCfg,
app.Permissions,
app.Sessions,
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index 02ff02c2df5c85b688b892971472e22fa4aed0b7..cda31cfd5c747a620ce174f6e62a02a01ea3feb5 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -72,6 +72,8 @@ type agent struct {
mcpTools []McpTool
tools *csync.LazySlice[tools.BaseTool]
+ // We need this to be able to update it when model changes
+ agentToolFn func() (tools.BaseTool, error)
provider provider.Provider
providerID string
@@ -91,7 +93,6 @@ var agentPromptMap = map[string]prompt.PromptID{
}
func NewAgent(
- ctx context.Context,
agentCfg config.Agent,
// These services are needed in the tools
permissions permission.Service,
@@ -102,18 +103,19 @@ func NewAgent(
) (Service, error) {
cfg := config.Get()
- var agentTool tools.BaseTool
+ var agentToolFn func() (tools.BaseTool, error)
if agentCfg.ID == "coder" {
- taskAgentCfg := config.Get().Agents["task"]
- if taskAgentCfg.ID == "" {
- return nil, fmt.Errorf("task agent not found in config")
- }
- taskAgent, err := NewAgent(ctx, taskAgentCfg, permissions, sessions, messages, history, lspClients)
- if err != nil {
- return nil, fmt.Errorf("failed to create task agent: %w", err)
+ agentToolFn = func() (tools.BaseTool, error) {
+ taskAgentCfg := config.Get().Agents["task"]
+ if taskAgentCfg.ID == "" {
+ return nil, fmt.Errorf("task agent not found in config")
+ }
+ taskAgent, err := NewAgent(taskAgentCfg, permissions, sessions, messages, history, lspClients)
+ if err != nil {
+ return nil, fmt.Errorf("failed to create task agent: %w", err)
+ }
+ return NewAgentTool(taskAgent, sessions, messages), nil
}
-
- agentTool = NewAgentTool(taskAgent, sessions, messages)
}
providerCfg := config.Get().GetProviderForModel(agentCfg.Model)
@@ -195,7 +197,7 @@ func NewAgent(
}
mcpToolsOnce.Do(func() {
- mcpTools = doGetMCPTools(ctx, permissions, cfg)
+ mcpTools = doGetMCPTools(permissions, cfg)
})
allTools = append(allTools, mcpTools...)
@@ -203,10 +205,6 @@ func NewAgent(
allTools = append(allTools, tools.NewDiagnosticsTool(lspClients))
}
- if agentTool != nil {
- allTools = append(allTools, agentTool)
- }
-
if agentCfg.AllowedTools == nil {
return allTools
}
@@ -230,6 +228,7 @@ func NewAgent(
titleProvider: titleProvider,
summarizeProvider: summarizeProvider,
summarizeProviderID: string(providerCfg.ID),
+ agentToolFn: agentToolFn,
activeRequests: csync.NewMap[string, context.CancelFunc](),
tools: csync.NewLazySlice(toolFn),
promptQueue: csync.NewMap[string, []string](),
@@ -500,6 +499,18 @@ func (a *agent) createUserMessage(ctx context.Context, sessionID, content string
})
}
+func (a *agent) getAllTools() ([]tools.BaseTool, error) {
+ allTools := slices.Collect(a.tools.Seq())
+ if a.agentToolFn != nil {
+ agentTool, agentToolErr := a.agentToolFn()
+ if agentToolErr != nil {
+ return nil, agentToolErr
+ }
+ allTools = append(allTools, agentTool)
+ }
+ return allTools, nil
+}
+
func (a *agent) streamAndHandleEvents(ctx context.Context, sessionID string, msgHistory []message.Message) (message.Message, *message.Message, error) {
ctx = context.WithValue(ctx, tools.SessionIDContextKey, sessionID)
@@ -514,8 +525,12 @@ func (a *agent) streamAndHandleEvents(ctx context.Context, sessionID string, msg
return assistantMsg, nil, fmt.Errorf("failed to create assistant message: %w", err)
}
+ allTools, toolsErr := a.getAllTools()
+ if toolsErr != nil {
+ return assistantMsg, nil, toolsErr
+ }
// Now collect tools (which may block on MCP initialization)
- eventChan := a.provider.StreamResponse(ctx, msgHistory, slices.Collect(a.tools.Seq()))
+ eventChan := a.provider.StreamResponse(ctx, msgHistory, allTools)
// Add the session and message ID into the context if needed by tools.
ctx = context.WithValue(ctx, tools.MessageIDContextKey, assistantMsg.ID)
@@ -554,7 +569,8 @@ func (a *agent) streamAndHandleEvents(ctx context.Context, sessionID string, msg
default:
// Continue processing
var tool tools.BaseTool
- for availableTool := range a.tools.Seq() {
+ allTools, _ := a.getAllTools()
+ for _, availableTool := range allTools {
if availableTool.Info().Name == toolCall.Name {
tool = availableTool
break
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index bb50231da028e714c783f50cc7ebd8a1f4b595db..d0389ee321b55181c9e38546da8e256422fdc34f 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -275,7 +275,7 @@ var mcpInitRequest = mcp.InitializeRequest{
},
}
-func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *config.Config) []tools.BaseTool {
+func doGetMCPTools(permissions permission.Service, cfg *config.Config) []tools.BaseTool {
var wg sync.WaitGroup
result := csync.NewSlice[tools.BaseTool]()
@@ -309,7 +309,7 @@ func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *con
}
}()
- ctx, cancel := context.WithTimeout(ctx, mcpTimeout(m))
+ ctx, cancel := context.WithTimeout(context.Background(), mcpTimeout(m))
defer cancel()
c, err := createAndInitializeClient(ctx, name, m)
if err != nil {
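Wrapping the task-agent construction in a closure (agentToolFn) defers it until the tool is actually needed, so a model switch made after startup is picked up the next time the coder agent assembles its tool list. A rough sketch of the lazy-factory idea, with stand-in types for the real agent and tool:

```go
package main

import "fmt"

type tool struct{ model string }

type config struct{ model string }

var current = &config{model: "gpt-4o"}

// newAgentToolFn returns a factory instead of a tool, so the tool is
// built from whatever configuration is current at call time.
func newAgentToolFn() func() (tool, error) {
	return func() (tool, error) {
		return tool{model: current.model}, nil
	}
}

func main() {
	fn := newAgentToolFn()
	t1, _ := fn()
	current.model = "claude-sonnet" // user switches models later
	t2, _ := fn()
	fmt.Println(t1.model, t2.model) // gpt-4o claude-sonnet
}
```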
From 2e2b743eefb61ed0011d8a747d2388fc6aab7b29 Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Fri, 12 Sep 2025 15:49:12 +0200
Subject: [PATCH 054/236] chore: revert ctx change
---
internal/llm/agent/agent.go | 5 +++--
internal/llm/agent/mcp-tools.go | 4 ++--
2 files changed, 5 insertions(+), 4 deletions(-)
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index cda31cfd5c747a620ce174f6e62a02a01ea3feb5..cb4c515dfc4caf379dfa6e9ee83409af5983b0a4 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -93,6 +93,7 @@ var agentPromptMap = map[string]prompt.PromptID{
}
func NewAgent(
+ ctx context.Context,
agentCfg config.Agent,
// These services are needed in the tools
permissions permission.Service,
@@ -110,7 +111,7 @@ func NewAgent(
if taskAgentCfg.ID == "" {
return nil, fmt.Errorf("task agent not found in config")
}
- taskAgent, err := NewAgent(taskAgentCfg, permissions, sessions, messages, history, lspClients)
+ taskAgent, err := NewAgent(ctx, taskAgentCfg, permissions, sessions, messages, history, lspClients)
if err != nil {
return nil, fmt.Errorf("failed to create task agent: %w", err)
}
@@ -197,7 +198,7 @@ func NewAgent(
}
mcpToolsOnce.Do(func() {
- mcpTools = doGetMCPTools(permissions, cfg)
+ mcpTools = doGetMCPTools(ctx, permissions, cfg)
})
allTools = append(allTools, mcpTools...)
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index d0389ee321b55181c9e38546da8e256422fdc34f..bb50231da028e714c783f50cc7ebd8a1f4b595db 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -275,7 +275,7 @@ var mcpInitRequest = mcp.InitializeRequest{
},
}
-func doGetMCPTools(permissions permission.Service, cfg *config.Config) []tools.BaseTool {
+func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *config.Config) []tools.BaseTool {
var wg sync.WaitGroup
result := csync.NewSlice[tools.BaseTool]()
@@ -309,7 +309,7 @@ func doGetMCPTools(permissions permission.Service, cfg *config.Config) []tools.B
}
}()
- ctx, cancel := context.WithTimeout(context.Background(), mcpTimeout(m))
+ ctx, cancel := context.WithTimeout(ctx, mcpTimeout(m))
defer cancel()
c, err := createAndInitializeClient(ctx, name, m)
if err != nil {
From 8d46cf0e51ada7596ba81a522c94c3d8fd7ff442 Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Fri, 12 Sep 2025 15:52:23 +0200
Subject: [PATCH 055/236] fix: fix agent
---
internal/app/app.go | 1 +
1 file changed, 1 insertion(+)
diff --git a/internal/app/app.go b/internal/app/app.go
index 39e94feecb1f738fc46ff3012edac415c3759266..21ddcd25eff1c9aeebb9d6700f9340ab0932e7ab 100644
--- a/internal/app/app.go
+++ b/internal/app/app.go
@@ -278,6 +278,7 @@ func (app *App) InitCoderAgent() error {
}
var err error
app.CoderAgent, err = agent.NewAgent(
+ app.globalCtx,
coderAgentCfg,
app.Permissions,
app.Sessions,
From bab2bec7315bf48ae9394b1330a786e0a961b38e Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Fri, 12 Sep 2025 12:26:08 +0200
Subject: [PATCH 056/236] fix: set reasoning param when selecting model
---
internal/tui/components/dialogs/models/models.go | 12 ++++++++----
1 file changed, 8 insertions(+), 4 deletions(-)
diff --git a/internal/tui/components/dialogs/models/models.go b/internal/tui/components/dialogs/models/models.go
index 3d9443332dad2a0e23f4aedcc9ddc45249914c64..56d9eac17c277e8cbbb7c4349bbf420c56fb8610 100644
--- a/internal/tui/components/dialogs/models/models.go
+++ b/internal/tui/components/dialogs/models/models.go
@@ -170,8 +170,10 @@ func (m *modelDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
util.CmdHandler(dialogs.CloseDialogMsg{}),
util.CmdHandler(ModelSelectedMsg{
Model: config.SelectedModel{
- Model: selectedItem.Model.ID,
- Provider: string(selectedItem.Provider.ID),
+ Model: selectedItem.Model.ID,
+ Provider: string(selectedItem.Provider.ID),
+ ReasoningEffort: selectedItem.Model.DefaultReasoningEffort,
+ MaxTokens: selectedItem.Model.DefaultMaxTokens,
},
ModelType: modelType,
}),
@@ -379,8 +381,10 @@ func (m *modelDialogCmp) saveAPIKeyAndContinue(apiKey string) tea.Cmd {
util.CmdHandler(dialogs.CloseDialogMsg{}),
util.CmdHandler(ModelSelectedMsg{
Model: config.SelectedModel{
- Model: selectedModel.Model.ID,
- Provider: string(selectedModel.Provider.ID),
+ Model: selectedModel.Model.ID,
+ Provider: string(selectedModel.Provider.ID),
+ ReasoningEffort: selectedModel.Model.DefaultReasoningEffort,
+ MaxTokens: selectedModel.Model.DefaultMaxTokens,
},
ModelType: m.selectedModelType,
}),
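Both selection paths now seed the saved model entry with the catalog defaults, so reasoning effort and max tokens are set the moment a model is picked instead of being left zero-valued. A small sketch of that copy, with hypothetical field and type names standing in for the catwalk model:

```go
package main

import "fmt"

type catalogModel struct {
	ID                     string
	DefaultReasoningEffort string
	DefaultMaxTokens       int64
}

type selectedModel struct {
	Model           string
	Provider        string
	ReasoningEffort string
	MaxTokens       int64
}

// selectModel copies the catalog defaults into the persisted selection.
func selectModel(provider string, m catalogModel) selectedModel {
	return selectedModel{
		Model:           m.ID,
		Provider:        provider,
		ReasoningEffort: m.DefaultReasoningEffort,
		MaxTokens:       m.DefaultMaxTokens,
	}
}

func main() {
	m := catalogModel{ID: "o3-mini", DefaultReasoningEffort: "medium", DefaultMaxTokens: 8192}
	fmt.Printf("%+v\n", selectModel("openai", m))
}
```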
From 9bc8c6b7b2718a6da4858a84a8bf158d24b45d2a Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Fri, 12 Sep 2025 12:42:48 +0200
Subject: [PATCH 057/236] chore: do not hide the reasoning content
---
internal/tui/components/chat/messages/messages.go | 7 +++----
1 file changed, 3 insertions(+), 4 deletions(-)
diff --git a/internal/tui/components/chat/messages/messages.go b/internal/tui/components/chat/messages/messages.go
index ec55800aab85a2dbb07153c12300dbad892b3b6a..5cc15d0303fb152f299aef9a2cdc596b9ffb57d4 100644
--- a/internal/tui/components/chat/messages/messages.go
+++ b/internal/tui/components/chat/messages/messages.go
@@ -281,15 +281,14 @@ func (m *messageCmp) renderThinkingContent() string {
if reasoningContent.StartedAt > 0 {
duration := m.message.ThinkingDuration()
if reasoningContent.FinishedAt > 0 {
- if duration.String() == "0s" {
- return ""
- }
m.anim.SetLabel("")
opts := core.StatusOpts{
Title: "Thought for",
Description: duration.String(),
}
- return t.S().Base.PaddingLeft(1).Render(core.Status(opts, m.textWidth()-1))
+ if duration.String() != "0s" {
+ footer = t.S().Base.PaddingLeft(1).Render(core.Status(opts, m.textWidth()-1))
+ }
} else if finishReason != nil && finishReason.Reason == message.FinishReasonCanceled {
footer = t.S().Base.PaddingLeft(1).Render(m.toMarkdown("*Canceled*"))
} else {
From 4561c2a0620ef75832f905400fc83b75528a0e1f Mon Sep 17 00:00:00 2001
From: Ayman Bagabas
Date: Fri, 12 Sep 2025 10:34:12 -0400
Subject: [PATCH 058/236] chore: bump ultraviolet to fix double encoding keys
on windows
---
go.mod | 2 +-
go.sum | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/go.mod b/go.mod
index 6998ed6302a752faaa92d136e54703beb7e6c1b4..c98a8dadf7eea28938015e8e82271527b2c5a5d8 100644
--- a/go.mod
+++ b/go.mod
@@ -74,7 +74,7 @@ require (
github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/buger/jsonparser v1.1.1 // indirect
github.com/charmbracelet/colorprofile v0.3.2 // indirect
- github.com/charmbracelet/ultraviolet v0.0.0-20250910155420-aa0094762299
+ github.com/charmbracelet/ultraviolet v0.0.0-20250912143111-9785ff826cbf
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a // indirect
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d
github.com/charmbracelet/x/term v0.2.1
diff --git a/go.sum b/go.sum
index 5f04e5fb6161c875a12fdf689cae3dd5ee348b67..24bcee6f9da82ac0e9d380d7b21e048e1efc45b4 100644
--- a/go.sum
+++ b/go.sum
@@ -92,8 +92,8 @@ github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0/go.mod h1:XIuqKpZTUXtVyeyiN1k9Tc/U7EzfaDnVc34feFHfBws=
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706 h1:WkwO6Ks3mSIGnGuSdKl9qDSyfbYK50z2wc2gGMggegE=
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706/go.mod h1:mjJGp00cxcfvD5xdCa+bso251Jt4owrQvuimJtVmEmM=
-github.com/charmbracelet/ultraviolet v0.0.0-20250910155420-aa0094762299 h1:vpIIy7W1Bv84GUhi3Z5oRyZZRAtdTd9kI3+TnLZrnZE=
-github.com/charmbracelet/ultraviolet v0.0.0-20250910155420-aa0094762299/go.mod h1:V21rZtvULxJyG8tUsRC8caTBvKNHOuRJVxH+G6ghH0Y=
+github.com/charmbracelet/ultraviolet v0.0.0-20250912143111-9785ff826cbf h1:2fs3BT8BFjpJ4134Tq4VoBm/fE9FB2f2P/FhmzsWelQ=
+github.com/charmbracelet/ultraviolet v0.0.0-20250912143111-9785ff826cbf/go.mod h1:V21rZtvULxJyG8tUsRC8caTBvKNHOuRJVxH+G6ghH0Y=
github.com/charmbracelet/x/ansi v0.10.1 h1:rL3Koar5XvX0pHGfovN03f5cxLbCF2YvLeyz7D2jVDQ=
github.com/charmbracelet/x/ansi v0.10.1/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a h1:zYSNtEJM9jwHbJts2k+Hroj+xQwsW1yxc4Wopdv7KaI=
From dc5475f2986cb256c98e143e5d798216d5dd1f4f Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Fri, 12 Sep 2025 11:36:26 -0300
Subject: [PATCH 059/236] docs(readme): add bluesky to socials
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 907e182527690bb67ad4ed89b9d9f501d93c16ab..7667ce9145967fd5f5865868b5d6108358327fe4 100644
--- a/README.md
+++ b/README.md
@@ -478,6 +478,7 @@ We’d love to hear your thoughts on this project. Need help? We gotchu. You can
- [Discord][discord]
- [Slack](https://charm.land/slack)
- [The Fediverse](https://mastodon.social/@charmcli)
+- [Bluesky](https://bsky.app/profile/charm.land)
[discord]: https://charm.land/discord
From 3710b3c683f3c2acfde9ca103d517fa7a110a6d2 Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Wed, 10 Sep 2025 18:46:40 +0200
Subject: [PATCH 060/236] feat: add reasoning dialog
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
💘 Generated with Crush
Co-Authored-By: Crush
---
.../components/dialogs/commands/commands.go | 66 +++--
.../components/dialogs/reasoning/reasoning.go | 268 ++++++++++++++++++
internal/tui/page/chat/chat.go | 50 ++++
3 files changed, 359 insertions(+), 25 deletions(-)
create mode 100644 internal/tui/components/dialogs/reasoning/reasoning.go
diff --git a/internal/tui/components/dialogs/commands/commands.go b/internal/tui/components/dialogs/commands/commands.go
index 756e687c693da971e9ddd8bb72f08b9fc23eedae..664158fc392a87d8a7725bfa964748f7ef4f8e67 100644
--- a/internal/tui/components/dialogs/commands/commands.go
+++ b/internal/tui/components/dialogs/commands/commands.go
@@ -60,17 +60,18 @@ type commandDialogCmp struct {
}
type (
- SwitchSessionsMsg struct{}
- NewSessionsMsg struct{}
- SwitchModelMsg struct{}
- QuitMsg struct{}
- OpenFilePickerMsg struct{}
- ToggleHelpMsg struct{}
- ToggleCompactModeMsg struct{}
- ToggleThinkingMsg struct{}
- OpenExternalEditorMsg struct{}
- ToggleYoloModeMsg struct{}
- CompactMsg struct {
+ SwitchSessionsMsg struct{}
+ NewSessionsMsg struct{}
+ SwitchModelMsg struct{}
+ QuitMsg struct{}
+ OpenFilePickerMsg struct{}
+ ToggleHelpMsg struct{}
+ ToggleCompactModeMsg struct{}
+ ToggleThinkingMsg struct{}
+ OpenReasoningDialogMsg struct{}
+ OpenExternalEditorMsg struct{}
+ ToggleYoloModeMsg struct{}
+ CompactMsg struct {
SessionID string
}
)
@@ -300,26 +301,41 @@ func (c *commandDialogCmp) defaultCommands() []Command {
})
}
- // Only show thinking toggle for Anthropic models that can reason
+	// Add reasoning-related commands for models that can reason
cfg := config.Get()
if agentCfg, ok := cfg.Agents["coder"]; ok {
providerCfg := cfg.GetProviderForModel(agentCfg.Model)
model := cfg.GetModelByType(agentCfg.Model)
- if providerCfg != nil && model != nil &&
- providerCfg.Type == catwalk.TypeAnthropic && model.CanReason {
+ if providerCfg != nil && model != nil && model.CanReason {
selectedModel := cfg.Models[agentCfg.Model]
- status := "Enable"
- if selectedModel.Think {
- status = "Disable"
+
+ // Anthropic models: thinking toggle
+ if providerCfg.Type == catwalk.TypeAnthropic {
+ status := "Enable"
+ if selectedModel.Think {
+ status = "Disable"
+ }
+ commands = append(commands, Command{
+ ID: "toggle_thinking",
+ Title: status + " Thinking Mode",
+ Description: "Toggle model thinking for reasoning-capable models",
+ Handler: func(cmd Command) tea.Cmd {
+ return util.CmdHandler(ToggleThinkingMsg{})
+ },
+ })
+ }
+
+ // OpenAI models: reasoning effort dialog
+ if providerCfg.Type == catwalk.TypeOpenAI && model.HasReasoningEffort {
+ commands = append(commands, Command{
+ ID: "select_reasoning_effort",
+ Title: "Select Reasoning Effort",
+ Description: "Choose reasoning effort level (low/medium/high)",
+ Handler: func(cmd Command) tea.Cmd {
+ return util.CmdHandler(OpenReasoningDialogMsg{})
+ },
+ })
}
- commands = append(commands, Command{
- ID: "toggle_thinking",
- Title: status + " Thinking Mode",
- Description: "Toggle model thinking for reasoning-capable models",
- Handler: func(cmd Command) tea.Cmd {
- return util.CmdHandler(ToggleThinkingMsg{})
- },
- })
}
}
// Only show toggle compact mode command if window width is larger than compact breakpoint (90)
diff --git a/internal/tui/components/dialogs/reasoning/reasoning.go b/internal/tui/components/dialogs/reasoning/reasoning.go
new file mode 100644
index 0000000000000000000000000000000000000000..ba49abd8c58a0e7eb84235e7b68f5f5193a96b1b
--- /dev/null
+++ b/internal/tui/components/dialogs/reasoning/reasoning.go
@@ -0,0 +1,268 @@
+package reasoning
+
+import (
+ "github.com/charmbracelet/bubbles/v2/help"
+ "github.com/charmbracelet/bubbles/v2/key"
+ tea "github.com/charmbracelet/bubbletea/v2"
+ "github.com/charmbracelet/lipgloss/v2"
+
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/tui/components/core"
+ "github.com/charmbracelet/crush/internal/tui/components/dialogs"
+ "github.com/charmbracelet/crush/internal/tui/exp/list"
+ "github.com/charmbracelet/crush/internal/tui/styles"
+ "github.com/charmbracelet/crush/internal/tui/util"
+)
+
+const (
+ ReasoningDialogID dialogs.DialogID = "reasoning"
+
+ defaultWidth int = 50
+)
+
+type listModel = list.FilterableList[list.CompletionItem[EffortOption]]
+
+type EffortOption struct {
+ Title string
+ Effort string
+}
+
+type ReasoningDialog interface {
+ dialogs.DialogModel
+}
+
+type reasoningDialogCmp struct {
+ width int
+ wWidth int // Width of the terminal window
+ wHeight int // Height of the terminal window
+
+ effortList listModel
+ keyMap ReasoningDialogKeyMap
+ help help.Model
+}
+
+type ReasoningEffortSelectedMsg struct {
+ Effort string
+}
+
+type ReasoningDialogKeyMap struct {
+ Next key.Binding
+ Previous key.Binding
+ Select key.Binding
+ Close key.Binding
+}
+
+func DefaultReasoningDialogKeyMap() ReasoningDialogKeyMap {
+ return ReasoningDialogKeyMap{
+ Next: key.NewBinding(
+ key.WithKeys("down", "j", "ctrl+n"),
+ key.WithHelp("↓/j/ctrl+n", "next"),
+ ),
+ Previous: key.NewBinding(
+ key.WithKeys("up", "k", "ctrl+p"),
+ key.WithHelp("↑/k/ctrl+p", "previous"),
+ ),
+ Select: key.NewBinding(
+ key.WithKeys("enter"),
+ key.WithHelp("enter", "select"),
+ ),
+ Close: key.NewBinding(
+ key.WithKeys("esc", "ctrl+c"),
+ key.WithHelp("esc/ctrl+c", "close"),
+ ),
+ }
+}
+
+func (k ReasoningDialogKeyMap) ShortHelp() []key.Binding {
+ return []key.Binding{k.Select, k.Close}
+}
+
+func (k ReasoningDialogKeyMap) FullHelp() [][]key.Binding {
+ return [][]key.Binding{
+ {k.Next, k.Previous},
+ {k.Select, k.Close},
+ }
+}
+
+func NewReasoningDialog() ReasoningDialog {
+ keyMap := DefaultReasoningDialogKeyMap()
+ listKeyMap := list.DefaultKeyMap()
+ listKeyMap.Down.SetEnabled(false)
+ listKeyMap.Up.SetEnabled(false)
+ listKeyMap.DownOneItem = keyMap.Next
+ listKeyMap.UpOneItem = keyMap.Previous
+
+ t := styles.CurrentTheme()
+ inputStyle := t.S().Base.PaddingLeft(1).PaddingBottom(1)
+ effortList := list.NewFilterableList(
+ []list.CompletionItem[EffortOption]{},
+ list.WithFilterInputStyle(inputStyle),
+ list.WithFilterListOptions(
+ list.WithKeyMap(listKeyMap),
+ list.WithWrapNavigation(),
+ list.WithResizeByList(),
+ ),
+ )
+ help := help.New()
+ help.Styles = t.S().Help
+
+ return &reasoningDialogCmp{
+ effortList: effortList,
+ width: defaultWidth,
+ keyMap: keyMap,
+ help: help,
+ }
+}
+
+func (r *reasoningDialogCmp) Init() tea.Cmd {
+ return r.populateEffortOptions()
+}
+
+func (r *reasoningDialogCmp) populateEffortOptions() tea.Cmd {
+ cfg := config.Get()
+ if agentCfg, ok := cfg.Agents["coder"]; ok {
+ selectedModel := cfg.Models[agentCfg.Model]
+ model := cfg.GetModelByType(agentCfg.Model)
+
+ // Get current reasoning effort
+ currentEffort := selectedModel.ReasoningEffort
+ if currentEffort == "" && model != nil {
+ currentEffort = model.DefaultReasoningEffort
+ }
+
+ efforts := []EffortOption{
+ {
+ Title: "Low",
+ Effort: "low",
+ },
+ {
+ Title: "Medium",
+ Effort: "medium",
+ },
+ {
+ Title: "High",
+ Effort: "high",
+ },
+ }
+
+ effortItems := []list.CompletionItem[EffortOption]{}
+ selectedID := ""
+ for _, effort := range efforts {
+ opts := []list.CompletionItemOption{
+ list.WithCompletionID(effort.Effort),
+ }
+ if effort.Effort == currentEffort {
+ opts = append(opts, list.WithCompletionShortcut("current"))
+ selectedID = effort.Effort
+ }
+ effortItems = append(effortItems, list.NewCompletionItem(
+ effort.Title,
+ effort,
+ opts...,
+ ))
+ }
+
+ cmd := r.effortList.SetItems(effortItems)
+ // Set the current effort as the selected item
+ if currentEffort != "" && selectedID != "" {
+ return tea.Sequence(cmd, r.effortList.SetSelected(selectedID))
+ }
+ return cmd
+ }
+ return nil
+}
+
+func (r *reasoningDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
+ switch msg := msg.(type) {
+ case tea.WindowSizeMsg:
+ r.wWidth = msg.Width
+ r.wHeight = msg.Height
+ return r, r.effortList.SetSize(r.listWidth(), r.listHeight())
+ case tea.KeyPressMsg:
+ switch {
+ case key.Matches(msg, r.keyMap.Select):
+ selectedItem := r.effortList.SelectedItem()
+ if selectedItem == nil {
+ return r, nil // No item selected, do nothing
+ }
+ effort := (*selectedItem).Value()
+ return r, tea.Sequence(
+ util.CmdHandler(dialogs.CloseDialogMsg{}),
+ func() tea.Msg {
+ return ReasoningEffortSelectedMsg{
+ Effort: effort.Effort,
+ }
+ },
+ )
+ case key.Matches(msg, r.keyMap.Close):
+ return r, util.CmdHandler(dialogs.CloseDialogMsg{})
+ default:
+ u, cmd := r.effortList.Update(msg)
+ r.effortList = u.(listModel)
+ return r, cmd
+ }
+ }
+ return r, nil
+}
+
+func (r *reasoningDialogCmp) View() string {
+ t := styles.CurrentTheme()
+ listView := r.effortList
+
+ header := t.S().Base.Padding(0, 1, 1, 1).Render(core.Title("Select Reasoning Effort", r.width-4))
+ content := lipgloss.JoinVertical(
+ lipgloss.Left,
+ header,
+ listView.View(),
+ "",
+ t.S().Base.Width(r.width-2).PaddingLeft(1).AlignHorizontal(lipgloss.Left).Render(r.help.View(r.keyMap)),
+ )
+ return r.style().Render(content)
+}
+
+func (r *reasoningDialogCmp) Cursor() *tea.Cursor {
+ if cursor, ok := r.effortList.(util.Cursor); ok {
+ cursor := cursor.Cursor()
+ if cursor != nil {
+ cursor = r.moveCursor(cursor)
+ }
+ return cursor
+ }
+ return nil
+}
+
+func (r *reasoningDialogCmp) listWidth() int {
+	return r.width - 2 // 2 for the horizontal frame
+}
+
+func (r *reasoningDialogCmp) listHeight() int {
+ listHeight := len(r.effortList.Items()) + 2 + 4 // height based on items + 2 for the input + 4 for the sections
+ return min(listHeight, r.wHeight/2)
+}
+
+func (r *reasoningDialogCmp) moveCursor(cursor *tea.Cursor) *tea.Cursor {
+ row, col := r.Position()
+ offset := row + 3
+ cursor.Y += offset
+ cursor.X = cursor.X + col + 2
+ return cursor
+}
+
+func (r *reasoningDialogCmp) style() lipgloss.Style {
+ t := styles.CurrentTheme()
+ return t.S().Base.
+ Width(r.width).
+ Border(lipgloss.RoundedBorder()).
+ BorderForeground(t.BorderFocus)
+}
+
+func (r *reasoningDialogCmp) Position() (int, int) {
+ row := r.wHeight/4 - 2 // just a bit above the center
+ col := r.wWidth / 2
+ col -= r.width / 2
+ return row, col
+}
+
+func (r *reasoningDialogCmp) ID() dialogs.DialogID {
+ return ReasoningDialogID
+}
diff --git a/internal/tui/page/chat/chat.go b/internal/tui/page/chat/chat.go
index c843ce7b8cf3702eac4a9ce1b081204fe73f05c5..88523388e31824a65d7e9922b89a1886a5fbcc0d 100644
--- a/internal/tui/page/chat/chat.go
+++ b/internal/tui/page/chat/chat.go
@@ -9,6 +9,7 @@ import (
"github.com/charmbracelet/bubbles/v2/key"
"github.com/charmbracelet/bubbles/v2/spinner"
tea "github.com/charmbracelet/bubbletea/v2"
+ "github.com/charmbracelet/catwalk/pkg/catwalk"
"github.com/charmbracelet/crush/internal/app"
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/history"
@@ -26,9 +27,11 @@ import (
"github.com/charmbracelet/crush/internal/tui/components/completions"
"github.com/charmbracelet/crush/internal/tui/components/core"
"github.com/charmbracelet/crush/internal/tui/components/core/layout"
+ "github.com/charmbracelet/crush/internal/tui/components/dialogs"
"github.com/charmbracelet/crush/internal/tui/components/dialogs/commands"
"github.com/charmbracelet/crush/internal/tui/components/dialogs/filepicker"
"github.com/charmbracelet/crush/internal/tui/components/dialogs/models"
+ "github.com/charmbracelet/crush/internal/tui/components/dialogs/reasoning"
"github.com/charmbracelet/crush/internal/tui/page"
"github.com/charmbracelet/crush/internal/tui/styles"
"github.com/charmbracelet/crush/internal/tui/util"
@@ -255,6 +258,10 @@ func (p *chatPage) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return p, tea.Batch(p.SetSize(p.width, p.height), cmd)
case commands.ToggleThinkingMsg:
return p, p.toggleThinking()
+ case commands.OpenReasoningDialogMsg:
+ return p, p.openReasoningDialog()
+ case reasoning.ReasoningEffortSelectedMsg:
+ return p, p.handleReasoningEffortSelected(msg.Effort)
case commands.OpenExternalEditorMsg:
u, cmd := p.editor.Update(msg)
p.editor = u.(editor.Editor)
@@ -549,6 +556,49 @@ func (p *chatPage) toggleThinking() tea.Cmd {
}
}
+func (p *chatPage) openReasoningDialog() tea.Cmd {
+ return func() tea.Msg {
+ cfg := config.Get()
+ agentCfg := cfg.Agents["coder"]
+ model := cfg.GetModelByType(agentCfg.Model)
+ providerCfg := cfg.GetProviderForModel(agentCfg.Model)
+
+ if providerCfg != nil && model != nil &&
+ providerCfg.Type == catwalk.TypeOpenAI && model.HasReasoningEffort {
+ // Return the OpenDialogMsg directly so it bubbles up to the main TUI
+ return dialogs.OpenDialogMsg{
+ Model: reasoning.NewReasoningDialog(),
+ }
+ }
+ return nil
+ }
+}
+
+func (p *chatPage) handleReasoningEffortSelected(effort string) tea.Cmd {
+ return func() tea.Msg {
+ cfg := config.Get()
+ agentCfg := cfg.Agents["coder"]
+ currentModel := cfg.Models[agentCfg.Model]
+
+ // Update the model configuration
+ currentModel.ReasoningEffort = effort
+ cfg.Models[agentCfg.Model] = currentModel
+
+ // Update the agent with the new configuration
+ if err := p.app.UpdateAgentModel(); err != nil {
+ return util.InfoMsg{
+ Type: util.InfoTypeError,
+ Msg: "Failed to update reasoning effort: " + err.Error(),
+ }
+ }
+
+ return util.InfoMsg{
+ Type: util.InfoTypeInfo,
+ Msg: "Reasoning effort set to " + effort,
+ }
+ }
+}
+
func (p *chatPage) setCompactMode(compact bool) {
if p.compact == compact {
return
From 9e42d8c3b72c61ab2ef58b58494057a9ae8078b8 Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Fri, 12 Sep 2025 12:00:21 -0300
Subject: [PATCH 061/236] chore(legal): @Amolith has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index d893fbfe2cd3f16295b636031d97c3cd5a163c51..53b95be41fa0ad52087a63aba18bc241cbf20d68 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -599,6 +599,14 @@
"created_at": "2025-09-11T17:37:57Z",
"repoId": 987670088,
"pullRequestNo": 1017
+ },
+ {
+ "name": "Amolith",
+ "id": 29460675,
+ "comment_id": 3285628360,
+ "created_at": "2025-09-12T15:00:12Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1025
}
]
}
\ No newline at end of file
From f186e5876751564617ebaf3facccb0f378d544d3 Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Fri, 12 Sep 2025 21:21:01 +0200
Subject: [PATCH 062/236] fix: add mcps after the filter
---
internal/llm/agent/agent.go | 20 +++++++++++---------
1 file changed, 11 insertions(+), 9 deletions(-)
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index cb4c515dfc4caf379dfa6e9ee83409af5983b0a4..af2bb2f1a95f4bf08e9c6a55429acc02816961da 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -197,15 +197,6 @@ func NewAgent(
tools.NewWriteTool(lspClients, permissions, history, cwd),
}
- mcpToolsOnce.Do(func() {
- mcpTools = doGetMCPTools(ctx, permissions, cfg)
- })
- allTools = append(allTools, mcpTools...)
-
- if len(lspClients) > 0 {
- allTools = append(allTools, tools.NewDiagnosticsTool(lspClients))
- }
-
if agentCfg.AllowedTools == nil {
return allTools
}
@@ -216,6 +207,17 @@ func NewAgent(
filteredTools = append(filteredTools, tool)
}
}
+
+ if agentCfg.ID == "coder" {
+ mcpToolsOnce.Do(func() {
+ mcpTools = doGetMCPTools(ctx, permissions, cfg)
+ })
+ filteredTools = append(filteredTools, mcpTools...)
+ if len(lspClients) > 0 {
+ filteredTools = append(filteredTools, tools.NewDiagnosticsTool(lspClients))
+ }
+
+ }
return filteredTools
}
From 9c3a1b4c2996e10f35071f56ef5056576ba9b458 Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Fri, 12 Sep 2025 21:23:50 +0200
Subject: [PATCH 063/236] chore: lint
---
internal/llm/agent/agent.go | 1 -
1 file changed, 1 deletion(-)
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index af2bb2f1a95f4bf08e9c6a55429acc02816961da..f3c4e9b1edbe451184c655031ba92e17494b2655 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -216,7 +216,6 @@ func NewAgent(
if len(lspClients) > 0 {
filteredTools = append(filteredTools, tools.NewDiagnosticsTool(lspClients))
}
-
}
return filteredTools
}
From c571aefa310c0a4643d7619ec0a32434ce31df32 Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Fri, 12 Sep 2025 21:30:46 +0200
Subject: [PATCH 064/236] chore: refactor
---
internal/llm/agent/agent.go | 24 ++++++++++++++++--------
1 file changed, 16 insertions(+), 8 deletions(-)
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index f3c4e9b1edbe451184c655031ba92e17494b2655..b86f4dc94436041b76561c97dd52b04610c19bff 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -197,8 +197,22 @@ func NewAgent(
tools.NewWriteTool(lspClients, permissions, history, cwd),
}
+ mcpToolsOnce.Do(func() {
+ mcpTools = doGetMCPTools(ctx, permissions, cfg)
+ })
+
+ withCoderTools := func(t []tools.BaseTool) []tools.BaseTool {
+ if agentCfg.ID == "coder" {
+ t = append(t, mcpTools...)
+ if len(lspClients) > 0 {
+ t = append(t, tools.NewDiagnosticsTool(lspClients))
+ }
+ }
+ return t
+ }
+
if agentCfg.AllowedTools == nil {
- return allTools
+ return withCoderTools(allTools)
}
var filteredTools []tools.BaseTool
@@ -209,15 +223,9 @@ func NewAgent(
}
if agentCfg.ID == "coder" {
- mcpToolsOnce.Do(func() {
- mcpTools = doGetMCPTools(ctx, permissions, cfg)
- })
filteredTools = append(filteredTools, mcpTools...)
- if len(lspClients) > 0 {
- filteredTools = append(filteredTools, tools.NewDiagnosticsTool(lspClients))
- }
}
- return filteredTools
+ return withCoderTools(filteredTools)
}
return &agent{
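The refactor above routes both return paths (allow-list present or not) through a single `withCoderTools` closure that appends the coder-only extras. A self-contained sketch of that shape, with a stand-in `Tool` type rather than the real `tools.BaseTool`:

```go
// Sketch of the closure-based tool assembly from the patch above.
package main

import "fmt"

type Tool struct{ Name string }

func buildTools(agentID string, base, mcpTools []Tool, haveLSP bool, allowed []string) []Tool {
	// Appends coder-only tools (MCP + diagnostics) to whatever slice it is given.
	withCoderTools := func(t []Tool) []Tool {
		if agentID == "coder" {
			t = append(t, mcpTools...)
			if haveLSP {
				t = append(t, Tool{Name: "diagnostics"})
			}
		}
		return t
	}

	if allowed == nil {
		return withCoderTools(base)
	}

	allow := make(map[string]struct{}, len(allowed))
	for _, name := range allowed {
		allow[name] = struct{}{}
	}
	var filtered []Tool
	for _, t := range base {
		if _, ok := allow[t.Name]; ok {
			filtered = append(filtered, t)
		}
	}
	return withCoderTools(filtered)
}

func main() {
	base := []Tool{{Name: "edit"}, {Name: "write"}, {Name: "bash"}}
	mcp := []Tool{{Name: "mcp_search"}}
	fmt.Println(buildTools("coder", base, mcp, true, []string{"edit"}))
}
```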
From c7d33353631a30b88cbef07cd8fb0e4c2f44ed0c Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Fri, 12 Sep 2025 21:33:42 +0200
Subject: [PATCH 065/236] chore: remove duplicate tools
---
internal/llm/agent/agent.go | 4 ----
1 file changed, 4 deletions(-)
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index b86f4dc94436041b76561c97dd52b04610c19bff..85439c3c0e8cc99ee7c07cfeb669e9402b3acce7 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -221,10 +221,6 @@ func NewAgent(
filteredTools = append(filteredTools, tool)
}
}
-
- if agentCfg.ID == "coder" {
- filteredTools = append(filteredTools, mcpTools...)
- }
return withCoderTools(filteredTools)
}
From 230e4de83c23292ab42598dbffa327405bcb3c42 Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Wed, 10 Sep 2025 17:44:04 -0300
Subject: [PATCH 066/236] feat: add ability to disable providers auto-update
from catwalk
---
internal/config/config.go | 15 ++-
internal/config/load.go | 15 ++-
internal/config/provider.go | 120 ++++++++++++------
internal/config/provider_empty_test.go | 6 +-
internal/config/provider_test.go | 6 +-
internal/tui/components/chat/splash/splash.go | 3 +-
.../tui/components/dialogs/models/list.go | 5 +-
.../tui/components/dialogs/models/models.go | 3 +-
schema.json | 5 +
9 files changed, 116 insertions(+), 62 deletions(-)
diff --git a/internal/config/config.go b/internal/config/config.go
index 4e42a56e361c81feca31cd95bd778d14c312cd20..17ed626838cb555db163ee6c4db47d9d1be61b2a 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -137,13 +137,14 @@ type Permissions struct {
}
type Options struct {
- ContextPaths []string `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
- TUI *TUIOptions `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
- Debug bool `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
- DebugLSP bool `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
- DisableAutoSummarize bool `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
- DataDirectory string `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
- DisabledTools []string `json:"disabled_tools" jsonschema:"description=Tools to disable"`
+ ContextPaths []string `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
+ TUI *TUIOptions `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
+ Debug bool `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
+ DebugLSP bool `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
+ DisableAutoSummarize bool `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
+ DataDirectory string `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
+ DisabledTools []string `json:"disabled_tools" jsonschema:"description=Tools to disable"`
+ DisableProviderAutoUpdate bool `json:"disable_provider_auto_update,omitempty" jsonschema:"description=Disable providers auto-update,default=false"`
}
type MCPs map[string]MCPConfig
diff --git a/internal/config/load.go b/internal/config/load.go
index a703a049c7697be9209d3994c857ff0548f60b8b..9e1c9d0f7b739d7d6bdd974657b6efb5ea52d2ee 100644
--- a/internal/config/load.go
+++ b/internal/config/load.go
@@ -10,6 +10,7 @@ import (
"path/filepath"
"runtime"
"slices"
+ "strconv"
"strings"
"github.com/charmbracelet/catwalk/pkg/catwalk"
@@ -66,9 +67,9 @@ func Load(workingDir, dataDir string, debug bool) (*Config, error) {
)
// Load known providers, this loads the config from catwalk
- providers, err := Providers()
- if err != nil || len(providers) == 0 {
- return nil, fmt.Errorf("failed to load providers: %w", err)
+ providers, err := Providers(cfg)
+ if err != nil {
+ return nil, err
}
cfg.knownProviders = providers
@@ -76,7 +77,7 @@ func Load(workingDir, dataDir string, debug bool) (*Config, error) {
// Configure providers
valueResolver := NewShellVariableResolver(env)
cfg.resolver = valueResolver
- if err := cfg.configureProviders(env, valueResolver, providers); err != nil {
+ if err := cfg.configureProviders(env, valueResolver, cfg.knownProviders); err != nil {
return nil, fmt.Errorf("failed to configure providers: %w", err)
}
@@ -85,7 +86,7 @@ func Load(workingDir, dataDir string, debug bool) (*Config, error) {
return cfg, nil
}
- if err := cfg.configureSelectedModels(providers); err != nil {
+ if err := cfg.configureSelectedModels(cfg.knownProviders); err != nil {
return nil, fmt.Errorf("failed to configure selected models: %w", err)
}
cfg.SetupAgents()
@@ -340,6 +341,10 @@ func (c *Config) setDefaults(workingDir, dataDir string) {
c.Options.ContextPaths = append(defaultContextPaths, c.Options.ContextPaths...)
slices.Sort(c.Options.ContextPaths)
c.Options.ContextPaths = slices.Compact(c.Options.ContextPaths)
+
+ if str, ok := os.LookupEnv("CRUSH_DISABLE_PROVIDER_AUTO_UPDATE"); ok {
+ c.Options.DisableProviderAutoUpdate, _ = strconv.ParseBool(str)
+ }
}
var defaultLSPFileTypes = map[string][]string{
diff --git a/internal/config/provider.go b/internal/config/provider.go
index 68ede5095506b21dc4d744e309aaa836917345e5..2248c8949a9880a4f555db8c2c5098742a5772b0 100644
--- a/internal/config/provider.go
+++ b/internal/config/provider.go
@@ -12,6 +12,7 @@ import (
"time"
"github.com/charmbracelet/catwalk/pkg/catwalk"
+ "github.com/charmbracelet/catwalk/pkg/embedded"
"github.com/charmbracelet/crush/internal/home"
)
@@ -22,6 +23,7 @@ type ProviderClient interface {
var (
providerOnce sync.Once
providerList []catwalk.Provider
+ providerErr error
)
// file to cache provider data
@@ -75,55 +77,93 @@ func loadProvidersFromCache(path string) ([]catwalk.Provider, error) {
return providers, nil
}
-func Providers() ([]catwalk.Provider, error) {
- catwalkURL := cmp.Or(os.Getenv("CATWALK_URL"), defaultCatwalkURL)
- client := catwalk.NewWithURL(catwalkURL)
- path := providerCacheFileData()
- return loadProvidersOnce(client, path)
-}
-
-func loadProvidersOnce(client ProviderClient, path string) ([]catwalk.Provider, error) {
- var err error
+func Providers(cfg *Config) ([]catwalk.Provider, error) {
providerOnce.Do(func() {
- providerList, err = loadProviders(client, path)
+ catwalkURL := cmp.Or(os.Getenv("CATWALK_URL"), defaultCatwalkURL)
+ client := catwalk.NewWithURL(catwalkURL)
+ path := providerCacheFileData()
+
+ autoUpdateDisabled := cfg.Options.DisableProviderAutoUpdate
+ providerList, providerErr = loadProviders(autoUpdateDisabled, client, path)
})
- if err != nil {
- return nil, err
- }
- return providerList, nil
+ return providerList, providerErr
}
-func loadProviders(client ProviderClient, path string) (providerList []catwalk.Provider, err error) {
- // if cache is not stale, load from it
- stale, exists := isCacheStale(path)
- if !stale {
- slog.Info("Using cached provider data", "path", path)
- providerList, err = loadProvidersFromCache(path)
- if len(providerList) > 0 && err == nil {
- go func() {
- slog.Info("Updating provider cache in background", "path", path)
- updated, uerr := client.GetProviders()
- if len(updated) > 0 && uerr == nil {
- _ = saveProvidersInCache(path, updated)
- }
- }()
- return
+func loadProviders(autoUpdateDisabled bool, client ProviderClient, path string) ([]catwalk.Provider, error) {
+ cacheIsStale, cacheExists := isCacheStale(path)
+
+ catwalkGetAndSave := func() ([]catwalk.Provider, error) {
+ providers, err := client.GetProviders()
+ if err != nil {
+ return nil, fmt.Errorf("failed to fetch providers from catwalk: %w", err)
+ }
+ if len(providers) == 0 {
+ return nil, fmt.Errorf("empty providers list from catwalk")
+ }
+ if err := saveProvidersInCache(path, providers); err != nil {
+ return nil, err
}
+ return providers, nil
}
- slog.Info("Getting live provider data", "path", path)
- providerList, err = client.GetProviders()
- if len(providerList) > 0 && err == nil {
- err = saveProvidersInCache(path, providerList)
- return
+ backgroundCacheUpdate := func() {
+ go func() {
+ slog.Info("Updating providers cache in background", "path", path)
+
+ providers, err := client.GetProviders()
+ if err != nil {
+ slog.Error("Failed to fetch providers in background from Catwalk", "error", err)
+ return
+ }
+ if len(providers) == 0 {
+ slog.Error("Empty providers list from Catwalk")
+ return
+ }
+ if err := saveProvidersInCache(path, providers); err != nil {
+ slog.Error("Failed to update providers.json in background", "error", err)
+ }
+ }()
}
- if !exists {
- err = fmt.Errorf("failed to load providers")
- return
+
+ switch {
+ case autoUpdateDisabled:
+ slog.Warn("Providers auto-update is disabled")
+
+ if cacheExists {
+ slog.Warn("Using locally cached providers")
+ return loadProvidersFromCache(path)
+ }
+
+ slog.Warn("Saving embedded providers to cache")
+ providers := embedded.GetAll()
+ if err := saveProvidersInCache(path, providers); err != nil {
+ return nil, err
+ }
+ return providers, nil
+
+ case cacheExists && !cacheIsStale:
+ slog.Info("Recent providers cache is available.", "path", path)
+
+ providers, err := loadProvidersFromCache(path)
+ if err != nil {
+ return nil, err
+ }
+ if len(providers) == 0 {
+ return catwalkGetAndSave()
+ }
+ backgroundCacheUpdate()
+ return providers, nil
+
+ default:
+ slog.Info("Cache is not available or is stale. Fetching providers from Catwalk.", "path", path)
+
+ providers, err := catwalkGetAndSave()
+ if err != nil {
+ catwalkUrl := fmt.Sprintf("%s/providers", cmp.Or(os.Getenv("CATWALK_URL"), defaultCatwalkURL))
+ return nil, fmt.Errorf("crush was unable to fetch an updated list of providers from %s. Consider setting CRUSH_DISABLE_PROVIDER_AUTO_UPDATE=1 to use embedded version from the time of this Crush release. %w", catwalkUrl, err)
+ }
+ return providers, nil
}
- slog.Info("Loading provider data from cache", "path", path)
- providerList, err = loadProvidersFromCache(path)
- return
}
func isCacheStale(path string) (stale, exists bool) {
diff --git a/internal/config/provider_empty_test.go b/internal/config/provider_empty_test.go
index cb71cabfa5a01cb16b6ef2b6708d1780e31951a9..3cd55ae7921171a580dccc91aa1d22d2f7934271 100644
--- a/internal/config/provider_empty_test.go
+++ b/internal/config/provider_empty_test.go
@@ -19,8 +19,8 @@ func TestProvider_loadProvidersEmptyResult(t *testing.T) {
client := &emptyProviderClient{}
tmpPath := t.TempDir() + "/providers.json"
- providers, err := loadProviders(client, tmpPath)
- require.EqualError(t, err, "failed to load providers")
+ providers, err := loadProviders(false, client, tmpPath)
+ require.Contains(t, err.Error(), "crush was unable to fetch an updated list of providers")
require.Empty(t, providers)
require.Len(t, providers, 0)
@@ -39,7 +39,7 @@ func TestProvider_loadProvidersEmptyCache(t *testing.T) {
require.NoError(t, os.WriteFile(tmpPath, data, 0o644))
// Should refresh and get real providers instead of using empty cache
- providers, err := loadProviders(client, tmpPath)
+ providers, err := loadProviders(false, client, tmpPath)
require.NoError(t, err)
require.NotNil(t, providers)
require.Len(t, providers, 1)
diff --git a/internal/config/provider_test.go b/internal/config/provider_test.go
index ed2568d68a840798872af60c5132707e84a5cbbf..8b499919bca666915a89d38c1e5014a911f4d2d1 100644
--- a/internal/config/provider_test.go
+++ b/internal/config/provider_test.go
@@ -28,7 +28,7 @@ func (m *mockProviderClient) GetProviders() ([]catwalk.Provider, error) {
func TestProvider_loadProvidersNoIssues(t *testing.T) {
client := &mockProviderClient{shouldFail: false}
tmpPath := t.TempDir() + "/providers.json"
- providers, err := loadProviders(client, tmpPath)
+ providers, err := loadProviders(false, client, tmpPath)
require.NoError(t, err)
require.NotNil(t, providers)
require.Len(t, providers, 1)
@@ -57,7 +57,7 @@ func TestProvider_loadProvidersWithIssues(t *testing.T) {
if err != nil {
t.Fatalf("Failed to write old providers to file: %v", err)
}
- providers, err := loadProviders(client, tmpPath)
+ providers, err := loadProviders(false, client, tmpPath)
require.NoError(t, err)
require.NotNil(t, providers)
require.Len(t, providers, 1)
@@ -67,7 +67,7 @@ func TestProvider_loadProvidersWithIssues(t *testing.T) {
func TestProvider_loadProvidersWithIssuesAndNoCache(t *testing.T) {
client := &mockProviderClient{shouldFail: true}
tmpPath := t.TempDir() + "/providers.json"
- providers, err := loadProviders(client, tmpPath)
+ providers, err := loadProviders(false, client, tmpPath)
require.Error(t, err)
require.Nil(t, providers, "Expected nil providers when loading fails and no cache exists")
}
diff --git a/internal/tui/components/chat/splash/splash.go b/internal/tui/components/chat/splash/splash.go
index 7fa46cdd279a2cbe98a86654a23e81a49bc8aebf..b49bd862876f6b3eb880bfe732b956026421aabe 100644
--- a/internal/tui/components/chat/splash/splash.go
+++ b/internal/tui/components/chat/splash/splash.go
@@ -397,7 +397,8 @@ func (s *splashCmp) setPreferredModel(selectedItem models.ModelOption) tea.Cmd {
}
func (s *splashCmp) getProvider(providerID catwalk.InferenceProvider) (*catwalk.Provider, error) {
- providers, err := config.Providers()
+ cfg := config.Get()
+ providers, err := config.Providers(cfg)
if err != nil {
return nil, err
}
diff --git a/internal/tui/components/dialogs/models/list.go b/internal/tui/components/dialogs/models/list.go
index 66b55d85b299cb0bacb4cc2466c7b4146248ba05..77398c4d17d85126ab155a9e9c5b2085c0691672 100644
--- a/internal/tui/components/dialogs/models/list.go
+++ b/internal/tui/components/dialogs/models/list.go
@@ -49,7 +49,8 @@ func NewModelListComponent(keyMap list.KeyMap, inputPlaceholder string, shouldRe
func (m *ModelListComponent) Init() tea.Cmd {
var cmds []tea.Cmd
if len(m.providers) == 0 {
- providers, err := config.Providers()
+ cfg := config.Get()
+ providers, err := config.Providers(cfg)
filteredProviders := []catwalk.Provider{}
for _, p := range providers {
hasAPIKeyEnv := strings.HasPrefix(p.APIKey, "$")
@@ -119,7 +120,7 @@ func (m *ModelListComponent) SetModelType(modelType int) tea.Cmd {
// First, add any configured providers that are not in the known providers list
// These should appear at the top of the list
- knownProviders, err := config.Providers()
+ knownProviders, err := config.Providers(cfg)
if err != nil {
return util.ReportError(err)
}
diff --git a/internal/tui/components/dialogs/models/models.go b/internal/tui/components/dialogs/models/models.go
index 56d9eac17c277e8cbbb7c4349bbf420c56fb8610..7c2863706c29180cffcfb88c385a012e39df464c 100644
--- a/internal/tui/components/dialogs/models/models.go
+++ b/internal/tui/components/dialogs/models/models.go
@@ -352,7 +352,8 @@ func (m *modelDialogCmp) isProviderConfigured(providerID string) bool {
}
func (m *modelDialogCmp) getProvider(providerID catwalk.InferenceProvider) (*catwalk.Provider, error) {
- providers, err := config.Providers()
+ cfg := config.Get()
+ providers, err := config.Providers(cfg)
if err != nil {
return nil, err
}
diff --git a/schema.json b/schema.json
index 060f9738884da739a186898d859ac5618c35b5b8..9dee9055050c8e29fb689e9700b33aa8e9842cd2 100644
--- a/schema.json
+++ b/schema.json
@@ -278,6 +278,11 @@
},
"type": "array",
"description": "Tools to disable"
+ },
+ "disable_provider_auto_update": {
+ "type": "boolean",
+ "description": "Disable providers auto-update",
+ "default": false
}
},
"additionalProperties": false,
From c0edda610fd7acd8234fa2cd3ec4c40f4ba6c2c2 Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Wed, 10 Sep 2025 18:27:31 -0300
Subject: [PATCH 067/236] feat: add `crush update-providers` command
---
internal/cmd/root.go | 1 +
internal/cmd/update_providers.go | 60 ++++++++++++++++++++++++++++++++
internal/config/provider.go | 36 +++++++++++++++++++
3 files changed, 97 insertions(+)
create mode 100644 internal/cmd/update_providers.go
diff --git a/internal/cmd/root.go b/internal/cmd/root.go
index ee167814a1688ae45238d92f0cae78a7e86c0ccd..3ecb23e5acd68c1666cf9798b17bcc408b9290e1 100644
--- a/internal/cmd/root.go
+++ b/internal/cmd/root.go
@@ -28,6 +28,7 @@ func init() {
rootCmd.Flags().BoolP("yolo", "y", false, "Automatically accept all permissions (dangerous mode)")
rootCmd.AddCommand(runCmd)
+ rootCmd.AddCommand(updateProvidersCmd)
}
var rootCmd = &cobra.Command{
diff --git a/internal/cmd/update_providers.go b/internal/cmd/update_providers.go
new file mode 100644
index 0000000000000000000000000000000000000000..1e771642db785794a5abca24a14b33376cdc0724
--- /dev/null
+++ b/internal/cmd/update_providers.go
@@ -0,0 +1,60 @@
+package cmd
+
+import (
+ "fmt"
+ "log/slog"
+
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/lipgloss/v2"
+ "github.com/charmbracelet/x/exp/charmtone"
+ "github.com/spf13/cobra"
+)
+
+var updateProvidersCmd = &cobra.Command{
+ Use: "update-providers [path-or-url]",
+ Short: "Update providers",
+ Long: `Update the list of providers from a specified local path or remote URL.`,
+ Example: `
+# Update providers remotely from Catwalk
+crush update-providers
+
+# Update providers from a custom URL
+crush update-providers https://example.com/
+
+# Update providers from a local file
+crush update-providers /path/to/local-providers.json
+
+# Update providers from embedded version
+crush update-providers embedded
+`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ // NOTE(@andreynering): We want to skip logging output to stdout here.
+ slog.SetDefault(slog.New(slog.DiscardHandler))
+
+ var pathOrUrl string
+ if len(args) > 0 {
+ pathOrUrl = args[0]
+ }
+
+ if err := config.UpdateProviders(pathOrUrl); err != nil {
+ return err
+ }
+
+ // NOTE(@andreynering): This style is more-or-less copied from Fang's
+ // error message, adapted for success.
+ headerStyle := lipgloss.NewStyle().
+ Foreground(charmtone.Butter).
+ Background(charmtone.Guac).
+ Bold(true).
+ Padding(0, 1).
+ Margin(1).
+ MarginLeft(2).
+ SetString("SUCCESS")
+ textStyle := lipgloss.NewStyle().
+ MarginLeft(2).
+ SetString("Providers updated successfully.")
+
+ fmt.Printf("%s\n%s\n\n", headerStyle.Render(), textStyle.Render())
+ return nil
+ },
+}
diff --git a/internal/config/provider.go b/internal/config/provider.go
index 2248c8949a9880a4f555db8c2c5098742a5772b0..ae5169d58350fc41b9dba725ffd1c8315590afad 100644
--- a/internal/config/provider.go
+++ b/internal/config/provider.go
@@ -8,6 +8,7 @@ import (
"os"
"path/filepath"
"runtime"
+ "strings"
"sync"
"time"
@@ -77,6 +78,41 @@ func loadProvidersFromCache(path string) ([]catwalk.Provider, error) {
return providers, nil
}
+func UpdateProviders(pathOrUrl string) error {
+ var providers []catwalk.Provider
+ pathOrUrl = cmp.Or(pathOrUrl, os.Getenv("CATWALK_URL"), defaultCatwalkURL)
+
+ switch {
+ case pathOrUrl == "embedded":
+ providers = embedded.GetAll()
+ case strings.HasPrefix(pathOrUrl, "http://") || strings.HasPrefix(pathOrUrl, "https://"):
+ var err error
+ providers, err = catwalk.NewWithURL(pathOrUrl).GetProviders()
+ if err != nil {
+ return fmt.Errorf("failed to fetch providers from Catwalk: %w", err)
+ }
+ default:
+ content, err := os.ReadFile(pathOrUrl)
+ if err != nil {
+ return fmt.Errorf("failed to read file: %w", err)
+ }
+ if err := json.Unmarshal(content, &providers); err != nil {
+ return fmt.Errorf("failed to unmarshal provider data: %w", err)
+ }
+ if len(providers) == 0 {
+ return fmt.Errorf("no providers found in the provided source")
+ }
+ }
+
+ cachePath := providerCacheFileData()
+ if err := saveProvidersInCache(cachePath, providers); err != nil {
+ return fmt.Errorf("failed to save providers to cache: %w", err)
+ }
+
+ slog.Info("Providers updated successfully", "count", len(providers), "from", pathOrUrl, "to", cachePath)
+ return nil
+}
+
func Providers(cfg *Config) ([]catwalk.Provider, error) {
providerOnce.Do(func() {
catwalkURL := cmp.Or(os.Getenv("CATWALK_URL"), defaultCatwalkURL)
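`UpdateProviders` and `Providers` both resolve their source with a `cmp.Or` fallback chain: the first non-empty string wins, so an explicit argument beats `CATWALK_URL`, which beats the built-in default. A minimal sketch of that resolution; the default URL below is a placeholder, not necessarily the value crush ships with:

```go
// Sketch of the cmp.Or fallback chain used above (Go 1.22+).
package main

import (
	"cmp"
	"fmt"
	"os"
)

const defaultCatwalkURL = "https://catwalk.example.invalid" // placeholder default

func resolveSource(arg string) string {
	return cmp.Or(arg, os.Getenv("CATWALK_URL"), defaultCatwalkURL)
}

func main() {
	fmt.Println(resolveSource(""))               // CATWALK_URL if set, else the default
	fmt.Println(resolveSource("providers.json")) // explicit path always wins
}
```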
From 04b571071999560ad975ba2a7de283cc0ba4833a Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Thu, 11 Sep 2025 17:28:22 -0300
Subject: [PATCH 068/236] docs(readme): document how to disable providers
auto-update
---
README.md | 43 +++++++++++++++++++++++++++++++++++++++++++
1 file changed, 43 insertions(+)
diff --git a/README.md b/README.md
index 7667ce9145967fd5f5865868b5d6108358327fe4..bed79137a9b1af4a1b200f02edae8f786b4d2bc0 100644
--- a/README.md
+++ b/README.md
@@ -470,6 +470,49 @@ config:
}
```
+## Disabling providers auto-update
+
+Crush automatically fetches the updated list of providers and models from
+[Catwalk](https://github.com/charmbracelet/catwalk). We know some environments
+have restricted internet access, so if you need to disable this you have two
+options:
+
+Set `disable_provider_auto_update` into your `crush.json` config:
+
+```json
+{
+ "$schema": "https://charm.land/crush.json",
+ "options": {
+ "disable_provider_auto_update": true
+ }
+}
+```
+
+Or alternatively set the `CRUSH_DISABLE_PROVIDER_AUTO_UPDATE` environment
+variable:
+
+```bash
+export CRUSH_DISABLE_PROVIDER_AUTO_UPDATE=1
+```
+
+With that set, you'll have to manually update providers from time to time. It's
+possible to do that with the `crush update-providers` command:
+
+```bash
+# Update providers remotely from Catwalk.
+crush update-providers
+
+# Update providers from a custom Catwalk base URL.
+crush update-providers https://example.com/
+
+# Update providers from a local file.
+crush update-providers /path/to/local-providers.json
+
+# Update providers from the embedded version.
+# (Crush ships with an embedded copy of Catwalk at the time of the release).
+crush update-providers embedded
+```
+
## Whatcha think?
We’d love to hear your thoughts on this project. Need help? We gotchu. You can find us on:
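For reference, the `CRUSH_DISABLE_PROVIDER_AUTO_UPDATE` variable introduced earlier in this series is read with `strconv.ParseBool`, so `1`, `true`, `TRUE`, etc. all disable auto-updates, and an unparseable value leaves the feature enabled. A minimal sketch (not crush code) of that interpretation:

```go
// Sketch of how the environment override is parsed in the config patch above.
package main

import (
	"fmt"
	"os"
	"strconv"
)

func providerAutoUpdateDisabled() bool {
	str, ok := os.LookupEnv("CRUSH_DISABLE_PROVIDER_AUTO_UPDATE")
	if !ok {
		return false
	}
	disabled, _ := strconv.ParseBool(str) // invalid values fall back to false
	return disabled
}

func main() {
	os.Setenv("CRUSH_DISABLE_PROVIDER_AUTO_UPDATE", "1")
	fmt.Println(providerAutoUpdateDisabled()) // true
}
```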
From bd4503ee32b216df36321711fed6b0a20a743a2a Mon Sep 17 00:00:00 2001
From: Christian Rocha
Date: Fri, 12 Sep 2025 16:52:18 -0400
Subject: [PATCH 069/236] chore: provider error message copyedit (#1029)
---
internal/config/provider.go | 2 +-
internal/config/provider_empty_test.go | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/internal/config/provider.go b/internal/config/provider.go
index ae5169d58350fc41b9dba725ffd1c8315590afad..671c348f71da3a79f65c14c624bdaf2adc011411 100644
--- a/internal/config/provider.go
+++ b/internal/config/provider.go
@@ -196,7 +196,7 @@ func loadProviders(autoUpdateDisabled bool, client ProviderClient, path string)
providers, err := catwalkGetAndSave()
if err != nil {
catwalkUrl := fmt.Sprintf("%s/providers", cmp.Or(os.Getenv("CATWALK_URL"), defaultCatwalkURL))
- return nil, fmt.Errorf("crush was unable to fetch an updated list of providers from %s. Consider setting CRUSH_DISABLE_PROVIDER_AUTO_UPDATE=1 to use embedded version from the time of this Crush release. %w", catwalkUrl, err)
+ return nil, fmt.Errorf("Crush was unable to fetch an updated list of providers from %s. Consider setting CRUSH_DISABLE_PROVIDER_AUTO_UPDATE=1 to use the embedded providers bundled at the time of this Crush release. You can also update providers manually. For more info see crush update-providers --help. %w", catwalkUrl, err) //nolint:staticcheck
}
return providers, nil
}
diff --git a/internal/config/provider_empty_test.go b/internal/config/provider_empty_test.go
index 3cd55ae7921171a580dccc91aa1d22d2f7934271..f3691c320ad4e3509b327374c8ce7f5285c39590 100644
--- a/internal/config/provider_empty_test.go
+++ b/internal/config/provider_empty_test.go
@@ -20,7 +20,7 @@ func TestProvider_loadProvidersEmptyResult(t *testing.T) {
tmpPath := t.TempDir() + "/providers.json"
providers, err := loadProviders(false, client, tmpPath)
- require.Contains(t, err.Error(), "crush was unable to fetch an updated list of providers")
+ require.Contains(t, err.Error(), "Crush was unable to fetch an updated list of providers")
require.Empty(t, providers)
require.Len(t, providers, 0)
From 8c0d7b692f27ba8cfef0e76e92e6c89a78aa37a8 Mon Sep 17 00:00:00 2001
From: Christian Rocha
Date: Fri, 12 Sep 2025 20:58:23 -0400
Subject: [PATCH 071/236] docs(readme): copyedits to provider disabling section
---
README.md | 65 +++++++++++++++++++++++++++++++------------------------
1 file changed, 37 insertions(+), 28 deletions(-)
diff --git a/README.md b/README.md
index bed79137a9b1af4a1b200f02edae8f786b4d2bc0..497cac7205f941e05490efc6c04823a60dfa251d 100644
--- a/README.md
+++ b/README.md
@@ -157,8 +157,8 @@ Configuration itself is stored as a JSON object:
```json
{
- "this-setting": {"this": "that"},
- "that-setting": ["ceci", "cela"]
+ "this-setting": { "this": "that" },
+ "that-setting": ["ceci", "cela"]
}
```
@@ -390,9 +390,9 @@ Custom Anthropic-compatible providers follow this format:
Crush currently supports running Anthropic models through Bedrock, with caching disabled.
-* A Bedrock provider will appear once you have AWS configured, i.e. `aws configure`
-* Crush also expects the `AWS_REGION` or `AWS_DEFAULT_REGION` to be set
-* To use a specific AWS profile set `AWS_PROFILE` in your environment, i.e. `AWS_PROFILE=myprofile crush`
+- A Bedrock provider will appear once you have AWS configured, i.e. `aws configure`
+- Crush also expects the `AWS_REGION` or `AWS_DEFAULT_REGION` to be set
+- To use a specific AWS profile set `AWS_PROFILE` in your environment, i.e. `AWS_PROFILE=myprofile crush`
### Vertex AI Platform
@@ -428,17 +428,6 @@ To add specific models to the configuration, configure as such:
}
```
-## A Note on Claude Max and GitHub Copilot
-
-Crush only supports model providers through official, compliant APIs. We do not
-support or endorse any methods that rely on personal Claude Max and GitHub Copilot
-accounts or OAuth workarounds, which may violate Anthropic and Microsoft’s
-Terms of Service.
-
-We’re committed to building sustainable, trusted integrations with model
-providers. If you’re a provider interested in working with us,
-[reach out](mailto:vt100@charm.sh).
-
## Logging
Sometimes you need to look at logs. Luckily, Crush logs all sorts of
@@ -470,14 +459,20 @@ config:
}
```
-## Disabling providers auto-update
+## Disabling Provider Auto-Updates
-Crush automatically fetches the updated list of providers and models from
-[Catwalk](https://github.com/charmbracelet/catwalk). We know some environments
-have restricted internet access, so if you need to disable this you have two
-options:
+By default, Crush automatically checks for the latest and greatest list of
+providers and models from [Catwalk](https://github.com/charmbracelet/catwalk),
+the open source Crush provider database. This means that when new providers and
+models are available, or when model metadata changes, Crush automatically
+updates your local configuration.
-Set `disable_provider_auto_update` into your `crush.json` config:
+For those with restricted internet access, or those who prefer to work in
+air-gapped environments, this might not be what you want, and this feature can
+be disabled.
+
+To disable automatic provider updates, set `disable_provider_auto_update` in
+your `crush.json` config:
```json
{
@@ -488,15 +483,16 @@ Set `disable_provider_auto_update` into your `crush.json` config:
}
```
-Or alternatively set the `CRUSH_DISABLE_PROVIDER_AUTO_UPDATE` environment
-variable:
+Or set the `CRUSH_DISABLE_PROVIDER_AUTO_UPDATE` environment variable:
```bash
export CRUSH_DISABLE_PROVIDER_AUTO_UPDATE=1
```
-With that set, you'll have to manually update providers from time to time. It's
-possible to do that with the `crush update-providers` command:
+### Manually updating providers
+
+Manually updating providers is possible with the `crush update-providers`
+command:
```bash
# Update providers remotely from Catwalk.
@@ -508,11 +504,24 @@ crush update-providers https://example.com/
# Update providers from a local file.
crush update-providers /path/to/local-providers.json
-# Update providers from the embedded version.
-# (Crush ships with an embedded copy of Catwalk at the time of the release).
+# Reset providers to the embedded version, embedded in Crush at build time.
crush update-providers embedded
+
+# For more info:
+crush update-providers --help
```
+## A Note on Claude Max and GitHub Copilot
+
+Crush only supports model providers through official, compliant APIs. We do not
+support or endorse any methods that rely on personal Claude Max and GitHub
+Copilot accounts or OAuth workarounds, which violate Anthropic and
+Microsoft’s Terms of Service.
+
+We’re committed to building sustainable, trusted integrations with model
+providers. If you’re a provider interested in working with us,
+[reach out](mailto:vt100@charm.sh).
+
## Whatcha think?
We’d love to hear your thoughts on this project. Need help? We gotchu. You can find us on:
From 544f3b722a53d4fb72424c795f5dd70d196b6355 Mon Sep 17 00:00:00 2001
From: Raphael Amorim
Date: Sat, 13 Sep 2025 21:03:10 +0200
Subject: [PATCH 072/236] feat: fix too many open files issue (#1033)
* feat: fix too many open files issue
* fix: go.sum
* chore: cleanup go.mod
---------
Co-authored-by: Christian Rocha
Co-authored-by: kujtimiihoxha
---
go.mod | 3 +-
go.sum | 3 +
internal/lsp/watcher/global_watcher.go | 237 +++++++++-----------
internal/lsp/watcher/global_watcher_test.go | 143 ++++++------
4 files changed, 184 insertions(+), 202 deletions(-)
diff --git a/go.mod b/go.mod
index c98a8dadf7eea28938015e8e82271527b2c5a5d8..ff4eab1623e5a73e91d403ad89dc7b40150112eb 100644
--- a/go.mod
+++ b/go.mod
@@ -23,7 +23,6 @@ require (
github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a
github.com/disintegration/imageorient v0.0.0-20180920195336-8147d86e83ec
- github.com/fsnotify/fsnotify v1.9.0
github.com/google/uuid v1.6.0
github.com/invopop/jsonschema v0.13.0
github.com/joho/godotenv v1.5.1
@@ -85,6 +84,7 @@ require (
github.com/dlclark/regexp2 v1.11.5 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
+ github.com/fsnotify/fsnotify v1.9.0 // indirect
github.com/go-logfmt/logfmt v0.6.0 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
@@ -113,6 +113,7 @@ require (
github.com/pierrec/lz4/v4 v4.1.22 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/rivo/uniseg v0.4.7
+ github.com/rjeczalik/notify v0.9.3
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect
github.com/spf13/cast v1.7.1 // indirect
diff --git a/go.sum b/go.sum
index 24bcee6f9da82ac0e9d380d7b21e048e1efc45b4..74a8425c0431ce926f07e0bb3c3bd7ac540367c7 100644
--- a/go.sum
+++ b/go.sum
@@ -237,6 +237,8 @@ github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qq
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
+github.com/rjeczalik/notify v0.9.3 h1:6rJAzHTGKXGj76sbRgDiDcYj/HniypXmSJo1SWakZeY=
+github.com/rjeczalik/notify v0.9.3/go.mod h1:gF3zSOrafR9DQEWSE8TjfI9NkooDxbyT4UgRGKZA0lc=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
@@ -361,6 +363,7 @@ golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
+golang.org/x/sys v0.0.0-20180926160741-c2ed4eda69e7/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
diff --git a/internal/lsp/watcher/global_watcher.go b/internal/lsp/watcher/global_watcher.go
index 29b19f316ba0f654ae779526b5926b1fe9785819..0e60027daee654483f8fd7fb54a76587455ba5cf 100644
--- a/internal/lsp/watcher/global_watcher.go
+++ b/internal/lsp/watcher/global_watcher.go
@@ -5,6 +5,7 @@ import (
"fmt"
"log/slog"
"os"
+ "path/filepath"
"sync"
"sync/atomic"
"time"
@@ -13,22 +14,23 @@ import (
"github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/lsp/protocol"
- "github.com/fsnotify/fsnotify"
+ "github.com/rjeczalik/notify"
)
-// global manages a single fsnotify.Watcher instance shared across all LSP clients.
+// global manages file watching shared across all LSP clients.
//
-// IMPORTANT: This implementation only watches directories, not individual files.
-// The fsnotify library automatically provides events for all files within watched
-// directories, making this approach much more efficient than watching individual files.
+// IMPORTANT: This implementation uses github.com/rjeczalik/notify which provides
+// recursive watching on all platforms. On macOS it uses FSEvents, on Linux it
+// uses inotify (with recursion handled by the library), and on Windows it uses
+// ReadDirectoryChangesW.
//
-// Key benefits of directory-only watching:
-// - Significantly fewer file descriptors used
-// - Automatic coverage of new files created in watched directories
-// - Better performance with large codebases
-// - fsnotify handles deduplication internally (no need to track watched dirs)
+// Key benefits:
+// - Single watch point for entire directory tree
+// - Automatic recursive watching without manually adding subdirectories
+// - No file descriptor exhaustion issues
type global struct {
- watcher *fsnotify.Watcher
+ // Channel for receiving file system events
+ events chan notify.EventInfo
// Map of workspace watchers by client name
watchers *csync.Map[string, *Client]
@@ -54,6 +56,7 @@ type global struct {
var instance = sync.OnceValue(func() *global {
ctx, cancel := context.WithCancel(context.Background())
gw := &global{
+ events: make(chan notify.EventInfo, 4096), // Large buffer to prevent dropping events
watchers: csync.NewMap[string, *Client](),
debounceTime: 300 * time.Millisecond,
debounceMap: csync.NewMap[string, *time.Timer](),
@@ -61,15 +64,6 @@ var instance = sync.OnceValue(func() *global {
cancel: cancel,
}
- // Initialize the fsnotify watcher
- watcher, err := fsnotify.NewWatcher()
- if err != nil {
- slog.Error("lsp watcher: Failed to create global file watcher", "error", err)
- return gw
- }
-
- gw.watcher = watcher
-
return gw
})
@@ -85,11 +79,11 @@ func (gw *global) unregister(name string) {
slog.Debug("lsp watcher: Unregistered workspace watcher", "name", name)
}
-// Start walks the given path and sets up the watcher on it.
+// Start sets up recursive watching on the workspace root.
//
-// Note: We only watch directories, not individual files. fsnotify automatically provides
-// events for all files within watched directories. Multiple calls with the same workspace
-// are safe since fsnotify handles directory deduplication internally.
+// Note: We use github.com/rjeczalik/notify which provides recursive watching
+// with a single watch point. The "..." suffix means watch recursively.
+// This is much more efficient than manually walking and watching each directory.
func Start() error {
gw := instance()
@@ -107,59 +101,33 @@ func Start() error {
gw.root = root
gw.started.Store(true)
- // Start the event processing goroutine now that we're initialized
+ // Start the event processing goroutine
gw.wg.Add(1)
go gw.processEvents()
- // Walk the workspace and add only directories to the watcher
- // fsnotify will automatically provide events for all files within these directories
- // Multiple calls with the same directories are safe (fsnotify deduplicates)
- err := fsext.WalkDirectories(root, func(path string, d os.DirEntry, err error) error {
- if err != nil {
- return err
- }
-
- // Add directory to watcher (fsnotify handles deduplication automatically)
- if err := gw.addDirectoryToWatcher(path); err != nil {
- slog.Error("lsp watcher: Error watching directory", "path", path, "error", err)
- }
-
- return nil
- })
- if err != nil {
- return fmt.Errorf("lsp watcher: error walking workspace %s: %w", root, err)
- }
-
- return nil
-}
+ // Set up recursive watching on the root directory
+ // The "..." suffix tells notify to watch recursively
+ watchPath := filepath.Join(root, "...")
-// addDirectoryToWatcher adds a directory to the fsnotify watcher.
-// fsnotify handles deduplication internally, so we don't need to track watched directories.
-func (gw *global) addDirectoryToWatcher(dirPath string) error {
- if gw.watcher == nil {
- return fmt.Errorf("lsp watcher: global watcher not initialized")
- }
+ // Watch for all event types we care about
+ events := notify.Create | notify.Write | notify.Remove | notify.Rename
- // Add directory to fsnotify watcher - fsnotify handles deduplication
- // "A path can only be watched once; watching it more than once is a no-op"
- err := gw.watcher.Add(dirPath)
- if err != nil {
- return fmt.Errorf("lsp watcher: failed to watch directory %s: %w", dirPath, err)
+ if err := notify.Watch(watchPath, gw.events, events); err != nil {
+ return fmt.Errorf("lsp watcher: error setting up recursive watch on %s: %w", root, err)
}
- slog.Debug("lsp watcher: watching directory", "path", dirPath)
+ slog.Info("lsp watcher: Started recursive watching", "root", root)
return nil
}
-// processEvents processes file system events and handles them centrally.
-// Since we only watch directories, we automatically get events for all files
-// within those directories. When new directories are created, we add them
-// to the watcher to ensure complete coverage.
+// processEvents processes file system events from the notify library.
+// Since notify handles recursive watching for us, we don't need to manually
+// add new directories - they're automatically included.
func (gw *global) processEvents() {
defer gw.wg.Done()
cfg := config.Get()
- if gw.watcher == nil || !gw.started.Load() {
+ if !gw.started.Load() {
slog.Error("lsp watcher: Global watcher not initialized")
return
}
@@ -169,68 +137,89 @@ func (gw *global) processEvents() {
case <-gw.ctx.Done():
return
- case event, ok := <-gw.watcher.Events:
+ case event, ok := <-gw.events:
if !ok {
return
}
- // Handle directory creation globally (only once)
- // When new directories are created, we need to add them to the watcher
- // to ensure we get events for files created within them
- if event.Op&fsnotify.Create != 0 {
- if info, err := os.Stat(event.Name); err == nil && info.IsDir() {
- if !fsext.ShouldExcludeFile(gw.root, event.Name) {
- if err := gw.addDirectoryToWatcher(event.Name); err != nil {
- slog.Error("lsp watcher: Error adding new directory to watcher", "path", event.Name, "error", err)
- }
- } else if cfg != nil && cfg.Options.DebugLSP {
- slog.Debug("lsp watcher: Skipping ignored new directory", "path", event.Name)
- }
- }
+ path := event.Path()
+
+ // Skip ignored files
+ if fsext.ShouldExcludeFile(gw.root, path) {
+ continue
}
if cfg != nil && cfg.Options.DebugLSP {
- slog.Debug("lsp watcher: Global watcher received event", "path", event.Name, "op", event.Op.String())
+ slog.Debug("lsp watcher: Global watcher received event", "path", path, "event", event.Event().String())
}
- // Process the event centrally
+ // Convert notify event to our internal format and handle it
gw.handleFileEvent(event)
-
- case err, ok := <-gw.watcher.Errors:
- if !ok {
- return
- }
- slog.Error("lsp watcher: Global watcher error", "error", err)
}
}
}
// handleFileEvent processes a file system event and distributes notifications to relevant clients
-func (gw *global) handleFileEvent(event fsnotify.Event) {
+func (gw *global) handleFileEvent(event notify.EventInfo) {
cfg := config.Get()
- uri := string(protocol.URIFromPath(event.Name))
-
- // Handle file creation for all relevant clients (only once)
- if event.Op&fsnotify.Create != 0 {
- if info, err := os.Stat(event.Name); err == nil && !info.IsDir() {
- if !fsext.ShouldExcludeFile(gw.root, event.Name) {
- gw.openMatchingFileForClients(event.Name)
+ path := event.Path()
+ uri := string(protocol.URIFromPath(path))
+
+ // Map notify events to our change types
+ var changeType protocol.FileChangeType
+ var watchKindNeeded protocol.WatchKind
+
+ switch event.Event() {
+ case notify.Create:
+ changeType = protocol.FileChangeType(protocol.Created)
+ watchKindNeeded = protocol.WatchCreate
+ // Handle file creation for all relevant clients
+ if !isDir(path) && !fsext.ShouldExcludeFile(gw.root, path) {
+ gw.openMatchingFileForClients(path)
+ }
+ case notify.Write:
+ changeType = protocol.FileChangeType(protocol.Changed)
+ watchKindNeeded = protocol.WatchChange
+ case notify.Remove:
+ changeType = protocol.FileChangeType(protocol.Deleted)
+ watchKindNeeded = protocol.WatchDelete
+ case notify.Rename:
+ // Treat rename as delete + create
+ // First handle as delete
+ for _, watcher := range gw.watchers.Seq2() {
+ if !watcher.client.HandlesFile(path) {
+ continue
}
+ if watched, watchKind := watcher.isPathWatched(path); watched {
+ if watchKind&protocol.WatchDelete != 0 {
+ gw.handleFileEventForClient(watcher, uri, protocol.FileChangeType(protocol.Deleted))
+ }
+ }
+ }
+ // Then check if renamed file exists and treat as create
+ if !isDir(path) {
+ changeType = protocol.FileChangeType(protocol.Created)
+ watchKindNeeded = protocol.WatchCreate
+ } else {
+ return // Already handled delete, nothing more to do for directories
}
+ default:
+ // Unknown event type, skip
+ return
}
// Process the event for each relevant client
for client, watcher := range gw.watchers.Seq2() {
- if !watcher.client.HandlesFile(event.Name) {
+ if !watcher.client.HandlesFile(path) {
continue // client doesn't handle this filetype
}
// Debug logging per client
if cfg.Options.DebugLSP {
- matched, kind := watcher.isPathWatched(event.Name)
+ matched, kind := watcher.isPathWatched(path)
slog.Debug("lsp watcher: File event for client",
- "path", event.Name,
- "operation", event.Op.String(),
+ "path", path,
+ "event", event.Event().String(),
"watched", matched,
"kind", kind,
"client", client,
@@ -238,46 +227,31 @@ func (gw *global) handleFileEvent(event fsnotify.Event) {
}
// Check if this path should be watched according to server registrations
- if watched, watchKind := watcher.isPathWatched(event.Name); watched {
- switch {
- case event.Op&fsnotify.Write != 0:
- if watchKind&protocol.WatchChange != 0 {
- gw.debounceHandleFileEventForClient(watcher, uri, protocol.FileChangeType(protocol.Changed))
- }
- case event.Op&fsnotify.Create != 0:
- // File creation was already handled globally above
- // Just send the notification if needed
- info, err := os.Stat(event.Name)
- if err != nil {
- if !os.IsNotExist(err) {
- slog.Debug("lsp watcher: Error getting file info", "path", event.Name, "error", err)
- }
+ if watched, watchKind := watcher.isPathWatched(path); watched {
+ if watchKind&watchKindNeeded != 0 {
+ // Skip directory events for non-delete operations
+ if changeType != protocol.FileChangeType(protocol.Deleted) && isDir(path) {
continue
}
- if !info.IsDir() && watchKind&protocol.WatchCreate != 0 {
- gw.debounceHandleFileEventForClient(watcher, uri, protocol.FileChangeType(protocol.Created))
- }
- case event.Op&fsnotify.Remove != 0:
- if watchKind&protocol.WatchDelete != 0 {
- gw.handleFileEventForClient(watcher, uri, protocol.FileChangeType(protocol.Deleted))
- }
- case event.Op&fsnotify.Rename != 0:
- // For renames, first delete
- if watchKind&protocol.WatchDelete != 0 {
- gw.handleFileEventForClient(watcher, uri, protocol.FileChangeType(protocol.Deleted))
- }
- // Then check if the new file exists and create an event
- if info, err := os.Stat(event.Name); err == nil && !info.IsDir() {
- if watchKind&protocol.WatchCreate != 0 {
- gw.debounceHandleFileEventForClient(watcher, uri, protocol.FileChangeType(protocol.Created))
- }
+ if changeType == protocol.FileChangeType(protocol.Deleted) {
+ // Don't debounce deletes
+ gw.handleFileEventForClient(watcher, uri, changeType)
+ } else {
+ // Debounce creates and changes
+ gw.debounceHandleFileEventForClient(watcher, uri, changeType)
}
}
}
}
}
+// isDir checks if a path is a directory
+func isDir(path string) bool {
+ info, err := os.Stat(path)
+ return err == nil && info.IsDir()
+}
+
// openMatchingFileForClients opens a newly created file for all clients that handle it (only once per file)
func (gw *global) openMatchingFileForClients(path string) {
// Skip directories
@@ -349,10 +323,9 @@ func (gw *global) shutdown() {
gw.cancel()
}
- if gw.watcher != nil {
- gw.watcher.Close()
- gw.watcher = nil
- }
+ // Stop watching and close the event channel
+ notify.Stop(gw.events)
+ close(gw.events)
gw.wg.Wait()
slog.Debug("lsp watcher: Global watcher shutdown complete")
diff --git a/internal/lsp/watcher/global_watcher_test.go b/internal/lsp/watcher/global_watcher_test.go
index 09124cd6a570b9b46b003b06b5f76dcbcbef22ff..cfbe8a51fb9be09fdfdc9f37be830c92d6b6eab8 100644
--- a/internal/lsp/watcher/global_watcher_test.go
+++ b/internal/lsp/watcher/global_watcher_test.go
@@ -8,7 +8,7 @@ import (
"time"
"github.com/charmbracelet/crush/internal/csync"
- "github.com/fsnotify/fsnotify"
+ "github.com/rjeczalik/notify"
)
func TestGlobalWatcher(t *testing.T) {
@@ -60,15 +60,8 @@ func TestGlobalWatcherWorkspaceIdempotent(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- // Create a real fsnotify watcher for testing
- watcher, err := fsnotify.NewWatcher()
- if err != nil {
- t.Fatalf("Failed to create fsnotify watcher: %v", err)
- }
- defer watcher.Close()
-
gw := &global{
- watcher: watcher,
+ events: make(chan notify.EventInfo, 100),
watchers: csync.NewMap[string, *Client](),
debounceTime: 300 * time.Millisecond,
debounceMap: csync.NewMap[string, *time.Timer](),
@@ -77,26 +70,31 @@ func TestGlobalWatcherWorkspaceIdempotent(t *testing.T) {
}
// Test that watching the same workspace multiple times is safe (idempotent)
- err1 := gw.addDirectoryToWatcher(tempDir)
+ // With notify, we use recursive watching with "..."
+ watchPath := filepath.Join(tempDir, "...")
+
+ err1 := notify.Watch(watchPath, gw.events, notify.All)
if err1 != nil {
- t.Fatalf("First addDirectoryToWatcher call failed: %v", err1)
+ t.Fatalf("First Watch call failed: %v", err1)
}
+ defer notify.Stop(gw.events)
- err2 := gw.addDirectoryToWatcher(tempDir)
+ // Watching the same path again should be safe (notify handles this)
+ err2 := notify.Watch(watchPath, gw.events, notify.All)
if err2 != nil {
- t.Fatalf("Second addDirectoryToWatcher call failed: %v", err2)
+ t.Fatalf("Second Watch call failed: %v", err2)
}
- err3 := gw.addDirectoryToWatcher(tempDir)
+ err3 := notify.Watch(watchPath, gw.events, notify.All)
if err3 != nil {
- t.Fatalf("Third addDirectoryToWatcher call failed: %v", err3)
+ t.Fatalf("Third Watch call failed: %v", err3)
}
- // All calls should succeed - fsnotify handles deduplication internally
- // This test verifies that multiple WatchWorkspace calls are safe
+ // All calls should succeed - notify handles deduplication internally
+ // This test verifies that multiple Watch calls are safe
}
-func TestGlobalWatcherOnlyWatchesDirectories(t *testing.T) {
+func TestGlobalWatcherRecursiveWatching(t *testing.T) {
t.Parallel()
// Create a temporary directory structure for testing
@@ -120,29 +118,24 @@ func TestGlobalWatcherOnlyWatchesDirectories(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- // Create a real fsnotify watcher for testing
- watcher, err := fsnotify.NewWatcher()
- if err != nil {
- t.Fatalf("Failed to create fsnotify watcher: %v", err)
- }
- defer watcher.Close()
-
gw := &global{
- watcher: watcher,
+ events: make(chan notify.EventInfo, 100),
watchers: csync.NewMap[string, *Client](),
debounceTime: 300 * time.Millisecond,
debounceMap: csync.NewMap[string, *time.Timer](),
ctx: ctx,
cancel: cancel,
+ root: tempDir,
}
- // Watch the workspace
- err = gw.addDirectoryToWatcher(tempDir)
- if err != nil {
- t.Fatalf("addDirectoryToWatcher failed: %v", err)
+ // Set up recursive watching on the root directory
+ watchPath := filepath.Join(tempDir, "...")
+ if err := notify.Watch(watchPath, gw.events, notify.All); err != nil {
+ t.Fatalf("Failed to set up recursive watch: %v", err)
}
+ defer notify.Stop(gw.events)
- // Verify that our expected directories exist and can be watched
+ // Verify that our expected directories and files exist
expectedDirs := []string{tempDir, subDir}
for _, expectedDir := range expectedDirs {
@@ -153,15 +146,9 @@ func TestGlobalWatcherOnlyWatchesDirectories(t *testing.T) {
if !info.IsDir() {
t.Fatalf("Expected %s to be a directory, but it's not", expectedDir)
}
-
- // Try to add it again - fsnotify should handle this gracefully
- err = gw.addDirectoryToWatcher(expectedDir)
- if err != nil {
- t.Fatalf("Failed to add directory %s to watcher: %v", expectedDir, err)
- }
}
- // Verify that files exist but we don't try to watch them directly
+ // Verify that files exist
testFiles := []string{file1, file2}
for _, file := range testFiles {
info, err := os.Stat(file)
@@ -172,39 +159,61 @@ func TestGlobalWatcherOnlyWatchesDirectories(t *testing.T) {
t.Fatalf("Expected %s to be a file, but it's a directory", file)
}
}
+
+ // Create a new file in the subdirectory to test recursive watching
+ newFile := filepath.Join(subDir, "new.txt")
+ if err := os.WriteFile(newFile, []byte("new content"), 0o644); err != nil {
+ t.Fatalf("Failed to create new file: %v", err)
+ }
+
+ // We should receive an event for the file creation
+ select {
+ case event := <-gw.events:
+ // On macOS, paths might have /private prefix, so we need to compare the real paths
+ eventPath, _ := filepath.EvalSymlinks(event.Path())
+ expectedPath, _ := filepath.EvalSymlinks(newFile)
+ if eventPath != expectedPath {
+ // Also try comparing just the base names as a fallback
+ if filepath.Base(event.Path()) != filepath.Base(newFile) {
+ t.Errorf("Expected event for %s, got %s", newFile, event.Path())
+ }
+ }
+ case <-time.After(2 * time.Second):
+ t.Fatal("Timeout waiting for file creation event")
+ }
}
-func TestFsnotifyDeduplication(t *testing.T) {
+func TestNotifyDeduplication(t *testing.T) {
t.Parallel()
// Create a temporary directory for testing
tempDir := t.TempDir()
- // Create a real fsnotify watcher
- watcher, err := fsnotify.NewWatcher()
- if err != nil {
- t.Fatalf("Failed to create fsnotify watcher: %v", err)
- }
- defer watcher.Close()
+ // Create an event channel
+ events := make(chan notify.EventInfo, 100)
+ defer close(events)
+
+ // Add the same directory multiple times with recursive watching
+ watchPath := filepath.Join(tempDir, "...")
- // Add the same directory multiple times
- err1 := watcher.Add(tempDir)
+ err1 := notify.Watch(watchPath, events, notify.All)
if err1 != nil {
- t.Fatalf("First Add failed: %v", err1)
+ t.Fatalf("First Watch failed: %v", err1)
}
+ defer notify.Stop(events)
- err2 := watcher.Add(tempDir)
+ err2 := notify.Watch(watchPath, events, notify.All)
if err2 != nil {
- t.Fatalf("Second Add failed: %v", err2)
+ t.Fatalf("Second Watch failed: %v", err2)
}
- err3 := watcher.Add(tempDir)
+ err3 := notify.Watch(watchPath, events, notify.All)
if err3 != nil {
- t.Fatalf("Third Add failed: %v", err3)
+ t.Fatalf("Third Watch failed: %v", err3)
}
- // All should succeed - fsnotify handles deduplication internally
- // This test verifies the fsnotify behavior we're relying on
+ // All should succeed - notify handles deduplication internally
+ // This test verifies the notify behavior we're relying on
}
func TestGlobalWatcherRespectsIgnoreFiles(t *testing.T) {
@@ -241,31 +250,26 @@ func TestGlobalWatcherRespectsIgnoreFiles(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- // Create a real fsnotify watcher for testing
- watcher, err := fsnotify.NewWatcher()
- if err != nil {
- t.Fatalf("Failed to create fsnotify watcher: %v", err)
- }
- defer watcher.Close()
-
gw := &global{
- watcher: watcher,
+ events: make(chan notify.EventInfo, 100),
watchers: csync.NewMap[string, *Client](),
debounceTime: 300 * time.Millisecond,
debounceMap: csync.NewMap[string, *time.Timer](),
ctx: ctx,
cancel: cancel,
+ root: tempDir,
}
- // Watch the workspace
- err = gw.addDirectoryToWatcher(tempDir)
- if err != nil {
- t.Fatalf("addDirectoryToWatcher failed: %v", err)
+ // Set up recursive watching
+ watchPath := filepath.Join(tempDir, "...")
+ if err := notify.Watch(watchPath, gw.events, notify.All); err != nil {
+ t.Fatalf("Failed to set up recursive watch: %v", err)
}
+ defer notify.Stop(gw.events)
- // This test verifies that the watcher can successfully add directories to fsnotify
- // The actual ignore logic is tested in the fsext package
- // Here we just verify that the watcher integration works
+ // The notify library watches everything, but our processEvents
+ // function should filter out ignored files using fsext.ShouldExcludeFile
+ // This test verifies that the structure is set up correctly
}
func TestGlobalWatcherShutdown(t *testing.T) {
@@ -277,6 +281,7 @@ func TestGlobalWatcherShutdown(t *testing.T) {
// Create a temporary global watcher for testing
gw := &global{
+ events: make(chan notify.EventInfo, 100),
watchers: csync.NewMap[string, *Client](),
debounceTime: 300 * time.Millisecond,
debounceMap: csync.NewMap[string, *time.Timer](),
From 18ea1c976a933abc8f716770d43943d70c2b0bca Mon Sep 17 00:00:00 2001
From: Vadim Inshakov
Date: Sat, 13 Sep 2025 22:32:58 +0500
Subject: [PATCH 073/236] fix: handle ctx cancel event
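Within the `case <-ctx.Done():` branch, `ctx.Err()` is always non-nil once the
context has been cancelled, so the previous `ctx.Err() == nil` guard meant the
cancellation error was never forwarded to `eventChan`. A minimal, self-contained
sketch of the behaviour this fix relies on (names here are illustrative, not
from the provider code):

    package main

    import (
        "context"
        "fmt"
    )

    func main() {
        ctx, cancel := context.WithCancel(context.Background())
        cancel() // simulate the user cancelling the stream

        <-ctx.Done()
        // Once Done fires, ctx.Err() reports context.Canceled (or
        // DeadlineExceeded), which is why the stream loop must check
        // `ctx.Err() != nil` before emitting the error event.
        fmt.Println(ctx.Err()) // prints "context canceled"
    }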
---
internal/llm/provider/openai.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index bfdeda2bc1cd0738b3370a2132d4945ad64d9fb5..8d49062044af2c3c2e8663d8d281d0e27ac1a1b1 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -483,7 +483,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
select {
case <-ctx.Done():
// context cancelled
- if ctx.Err() == nil {
+ if ctx.Err() != nil {
eventChan <- ProviderEvent{Type: EventError, Error: ctx.Err()}
}
close(eventChan)
From 3d79848146ef5a88b30711f6efca026ca8a776dd Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Sun, 14 Sep 2025 22:16:17 -0300
Subject: [PATCH 074/236] chore(legal): @WhiskeyJack96 has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index 53b95be41fa0ad52087a63aba18bc241cbf20d68..921eb84ef7d2af9e45622bfe9723dfe883d26a1d 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -607,6 +607,14 @@
"created_at": "2025-09-12T15:00:12Z",
"repoId": 987670088,
"pullRequestNo": 1025
+ },
+ {
+ "name": "WhiskeyJack96",
+ "id": 10688621,
+ "comment_id": 3290164209,
+ "created_at": "2025-09-15T01:16:08Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1037
}
]
}
\ No newline at end of file
From 480dc028e550dedd52b1c0e0f852cae202cebd35 Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Mon, 15 Sep 2025 02:42:41 -0300
Subject: [PATCH 075/236] chore(legal): @Grin1024 has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index 921eb84ef7d2af9e45622bfe9723dfe883d26a1d..34f0772e708ac062a7f4da8f0d299ca67558bb30 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -615,6 +615,14 @@
"created_at": "2025-09-15T01:16:08Z",
"repoId": 987670088,
"pullRequestNo": 1037
+ },
+ {
+ "name": "Grin1024",
+ "id": 34613592,
+ "comment_id": 3290570050,
+ "created_at": "2025-09-15T05:42:29Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1042
}
]
}
\ No newline at end of file
From a7766d99538f06e28ecdfeb08a3286c30ec94647 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Mon, 15 Sep 2025 09:01:49 -0300
Subject: [PATCH 076/236] chore: remove anim example (#1045)
There's no reason for that to be there.
With it removed, `go run ./...` will work :)
Signed-off-by: Carlos Alexandro Becker
---
internal/tui/components/anim/example/main.go | 90 --------------------
1 file changed, 90 deletions(-)
delete mode 100644 internal/tui/components/anim/example/main.go
diff --git a/internal/tui/components/anim/example/main.go b/internal/tui/components/anim/example/main.go
deleted file mode 100644
index 0bf47654ecbeeb3293c8ad59b40ec35016607b1c..0000000000000000000000000000000000000000
--- a/internal/tui/components/anim/example/main.go
+++ /dev/null
@@ -1,90 +0,0 @@
-package main
-
-import (
- "fmt"
- "image/color"
- "os"
-
- tea "github.com/charmbracelet/bubbletea/v2"
- anim "github.com/charmbracelet/crush/internal/tui/components/anim"
- "github.com/charmbracelet/crush/internal/tui/styles"
- "github.com/charmbracelet/lipgloss/v2"
-)
-
-type model struct {
- anim tea.Model
- bgColor color.Color
- quitting bool
- w, h int
-}
-
-func (m model) Init() tea.Cmd {
- return m.anim.Init()
-}
-
-func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
- switch msg := msg.(type) {
- case tea.WindowSizeMsg:
- m.w, m.h = msg.Width, msg.Height
- return m, nil
- case tea.KeyMsg:
- switch msg.String() {
- case "q", "ctrl+c":
- m.quitting = true
- return m, tea.Quit
- default:
- return m, nil
- }
- case anim.StepMsg:
- var cmd tea.Cmd
- m.anim, cmd = m.anim.Update(msg)
- return m, cmd
- default:
- return m, nil
- }
-}
-
-func (m model) View() tea.View {
- if m.w == 0 || m.h == 0 {
- return tea.NewView("")
- }
-
- v := tea.NewView("")
- v.BackgroundColor = m.bgColor
-
- if m.quitting {
- return v
- }
-
- if a, ok := m.anim.(*anim.Anim); ok {
- l := lipgloss.NewLayer(a.View()).
- Width(a.Width()).
- X(m.w/2 - a.Width()/2).
- Y(m.h / 2)
-
- v = tea.NewView(lipgloss.NewCanvas(l))
- v.BackgroundColor = m.bgColor
- return v
- }
- return v
-}
-
-func main() {
- t := styles.CurrentTheme()
- p := tea.NewProgram(model{
- bgColor: t.BgBase,
- anim: anim.New(anim.Settings{
- Label: "Hello",
- Size: 50,
- LabelColor: t.FgBase,
- GradColorA: t.Primary,
- GradColorB: t.Secondary,
- CycleColors: true,
- }),
- }, tea.WithAltScreen())
-
- if _, err := p.Run(); err != nil {
- fmt.Fprintf(os.Stderr, "Uh oh: %v\n", err)
- os.Exit(1)
- }
-}
From fd6b617ad5ee1f6e50f4093eb6085e19a2d57894 Mon Sep 17 00:00:00 2001
From: Raphael Amorim
Date: Mon, 15 Sep 2025 14:03:59 +0200
Subject: [PATCH 077/236] fix: remove ulimit code as Go 1.19 automatically
 raises the file descriptor limit
---
internal/lsp/watcher/ulimit_bsd.go | 25 ----------------
internal/lsp/watcher/ulimit_darwin.go | 24 ----------------
internal/lsp/watcher/ulimit_fallback.go | 8 ------
internal/lsp/watcher/ulimit_linux.go | 25 ----------------
internal/lsp/watcher/ulimit_windows.go | 38 -------------------------
internal/lsp/watcher/watcher.go | 7 -----
6 files changed, 127 deletions(-)
delete mode 100644 internal/lsp/watcher/ulimit_bsd.go
delete mode 100644 internal/lsp/watcher/ulimit_darwin.go
delete mode 100644 internal/lsp/watcher/ulimit_fallback.go
delete mode 100644 internal/lsp/watcher/ulimit_linux.go
delete mode 100644 internal/lsp/watcher/ulimit_windows.go
diff --git a/internal/lsp/watcher/ulimit_bsd.go b/internal/lsp/watcher/ulimit_bsd.go
deleted file mode 100644
index 816e82adee5e57341b7e392e117b245a7ca4a0dc..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/ulimit_bsd.go
+++ /dev/null
@@ -1,25 +0,0 @@
-//go:build freebsd || openbsd || netbsd || dragonfly
-
-package watcher
-
-import "syscall"
-
-func Ulimit() (uint64, error) {
- var currentLimit uint64 = 0
- var rLimit syscall.Rlimit
- err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return 0, err
- }
- currentLimit = uint64(rLimit.Cur)
- rLimit.Cur = rLimit.Max / 10 * 8
- err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- err = syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- return uint64(rLimit.Cur), nil
-}
diff --git a/internal/lsp/watcher/ulimit_darwin.go b/internal/lsp/watcher/ulimit_darwin.go
deleted file mode 100644
index a53f143bd0341e5fc7ac95441c2246eb7ffb2ccb..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/ulimit_darwin.go
+++ /dev/null
@@ -1,24 +0,0 @@
-//go:build darwin
-
-package watcher
-
-import "syscall"
-
-func Ulimit() (uint64, error) {
- var rLimit syscall.Rlimit
- err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return 0, err
- }
- currentLimit := rLimit.Cur
- rLimit.Cur = rLimit.Max / 10 * 8
- err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- err = syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- return rLimit.Cur, nil
-}
diff --git a/internal/lsp/watcher/ulimit_fallback.go b/internal/lsp/watcher/ulimit_fallback.go
deleted file mode 100644
index 118554f25a34aa5921b1773c72d87dc3975324a7..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/ulimit_fallback.go
+++ /dev/null
@@ -1,8 +0,0 @@
-//go:build !linux && !darwin && !freebsd && !openbsd && !netbsd && !dragonfly && !windows
-
-package watcher
-
-func Ulimit() (uint64, error) {
- // Fallback for exotic systems - return a reasonable default
- return 2048, nil
-}
diff --git a/internal/lsp/watcher/ulimit_linux.go b/internal/lsp/watcher/ulimit_linux.go
deleted file mode 100644
index 298fcad96710eb106ee607ac823962450f892bf3..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/ulimit_linux.go
+++ /dev/null
@@ -1,25 +0,0 @@
-//go:build linux
-
-package watcher
-
-import "syscall"
-
-func Ulimit() (uint64, error) {
- var currentLimit uint64 = 0
- var rLimit syscall.Rlimit
- err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return 0, err
- }
- currentLimit = rLimit.Cur
- rLimit.Cur = rLimit.Max / 10 * 8
- err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- err = syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- return rLimit.Cur, nil
-}
diff --git a/internal/lsp/watcher/ulimit_windows.go b/internal/lsp/watcher/ulimit_windows.go
deleted file mode 100644
index 14afbabeea1ce4818bb59a3fc8c5e2ee1fa8432a..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/ulimit_windows.go
+++ /dev/null
@@ -1,38 +0,0 @@
-//go:build windows
-
-package watcher
-
-import (
- "syscall"
- "unsafe"
-
- "golang.org/x/sys/windows"
-)
-
-var (
- kernel32 = windows.NewLazyDLL("kernel32.dll")
- procGetProcessHandleCount = kernel32.NewProc("GetProcessHandleCount")
-)
-
-func Ulimit() (uint64, error) {
- // Windows doesn't have the same file descriptor limits as Unix systems
- // Instead, we can get the current handle count for monitoring purposes
- currentProcess := windows.CurrentProcess()
-
- var handleCount uint32
- ret, _, err := procGetProcessHandleCount.Call(
- uintptr(currentProcess),
- uintptr(unsafe.Pointer(&handleCount)),
- )
-
- if ret == 0 {
- // If the call failed, return a reasonable default
- if err != syscall.Errno(0) {
- return 2048, nil
- }
- }
-
- // Windows typically allows much higher handle counts than Unix file descriptors
- // Return the current count, which serves as a baseline for monitoring
- return uint64(handleCount), nil
-}
diff --git a/internal/lsp/watcher/watcher.go b/internal/lsp/watcher/watcher.go
index 139d144e1e5c65c11962e73201b42b15cd09f98a..18b790349a10f0827f45f8ccb9fb6968980a9d4e 100644
--- a/internal/lsp/watcher/watcher.go
+++ b/internal/lsp/watcher/watcher.go
@@ -28,13 +28,6 @@ type Client struct {
registrations *csync.Slice[protocol.FileSystemWatcher]
}
-func init() {
- // Ensure the watcher is initialized with a reasonable file limit
- if _, err := Ulimit(); err != nil {
- slog.Error("Error setting file limit", "error", err)
- }
-}
-
// New creates a new workspace watcher for the given client.
func New(name string, client *lsp.Client) *Client {
return &Client{
From d401aa3a4a21de982db8752fae5ad9a4d1745b68 Mon Sep 17 00:00:00 2001
From: Raphael Amorim
Date: Mon, 15 Sep 2025 14:19:22 +0200
Subject: [PATCH 078/236] fix: request MaximizeOpenFileLimit for unix
---
internal/lsp/watcher/global_watcher.go | 27 ++++++++++++
internal/lsp/watcher/rlimit_stub.go | 12 ++++++
internal/lsp/watcher/rlimit_unix.go | 57 ++++++++++++++++++++++++++
3 files changed, 96 insertions(+)
create mode 100644 internal/lsp/watcher/rlimit_stub.go
create mode 100644 internal/lsp/watcher/rlimit_unix.go
diff --git a/internal/lsp/watcher/global_watcher.go b/internal/lsp/watcher/global_watcher.go
index 0e60027daee654483f8fd7fb54a76587455ba5cf..7045cb04837b2fc33b0696c089f12adc220db587 100644
--- a/internal/lsp/watcher/global_watcher.go
+++ b/internal/lsp/watcher/global_watcher.go
@@ -2,12 +2,14 @@ package watcher
import (
"context"
+ "errors"
"fmt"
"log/slog"
"os"
"path/filepath"
"sync"
"sync/atomic"
+ "syscall"
"time"
"github.com/charmbracelet/crush/internal/config"
@@ -113,8 +115,24 @@ func Start() error {
events := notify.Create | notify.Write | notify.Remove | notify.Rename
if err := notify.Watch(watchPath, gw.events, events); err != nil {
+ // Check if the error might be due to file descriptor limits
+ if isFileLimitError(err) {
+ slog.Warn("lsp watcher: Hit file descriptor limit, attempting to increase", "error", err)
+ if newLimit, rlimitErr := MaximizeOpenFileLimit(); rlimitErr == nil {
+ slog.Info("lsp watcher: Increased file descriptor limit", "limit", newLimit)
+ // Retry the watch operation
+ if err = notify.Watch(watchPath, gw.events, events); err == nil {
+ slog.Info("lsp watcher: Successfully set up watch after increasing limit")
+ goto watchSuccess
+ }
+ err = fmt.Errorf("still failed after increasing limit: %w", err)
+ } else {
+ slog.Warn("lsp watcher: Failed to increase file descriptor limit", "error", rlimitErr)
+ }
+ }
return fmt.Errorf("lsp watcher: error setting up recursive watch on %s: %w", root, err)
}
+watchSuccess:
slog.Info("lsp watcher: Started recursive watching", "root", root)
return nil
@@ -335,3 +353,12 @@ func (gw *global) shutdown() {
func Shutdown() {
instance().shutdown()
}
+
+// isFileLimitError checks if an error is related to file descriptor limits
+func isFileLimitError(err error) bool {
+ if err == nil {
+ return false
+ }
+ // Check for common file limit errors
+ return errors.Is(err, syscall.EMFILE) || errors.Is(err, syscall.ENFILE)
+}
diff --git a/internal/lsp/watcher/rlimit_stub.go b/internal/lsp/watcher/rlimit_stub.go
new file mode 100644
index 0000000000000000000000000000000000000000..965e6bba89c82ca35331dcd1588e27da7aac29e7
--- /dev/null
+++ b/internal/lsp/watcher/rlimit_stub.go
@@ -0,0 +1,12 @@
+//go:build !unix
+
+package watcher
+
+// MaximizeOpenFileLimit is a no-op on non-Unix systems.
+// Returns a high value to indicate no practical limit.
+func MaximizeOpenFileLimit() (int, error) {
+ // Windows and other non-Unix systems don't have file descriptor limits
+ // in the same way Unix systems do. Return a high value to indicate
+ // there's no practical limit to worry about.
+ return 1<<20, nil // 1M, effectively unlimited
+}
\ No newline at end of file
diff --git a/internal/lsp/watcher/rlimit_unix.go b/internal/lsp/watcher/rlimit_unix.go
new file mode 100644
index 0000000000000000000000000000000000000000..29f99c4fdba2870ea5e8f8e68273e99c4e98e3de
--- /dev/null
+++ b/internal/lsp/watcher/rlimit_unix.go
@@ -0,0 +1,57 @@
+//go:build unix
+
+// This file contains code inspired by Syncthing's rlimit implementation
+// Syncthing is licensed under the Mozilla Public License Version 2.0
+// See: https://github.com/syncthing/syncthing/blob/main/LICENSE
+
+package watcher
+
+import (
+ "runtime"
+ "syscall"
+)
+
+const (
+ // macOS has a specific limit for RLIMIT_NOFILE
+ darwinOpenMax = 10240
+)
+
+// MaximizeOpenFileLimit tries to set the resource limit RLIMIT_NOFILE (number
+// of open file descriptors) to the max (hard limit), if the current (soft
+// limit) is below the max. Returns the new (though possibly unchanged) limit,
+// or an error if it could not be changed.
+func MaximizeOpenFileLimit() (int, error) {
+ // Get the current limit on number of open files.
+ var lim syscall.Rlimit
+ if err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &lim); err != nil {
+ return 0, err
+ }
+
+ // If we're already at max, there's no need to try to raise the limit.
+ if lim.Cur >= lim.Max {
+ return int(lim.Cur), nil
+ }
+
+ // macOS doesn't like a soft limit greater than OPEN_MAX
+ if runtime.GOOS == "darwin" && lim.Max > darwinOpenMax {
+ lim.Max = darwinOpenMax
+ }
+
+ // Try to increase the limit to the max.
+ oldLimit := lim.Cur
+ lim.Cur = lim.Max
+ if err := syscall.Setrlimit(syscall.RLIMIT_NOFILE, &lim); err != nil {
+ return int(oldLimit), err
+ }
+
+ // If the set succeeded, perform a new get to see what happened. We might
+ // have gotten a value lower than the one in lim.Max, if lim.Max was
+ // something that indicated "unlimited" (i.e. intmax).
+ if err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &lim); err != nil {
+ // We don't really know the correct value here since Getrlimit
+ // mysteriously failed after working once... Shouldn't ever happen.
+ return 0, err
+ }
+
+ return int(lim.Cur), nil
+}
From de3d46bbcc232b76d714ce7800c7559342df811d Mon Sep 17 00:00:00 2001
From: Raphael Amorim
Date: Mon, 15 Sep 2025 14:26:32 +0200
Subject: [PATCH 079/236] fix: make the limit really high on non-unix
---
internal/lsp/watcher/rlimit_stub.go | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/internal/lsp/watcher/rlimit_stub.go b/internal/lsp/watcher/rlimit_stub.go
index 965e6bba89c82ca35331dcd1588e27da7aac29e7..e016c403486b8d947a87edaf90e01643d01efed4 100644
--- a/internal/lsp/watcher/rlimit_stub.go
+++ b/internal/lsp/watcher/rlimit_stub.go
@@ -6,7 +6,7 @@ package watcher
// Returns a high value to indicate no practical limit.
func MaximizeOpenFileLimit() (int, error) {
// Windows and other non-Unix systems don't have file descriptor limits
- // in the same way Unix systems do. Return a high value to indicate
+ // in the same way Unix systems do. Return a very high value to indicate
// there's no practical limit to worry about.
- return 1<<20, nil // 1M, effectively unlimited
+ return 10_000_000, nil // 10M handles - way more than any process would use
}
\ No newline at end of file
From 8cae31408ea74778380e7a1ae35ae60e9d2a57ca Mon Sep 17 00:00:00 2001
From: Raphael Amorim
Date: Mon, 15 Sep 2025 14:30:03 +0200
Subject: [PATCH 080/236] fix: windows lint for number
---
internal/lsp/watcher/rlimit_stub.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/internal/lsp/watcher/rlimit_stub.go b/internal/lsp/watcher/rlimit_stub.go
index e016c403486b8d947a87edaf90e01643d01efed4..e60cf27d80bae21fe326f2b584dd32aac03130d4 100644
--- a/internal/lsp/watcher/rlimit_stub.go
+++ b/internal/lsp/watcher/rlimit_stub.go
@@ -8,5 +8,5 @@ func MaximizeOpenFileLimit() (int, error) {
// Windows and other non-Unix systems don't have file descriptor limits
// in the same way Unix systems do. Return a very high value to indicate
// there's no practical limit to worry about.
- return 10_000_000, nil // 10M handles - way more than any process would use
+ return 10000000, nil // 10M handles - way more than any process would use
}
\ No newline at end of file
From 7ff6ba95be162d727eb3b0059566192dfd5f321f Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Mon, 15 Sep 2025 09:55:14 -0300
Subject: [PATCH 081/236] refactor: make func unexported
---
internal/lsp/watcher/global_watcher.go | 2 +-
internal/lsp/watcher/rlimit_stub.go | 6 +++---
internal/lsp/watcher/rlimit_unix.go | 4 ++--
3 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/internal/lsp/watcher/global_watcher.go b/internal/lsp/watcher/global_watcher.go
index 7045cb04837b2fc33b0696c089f12adc220db587..5a546a7db45ffa006a97d9ba98d8816665a6efa1 100644
--- a/internal/lsp/watcher/global_watcher.go
+++ b/internal/lsp/watcher/global_watcher.go
@@ -118,7 +118,7 @@ func Start() error {
// Check if the error might be due to file descriptor limits
if isFileLimitError(err) {
slog.Warn("lsp watcher: Hit file descriptor limit, attempting to increase", "error", err)
- if newLimit, rlimitErr := MaximizeOpenFileLimit(); rlimitErr == nil {
+ if newLimit, rlimitErr := maximizeOpenFileLimit(); rlimitErr == nil {
slog.Info("lsp watcher: Increased file descriptor limit", "limit", newLimit)
// Retry the watch operation
if err = notify.Watch(watchPath, gw.events, events); err == nil {
diff --git a/internal/lsp/watcher/rlimit_stub.go b/internal/lsp/watcher/rlimit_stub.go
index e60cf27d80bae21fe326f2b584dd32aac03130d4..9e39467f21bf602c73fd124f799139e4b6cafc09 100644
--- a/internal/lsp/watcher/rlimit_stub.go
+++ b/internal/lsp/watcher/rlimit_stub.go
@@ -2,11 +2,11 @@
package watcher
-// MaximizeOpenFileLimit is a no-op on non-Unix systems.
+// maximizeOpenFileLimit is a no-op on non-Unix systems.
// Returns a high value to indicate no practical limit.
-func MaximizeOpenFileLimit() (int, error) {
+func maximizeOpenFileLimit() (int, error) {
// Windows and other non-Unix systems don't have file descriptor limits
// in the same way Unix systems do. Return a very high value to indicate
// there's no practical limit to worry about.
return 10000000, nil // 10M handles - way more than any process would use
-}
\ No newline at end of file
+}
diff --git a/internal/lsp/watcher/rlimit_unix.go b/internal/lsp/watcher/rlimit_unix.go
index 29f99c4fdba2870ea5e8f8e68273e99c4e98e3de..298f3d5b3004a032f0ce5cc592ed30e954fef3f9 100644
--- a/internal/lsp/watcher/rlimit_unix.go
+++ b/internal/lsp/watcher/rlimit_unix.go
@@ -16,11 +16,11 @@ const (
darwinOpenMax = 10240
)
-// MaximizeOpenFileLimit tries to set the resource limit RLIMIT_NOFILE (number
+// maximizeOpenFileLimit tries to set the resource limit RLIMIT_NOFILE (number
// of open file descriptors) to the max (hard limit), if the current (soft
// limit) is below the max. Returns the new (though possibly unchanged) limit,
// or an error if it could not be changed.
-func MaximizeOpenFileLimit() (int, error) {
+func maximizeOpenFileLimit() (int, error) {
// Get the current limit on number of open files.
var lim syscall.Rlimit
if err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &lim); err != nil {
From 9a132dcbf2d9019b66fca90112158ed9c1fb701e Mon Sep 17 00:00:00 2001
From: Raphael Amorim
Date: Mon, 15 Sep 2025 14:48:12 +0200
Subject: [PATCH 082/236] fix: introduce notify ignore files
---
go.mod | 2 +-
go.sum | 8 ++--
internal/lsp/watcher/global_watcher.go | 46 +++++++++++++++++----
internal/lsp/watcher/global_watcher_test.go | 2 +-
4 files changed, 44 insertions(+), 14 deletions(-)
diff --git a/go.mod b/go.mod
index ff4eab1623e5a73e91d403ad89dc7b40150112eb..7c29af04b932c44304bb76ec932c476ea1f89fe0 100644
--- a/go.mod
+++ b/go.mod
@@ -112,8 +112,8 @@ require (
github.com/ncruces/julianday v1.0.0 // indirect
github.com/pierrec/lz4/v4 v4.1.22 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
+ github.com/raphamorim/notify v0.9.3
github.com/rivo/uniseg v0.4.7
- github.com/rjeczalik/notify v0.9.3
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect
github.com/spf13/cast v1.7.1 // indirect
diff --git a/go.sum b/go.sum
index 74a8425c0431ce926f07e0bb3c3bd7ac540367c7..1a7f1fa4c6ab97f37f459cb9be75c281d65f61cc 100644
--- a/go.sum
+++ b/go.sum
@@ -86,7 +86,7 @@ github.com/charmbracelet/colorprofile v0.3.2 h1:9J27WdztfJQVAQKX2WOlSSRB+5gaKqqI
github.com/charmbracelet/colorprofile v0.3.2/go.mod h1:mTD5XzNeWHj8oqHb+S1bssQb7vIHbepiebQ2kPKVKbI=
github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674 h1:+Cz+VfxD5DO+JT1LlswXWhre0HYLj6l2HW8HVGfMuC0=
github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674/go.mod h1:9gCUAHmVx5BwSafeyNr3GI0GgvlB1WYjL21SkPp1jyU=
-github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018 h1:PU4Zvpagsk5sgaDxn5W4sxHuLp9QRMBZB3bFSk40A4w=
+github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018 h1:5KgReOUbYf1O8+dIiGF0JVirb5NJNjE0gLQMwxDJap4=
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018/go.mod h1:Z/GLmp9fzaqX4ze3nXG7StgWez5uBM5XtlLHK8V/qSk=
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0 h1:sWRGoSw/JsO2S4t2+fmmEkRbkOxphI0AxZkQPQVKWbs=
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0/go.mod h1:XIuqKpZTUXtVyeyiN1k9Tc/U7EzfaDnVc34feFHfBws=
@@ -94,7 +94,7 @@ github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706 h1:WkwO6Ks3mS
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706/go.mod h1:mjJGp00cxcfvD5xdCa+bso251Jt4owrQvuimJtVmEmM=
github.com/charmbracelet/ultraviolet v0.0.0-20250912143111-9785ff826cbf h1:2fs3BT8BFjpJ4134Tq4VoBm/fE9FB2f2P/FhmzsWelQ=
github.com/charmbracelet/ultraviolet v0.0.0-20250912143111-9785ff826cbf/go.mod h1:V21rZtvULxJyG8tUsRC8caTBvKNHOuRJVxH+G6ghH0Y=
-github.com/charmbracelet/x/ansi v0.10.1 h1:rL3Koar5XvX0pHGfovN03f5cxLbCF2YvLeyz7D2jVDQ=
+github.com/charmbracelet/x/ansi v0.10.1 h1:LT77A3bpevRD0yZ5NDR5nonS7N83mxzzGwuZcTGezLE=
github.com/charmbracelet/x/ansi v0.10.1/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a h1:zYSNtEJM9jwHbJts2k+Hroj+xQwsW1yxc4Wopdv7KaI=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a/go.mod h1:rc2bsPC6MWae3LdOxNO1mOb443NlMrrDL0xEya48NNc=
@@ -232,13 +232,13 @@ github.com/pressly/goose/v3 v3.25.0 h1:6WeYhMWGRCzpyd89SpODFnCBCKz41KrVbRT58nVjG
github.com/pressly/goose/v3 v3.25.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY=
github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c h1:kmzxiX+OB0knCo1V0dkEkdPelzCdAzCURCfmFArn2/A=
github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c/go.mod h1:wNJrtinHyC3YSf6giEh4FJN8+yZV7nXBjvmfjhBIcw4=
+github.com/raphamorim/notify v0.9.3 h1:sOUIE8U6wtt93QA3/2HOXsGsrsVvT7US5Ye01+Hzl9E=
+github.com/raphamorim/notify v0.9.3/go.mod h1:3FXSIPyrunV10GCnLGPrpSxoY/Dxi+saeQb9hf+TDSo=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
-github.com/rjeczalik/notify v0.9.3 h1:6rJAzHTGKXGj76sbRgDiDcYj/HniypXmSJo1SWakZeY=
-github.com/rjeczalik/notify v0.9.3/go.mod h1:gF3zSOrafR9DQEWSE8TjfI9NkooDxbyT4UgRGKZA0lc=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
diff --git a/internal/lsp/watcher/global_watcher.go b/internal/lsp/watcher/global_watcher.go
index 5a546a7db45ffa006a97d9ba98d8816665a6efa1..c9aa4b3a26e42fe9a9e2c86834147828534c70fc 100644
--- a/internal/lsp/watcher/global_watcher.go
+++ b/internal/lsp/watcher/global_watcher.go
@@ -16,12 +16,12 @@ import (
"github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/lsp/protocol"
- "github.com/rjeczalik/notify"
+ "github.com/raphamorim/notify"
)
// global manages file watching shared across all LSP clients.
//
-// IMPORTANT: This implementation uses github.com/rjeczalik/notify which provides
+// IMPORTANT: This implementation uses github.com/raphamorim/notify which provides
// recursive watching on all platforms. On macOS it uses FSEvents, on Linux it
// uses inotify (with recursion handled by the library), and on Windows it uses
// ReadDirectoryChangesW.
@@ -30,6 +30,7 @@ import (
// - Single watch point for entire directory tree
// - Automatic recursive watching without manually adding subdirectories
// - No file descriptor exhaustion issues
+// - Built-in ignore system for filtering file events
type global struct {
// Channel for receiving file system events
events chan notify.EventInfo
@@ -83,7 +84,7 @@ func (gw *global) unregister(name string) {
// Start sets up recursive watching on the workspace root.
//
-// Note: We use github.com/rjeczalik/notify which provides recursive watching
+// Note: We use github.com/raphamorim/notify which provides recursive watching
// with a single watch point. The "..." suffix means watch recursively.
// This is much more efficient than manually walking and watching each directory.
func Start() error {
@@ -103,6 +104,12 @@ func Start() error {
gw.root = root
gw.started.Store(true)
+ // Set up ignore system
+ if err := setupIgnoreSystem(root); err != nil {
+ slog.Warn("lsp watcher: Failed to set up ignore system", "error", err)
+ // Continue anyway, but without ignore functionality
+ }
+
// Start the event processing goroutine
gw.wg.Add(1)
go gw.processEvents()
@@ -162,11 +169,6 @@ func (gw *global) processEvents() {
path := event.Path()
- // Skip ignored files
- if fsext.ShouldExcludeFile(gw.root, path) {
- continue
- }
-
if cfg != nil && cfg.Options.DebugLSP {
slog.Debug("lsp watcher: Global watcher received event", "path", path, "event", event.Event().String())
}
@@ -362,3 +364,31 @@ func isFileLimitError(err error) bool {
// Check for common file limit errors
return errors.Is(err, syscall.EMFILE) || errors.Is(err, syscall.ENFILE)
}
+
+// setupIgnoreSystem configures the notify library's ignore system
+// to use .crushignore and .gitignore files for filtering file events
+func setupIgnoreSystem(root string) error {
+ // Create a new ignore matcher for the workspace root
+ im := notify.NewIgnoreMatcher(root)
+
+ // Load .crushignore file if it exists
+ crushignorePath := filepath.Join(root, ".crushignore")
+ if _, err := os.Stat(crushignorePath); err == nil {
+ if err := im.LoadIgnoreFile(crushignorePath); err != nil {
+ slog.Warn("lsp watcher: Failed to load .crushignore file", "error", err)
+ }
+ }
+
+ // Load .gitignore file if it exists
+ gitignorePath := filepath.Join(root, ".gitignore")
+ if _, err := os.Stat(gitignorePath); err == nil {
+ if err := im.LoadIgnoreFile(gitignorePath); err != nil {
+ slog.Warn("lsp watcher: Failed to load .gitignore file", "error", err)
+ }
+ }
+
+ // Set as the global ignore matcher
+ notify.SetIgnoreMatcher(im)
+
+ return nil
+}
diff --git a/internal/lsp/watcher/global_watcher_test.go b/internal/lsp/watcher/global_watcher_test.go
index cfbe8a51fb9be09fdfdc9f37be830c92d6b6eab8..f33244dea3b3b95bb65c8a570d366d4b887f6b34 100644
--- a/internal/lsp/watcher/global_watcher_test.go
+++ b/internal/lsp/watcher/global_watcher_test.go
@@ -8,7 +8,7 @@ import (
"time"
"github.com/charmbracelet/crush/internal/csync"
- "github.com/rjeczalik/notify"
+ "github.com/raphamorim/notify"
)
func TestGlobalWatcher(t *testing.T) {
From 313510c573e08f590558d155060f28a34eaaf63b Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Mon, 15 Sep 2025 09:53:54 -0300
Subject: [PATCH 083/236] chore(deps): fix go.sum
Signed-off-by: Carlos Alexandro Becker
---
go.sum | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/go.sum b/go.sum
index 1a7f1fa4c6ab97f37f459cb9be75c281d65f61cc..1e3cb1aeebda980f388e26ad055969dbd95f08c7 100644
--- a/go.sum
+++ b/go.sum
@@ -86,7 +86,7 @@ github.com/charmbracelet/colorprofile v0.3.2 h1:9J27WdztfJQVAQKX2WOlSSRB+5gaKqqI
github.com/charmbracelet/colorprofile v0.3.2/go.mod h1:mTD5XzNeWHj8oqHb+S1bssQb7vIHbepiebQ2kPKVKbI=
github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674 h1:+Cz+VfxD5DO+JT1LlswXWhre0HYLj6l2HW8HVGfMuC0=
github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674/go.mod h1:9gCUAHmVx5BwSafeyNr3GI0GgvlB1WYjL21SkPp1jyU=
-github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018 h1:5KgReOUbYf1O8+dIiGF0JVirb5NJNjE0gLQMwxDJap4=
+github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018 h1:PU4Zvpagsk5sgaDxn5W4sxHuLp9QRMBZB3bFSk40A4w=
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018/go.mod h1:Z/GLmp9fzaqX4ze3nXG7StgWez5uBM5XtlLHK8V/qSk=
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0 h1:sWRGoSw/JsO2S4t2+fmmEkRbkOxphI0AxZkQPQVKWbs=
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0/go.mod h1:XIuqKpZTUXtVyeyiN1k9Tc/U7EzfaDnVc34feFHfBws=
@@ -94,7 +94,7 @@ github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706 h1:WkwO6Ks3mS
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706/go.mod h1:mjJGp00cxcfvD5xdCa+bso251Jt4owrQvuimJtVmEmM=
github.com/charmbracelet/ultraviolet v0.0.0-20250912143111-9785ff826cbf h1:2fs3BT8BFjpJ4134Tq4VoBm/fE9FB2f2P/FhmzsWelQ=
github.com/charmbracelet/ultraviolet v0.0.0-20250912143111-9785ff826cbf/go.mod h1:V21rZtvULxJyG8tUsRC8caTBvKNHOuRJVxH+G6ghH0Y=
-github.com/charmbracelet/x/ansi v0.10.1 h1:LT77A3bpevRD0yZ5NDR5nonS7N83mxzzGwuZcTGezLE=
+github.com/charmbracelet/x/ansi v0.10.1 h1:rL3Koar5XvX0pHGfovN03f5cxLbCF2YvLeyz7D2jVDQ=
github.com/charmbracelet/x/ansi v0.10.1/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a h1:zYSNtEJM9jwHbJts2k+Hroj+xQwsW1yxc4Wopdv7KaI=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a/go.mod h1:rc2bsPC6MWae3LdOxNO1mOb443NlMrrDL0xEya48NNc=
From 6ec5a77ad4c479ac96c5060b4d9c560933e44e32 Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Mon, 15 Sep 2025 10:18:14 -0300
Subject: [PATCH 084/236] chore: update `github.com/raphamorim/notify` to
v0.9.4
---
go.mod | 4 ++--
go.sum | 4 ++--
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/go.mod b/go.mod
index 7c29af04b932c44304bb76ec932c476ea1f89fe0..d91beb1b21b503fa52893dcf6bf07f675b9f78a0 100644
--- a/go.mod
+++ b/go.mod
@@ -112,7 +112,7 @@ require (
github.com/ncruces/julianday v1.0.0 // indirect
github.com/pierrec/lz4/v4 v4.1.22 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
- github.com/raphamorim/notify v0.9.3
+ github.com/raphamorim/notify v0.9.4
github.com/rivo/uniseg v0.4.7
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect
@@ -142,7 +142,7 @@ require (
golang.org/x/net v0.42.0 // indirect
golang.org/x/oauth2 v0.30.0 // indirect
golang.org/x/sync v0.17.0 // indirect
- golang.org/x/sys v0.36.0
+ golang.org/x/sys v0.36.0 // indirect
golang.org/x/term v0.34.0 // indirect
golang.org/x/text v0.28.0
golang.org/x/time v0.8.0 // indirect
diff --git a/go.sum b/go.sum
index 1e3cb1aeebda980f388e26ad055969dbd95f08c7..7047afa69b7b3382a04eeb87a68e4e0a9116e8c2 100644
--- a/go.sum
+++ b/go.sum
@@ -232,8 +232,8 @@ github.com/pressly/goose/v3 v3.25.0 h1:6WeYhMWGRCzpyd89SpODFnCBCKz41KrVbRT58nVjG
github.com/pressly/goose/v3 v3.25.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY=
github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c h1:kmzxiX+OB0knCo1V0dkEkdPelzCdAzCURCfmFArn2/A=
github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c/go.mod h1:wNJrtinHyC3YSf6giEh4FJN8+yZV7nXBjvmfjhBIcw4=
-github.com/raphamorim/notify v0.9.3 h1:sOUIE8U6wtt93QA3/2HOXsGsrsVvT7US5Ye01+Hzl9E=
-github.com/raphamorim/notify v0.9.3/go.mod h1:3FXSIPyrunV10GCnLGPrpSxoY/Dxi+saeQb9hf+TDSo=
+github.com/raphamorim/notify v0.9.4 h1:JXAGOzeR/cnclKkRCZINKS4EtB47O5TD1N1iCkkarTM=
+github.com/raphamorim/notify v0.9.4/go.mod h1:3FXSIPyrunV10GCnLGPrpSxoY/Dxi+saeQb9hf+TDSo=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
From 0146161bcbb8b1920cac3436c777a2dd7bf26d57 Mon Sep 17 00:00:00 2001
From: Christian Rocha
Date: Mon, 15 Sep 2025 15:42:11 -0400
Subject: [PATCH 086/236] docs(readme): add cerebras API key to table
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 497cac7205f941e05490efc6c04823a60dfa251d..d2a908f83a8ec6f56f3c6223127765049b09754a 100644
--- a/README.md
+++ b/README.md
@@ -124,6 +124,7 @@ That said, you can also set environment variables for preferred providers.
| `ANTHROPIC_API_KEY` | Anthropic |
| `OPENAI_API_KEY` | OpenAI |
| `OPENROUTER_API_KEY` | OpenRouter |
+| `CEREBRAS_API_KEY` | Cerebras |
| `GEMINI_API_KEY` | Google Gemini |
| `VERTEXAI_PROJECT` | Google Cloud VertexAI (Gemini) |
| `VERTEXAI_LOCATION` | Google Cloud VertexAI (Gemini) |
From 9a8574b5095d73bac1705896ab49f224354772bc Mon Sep 17 00:00:00 2001
From: Ayman Bagabas
Date: Mon, 15 Sep 2025 17:02:50 -0400
Subject: [PATCH 087/236] chore: bump fang to v0.4.1 to fix #1041
---
go.mod | 2 +-
go.sum | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/go.mod b/go.mod
index d91beb1b21b503fa52893dcf6bf07f675b9f78a0..18c273da8af18dfcde5a1f6135e21d475d4ece34 100644
--- a/go.mod
+++ b/go.mod
@@ -15,7 +15,7 @@ require (
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e
github.com/charmbracelet/catwalk v0.5.3
- github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674
+ github.com/charmbracelet/fang v0.4.1
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706
diff --git a/go.sum b/go.sum
index 7047afa69b7b3382a04eeb87a68e4e0a9116e8c2..991099ad908150789fb917d1588f5b7a44073d01 100644
--- a/go.sum
+++ b/go.sum
@@ -84,8 +84,8 @@ github.com/charmbracelet/catwalk v0.5.3 h1:Hw9DlX8u79K9iLQJB4Bti9/rTzMvEpBjE/Gyn
github.com/charmbracelet/catwalk v0.5.3/go.mod h1:WnKgNPmQHuMyk7GtwAQwl+ezHusfH40IvzML2qwUGwc=
github.com/charmbracelet/colorprofile v0.3.2 h1:9J27WdztfJQVAQKX2WOlSSRB+5gaKqqITmrvb1uTIiI=
github.com/charmbracelet/colorprofile v0.3.2/go.mod h1:mTD5XzNeWHj8oqHb+S1bssQb7vIHbepiebQ2kPKVKbI=
-github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674 h1:+Cz+VfxD5DO+JT1LlswXWhre0HYLj6l2HW8HVGfMuC0=
-github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674/go.mod h1:9gCUAHmVx5BwSafeyNr3GI0GgvlB1WYjL21SkPp1jyU=
+github.com/charmbracelet/fang v0.4.1 h1:NC0Y4oqg7YuZcBg/KKsHy8DSow0ZDjF4UJL7LwtA0dE=
+github.com/charmbracelet/fang v0.4.1/go.mod h1:9gCUAHmVx5BwSafeyNr3GI0GgvlB1WYjL21SkPp1jyU=
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018 h1:PU4Zvpagsk5sgaDxn5W4sxHuLp9QRMBZB3bFSk40A4w=
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018/go.mod h1:Z/GLmp9fzaqX4ze3nXG7StgWez5uBM5XtlLHK8V/qSk=
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0 h1:sWRGoSw/JsO2S4t2+fmmEkRbkOxphI0AxZkQPQVKWbs=
From f14867ed22f5a8e340ab5476e23713a614135b6c Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Tue, 16 Sep 2025 05:48:28 -0300
Subject: [PATCH 088/236] chore(legal): @dvcrn has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index 34f0772e708ac062a7f4da8f0d299ca67558bb30..36f944370fcfe8a397c17f5c1f39298dc838ef76 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -623,6 +623,14 @@
"created_at": "2025-09-15T05:42:29Z",
"repoId": 987670088,
"pullRequestNo": 1042
+ },
+ {
+ "name": "dvcrn",
+ "id": 688326,
+ "comment_id": 3296702457,
+ "created_at": "2025-09-16T08:48:17Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1056
}
]
}
\ No newline at end of file
From a9d3080174b8c9995555da3a1cb2bd9b71074af5 Mon Sep 17 00:00:00 2001
From: David Mohl
Date: Tue, 16 Sep 2025 17:42:53 +0900
Subject: [PATCH 089/236] fix(gemini): ensure tool responses have the user role
---
internal/llm/provider/gemini.go | 32 ++++++++++++--------------------
1 file changed, 12 insertions(+), 20 deletions(-)
diff --git a/internal/llm/provider/gemini.go b/internal/llm/provider/gemini.go
index 9d5164973a5ad86b4c0dee001e54b46b838b89e6..256e21bf7d59216a41be4603c1475dc9e24bdeea 100644
--- a/internal/llm/provider/gemini.go
+++ b/internal/llm/provider/gemini.go
@@ -102,6 +102,7 @@ func (g *geminiClient) convertMessages(messages []message.Message) []*genai.Cont
}
case message.Tool:
+ var toolParts []*genai.Part
for _, result := range msg.ToolResults() {
response := map[string]any{"result": result.Content}
parsed, err := parseJSONToMap(result.Content)
@@ -121,16 +122,17 @@ func (g *geminiClient) convertMessages(messages []message.Message) []*genai.Cont
}
}
- history = append(history, &genai.Content{
- Parts: []*genai.Part{
- {
- FunctionResponse: &genai.FunctionResponse{
- Name: toolCall.Name,
- Response: response,
- },
- },
+ toolParts = append(toolParts, &genai.Part{
+ FunctionResponse: &genai.FunctionResponse{
+ Name: toolCall.Name,
+ Response: response,
},
- Role: genai.RoleModel,
+ })
+ }
+ if len(toolParts) > 0 {
+ history = append(history, &genai.Content{
+ Parts: toolParts,
+ Role: genai.RoleUser,
})
}
}
@@ -373,17 +375,7 @@ func (g *geminiClient) stream(ctx context.Context, messages []message.Message, t
Finished: true,
}
- isNew := true
- for _, existing := range toolCalls {
- if existing.Name == newCall.Name && existing.Input == newCall.Input {
- isNew = false
- break
- }
- }
-
- if isNew {
- toolCalls = append(toolCalls, newCall)
- }
+ toolCalls = append(toolCalls, newCall)
}
}
} else {
From e2e952b1ab297774145897de1c0d28eebda13bb3 Mon Sep 17 00:00:00 2001
From: Raphael Amorim
Date: Tue, 16 Sep 2025 20:10:21 +0200
Subject: [PATCH 090/236] fix: only enable watcher for git repos (#1060)
---
internal/app/app.go | 22 +++++++++++++++++++---
1 file changed, 19 insertions(+), 3 deletions(-)
diff --git a/internal/app/app.go b/internal/app/app.go
index 21ddcd25eff1c9aeebb9d6700f9340ab0932e7ab..d9bcfa091006a447bf9af9ef6178cec41f9781a3 100644
--- a/internal/app/app.go
+++ b/internal/app/app.go
@@ -7,6 +7,8 @@ import (
"fmt"
"log/slog"
"maps"
+ "os/exec"
+ "strings"
"sync"
"time"
@@ -54,6 +56,16 @@ type App struct {
cleanupFuncs []func() error
}
+// isGitRepo checks if the current directory is a git repository
+func isGitRepo() bool {
+ bts, err := exec.CommandContext(
+ context.Background(),
+ "git", "rev-parse",
+ "--is-inside-work-tree",
+ ).CombinedOutput()
+ return err == nil && strings.TrimSpace(string(bts)) == "true"
+}
+
// New initializes a new applcation instance.
func New(ctx context.Context, conn *sql.DB, cfg *config.Config) (*App, error) {
q := db.New(conn)
@@ -86,9 +98,13 @@ func New(ctx context.Context, conn *sql.DB, cfg *config.Config) (*App, error) {
app.setupEvents()
- // Start the global watcher
- if err := watcher.Start(); err != nil {
- return nil, fmt.Errorf("app: %w", err)
+ // Start the global watcher only if this is a git repository
+ if isGitRepo() {
+ if err := watcher.Start(); err != nil {
+ return nil, fmt.Errorf("app: %w", err)
+ }
+ } else {
+ slog.Warn("Not starting global watcher: not a git repository")
}
// Initialize LSP clients in the background.
From 0bba5af1396a605a1bc8daeb1bd68d2e02056e66 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Tue, 16 Sep 2025 16:21:10 -0300
Subject: [PATCH 092/236] chore: task run
Signed-off-by: Carlos Alexandro Becker
---
Taskfile.yaml | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/Taskfile.yaml b/Taskfile.yaml
index 3993be96f6ac8987a84491f87c4940963b176074..443531fa2435d5557536a4d2e6d88014ea4a5677 100644
--- a/Taskfile.yaml
+++ b/Taskfile.yaml
@@ -35,6 +35,11 @@ tasks:
generates:
- crush
+ run:
+ desc: Run build
+ cmds:
+ - go run .
+
test:
desc: Run tests
cmds:
From 073b5b3f83be881e7883b538508a5a6ce94dfa92 Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Wed, 17 Sep 2025 02:39:34 -0300
Subject: [PATCH 093/236] chore(legal): @khushveer007 has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index 36f944370fcfe8a397c17f5c1f39298dc838ef76..de147d1ee126d921e62b00e40cad4f36e7afd04b 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -631,6 +631,14 @@
"created_at": "2025-09-16T08:48:17Z",
"repoId": 987670088,
"pullRequestNo": 1056
+ },
+ {
+ "name": "khushveer007",
+ "id": 122660325,
+ "comment_id": 3301369568,
+ "created_at": "2025-09-17T05:32:53Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1065
}
]
}
\ No newline at end of file
From 11c6e4f941e7d902779b7cc1d3d9659207b8a898 Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Wed, 17 Sep 2025 09:53:53 -0300
Subject: [PATCH 094/236] ci(issue-labeler): add azure
---
.github/labeler.yml | 2 ++
1 file changed, 2 insertions(+)
diff --git a/.github/labeler.yml b/.github/labeler.yml
index 75642def1c1e84476d692bee5e8711f52208d05d..dd07db45310a70016126e6455f8777a714d71f5c 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -43,6 +43,8 @@
- "/(anthropic|claude)/i"
"provider: aws bedrock":
- "/(aws|bedrock)/i"
+"provider: azure":
+ - "/azure/i"
"provider: google gemini":
- "/gemini/i"
"provider: google vertex":
From 382f1ca6f6c0e564a3a722d93838ad15145eecc4 Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Wed, 17 Sep 2025 09:56:49 -0300
Subject: [PATCH 095/236] chore: fix labeler script
---
scripts/{run-issue-labeler.sh => run-labeler.sh} | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
rename scripts/{run-issue-labeler.sh => run-labeler.sh} (56%)
diff --git a/scripts/run-issue-labeler.sh b/scripts/run-labeler.sh
similarity index 56%
rename from scripts/run-issue-labeler.sh
rename to scripts/run-labeler.sh
index 041a95e29d5e9ac1ee0da39873d0a7c2ef24375d..4ac4bc2a5f2fda35ee439b69923e74e0bce1578f 100755
--- a/scripts/run-issue-labeler.sh
+++ b/scripts/run-labeler.sh
@@ -2,11 +2,11 @@ ISSUES=$(gh issue list --state=all --limit=1000 --json "number" -t '{{range .}}{
PRS=$(gh pr list --state=all --limit=1000 --json "number" -t '{{range .}}{{printf "%.0f\n" .number}}{{end}}')
for issue in $ISSUES; do
- echo "Dispatching issue-labeler.yml for $issue"
- gh workflow run issue-labeler.yml -f issue-number="$issue"
+ echo "Dispatching labeler.yml for $issue"
+ gh workflow run labeler.yml -f issue-number="$issue"
done
for pr in $PRS; do
- echo "Dispatching issue-labeler.yml for $pr"
- gh workflow run issue-labeler.yml -f issue-number="$pr"
+ echo "Dispatching labeler.yml for $pr"
+ gh workflow run labeler.yml -f issue-number="$pr"
done
From 898f1ee9e43d31d711b97a602db9bc152f18ccac Mon Sep 17 00:00:00 2001
From: Amolith
Date: Fri, 12 Sep 2025 14:13:41 -0600
Subject: [PATCH 096/236] fix(config): look for more than just crush.md
Crush used to show the initialization popup even when the project uses
something other than `CRUSH.md`. Crush already has support for loading
those other context files, like `CLAUDE.md` or `AGENTS.md`, so this just
changes the popup check logic to look for the same set it loads.
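A minimal sketch of the new check, using the same lowercase-and-compare
approach as `contextPathsExist` below; the directory entries and context
paths here are illustrative placeholders, not the real `defaultContextPaths`:

    package main

    import (
        "fmt"
        "path/filepath"
        "slices"
        "strings"
    )

    func main() {
        // Hypothetical directory listing and context paths, for illustration.
        entries := []string{"README.md", "Agents.md", "go.mod"}
        contextPaths := []string{"CRUSH.md", "CLAUDE.md", "AGENTS.md"}

        // Lowercase the directory entries once for case-insensitive lookup.
        var files []string
        for _, e := range entries {
            files = append(files, strings.ToLower(e))
        }

        // Any matching context file suppresses the initialization popup.
        found := false
        for _, p := range contextPaths {
            _, name := filepath.Split(p)
            if slices.Contains(files, strings.ToLower(name)) {
                found = true
                break
            }
        }
        fmt.Println("skip init popup:", found) // true: Agents.md matches AGENTS.md
    }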
Co-Authored-By: Crush
---
internal/config/init.go | 25 +++++++++++++++++--------
1 file changed, 17 insertions(+), 8 deletions(-)
diff --git a/internal/config/init.go b/internal/config/init.go
index f97272cefa779319a752927456c34fbcff97e3b6..6807ab25e819b99d899fac711da304c8dc8db595 100644
--- a/internal/config/init.go
+++ b/internal/config/init.go
@@ -4,6 +4,7 @@ import (
"fmt"
"os"
"path/filepath"
+ "slices"
"strings"
"sync/atomic"
)
@@ -50,30 +51,38 @@ func ProjectNeedsInitialization() (bool, error) {
return false, fmt.Errorf("failed to check init flag file: %w", err)
}
- crushExists, err := crushMdExists(cfg.WorkingDir())
+ someContextFileExists, err := contextPathsExist(cfg.WorkingDir())
if err != nil {
- return false, fmt.Errorf("failed to check for CRUSH.md files: %w", err)
+ return false, fmt.Errorf("failed to check for context files: %w", err)
}
- if crushExists {
+ if someContextFileExists {
return false, nil
}
return true, nil
}
-func crushMdExists(dir string) (bool, error) {
+func contextPathsExist(dir string) (bool, error) {
entries, err := os.ReadDir(dir)
if err != nil {
return false, err
}
+ // Create a slice of lowercase filenames for lookup with slices.Contains
+ var files []string
for _, entry := range entries {
- if entry.IsDir() {
- continue
+ if !entry.IsDir() {
+ files = append(files, strings.ToLower(entry.Name()))
}
+ }
+
+ // Check if any of the default context paths exist in the directory
+ for _, path := range defaultContextPaths {
+ // Extract just the filename from the path
+ _, filename := filepath.Split(path)
+ filename = strings.ToLower(filename)
- name := strings.ToLower(entry.Name())
- if name == "crush.md" {
+ if slices.Contains(files, filename) {
return true, nil
}
}
From b00ffcc671ae0cd06d3b1baf5e8c537634e46ccd Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 17 Sep 2025 14:07:34 -0300
Subject: [PATCH 097/236] feat: LSP implementation using x/powernap (#1011)
* feat: LSP implementation using x/powernap
Replace custom LSP client implementation with the `charmbracelet/x/powernap` library, significantly reducing codebase complexity by removing ~12,000 lines of custom LSP protocol handling code. This migration introduces:
- **Simplified client architecture**: Replace custom LSP transport, protocol, and method handling with powernap's battle-tested implementation
- **Enhanced workspace support**: Add workspace markers and inactive LSP server detection capabilities from powernap
- **Improved configuration**: Integrate powernap's default LSP server configurations and settings
- **Reduced maintenance burden**: Remove custom protocol definitions, JSON marshaling, and transport layer code
- **More features**: e.g. workspace root markers, so LSP servers only start in projects where those marker files exist
The refactor maintains existing functionality while leveraging powernap's robust LSP client foundation, making the codebase more maintainable and feature-rich.
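As a rough sketch, the new client lifecycle in `internal/app/lsp.go` boils down
to the calls below (a fragment, not a standalone program; the signatures of
`HasRootMarkers`, `New`, `Initialize`, and `Close` are taken from this diff, and
the error handling is simplified):

    import (
        "context"

        "github.com/charmbracelet/crush/internal/config"
        "github.com/charmbracelet/crush/internal/lsp"
    )

    // startLSP mirrors how the app now starts a single LSP server.
    func startLSP(ctx context.Context, name string, cfg config.LSPConfig, workingDir string) error {
        if cfg.Disabled || !lsp.HasRootMarkers(workingDir, cfg.RootMarkers) {
            return nil // skipped: disabled in config, or no root markers found
        }
        client, err := lsp.New(ctx, name, cfg)
        if err != nil {
            return err
        }
        if _, err := client.Initialize(ctx, workingDir); err != nil {
            client.Close(ctx) // best-effort cleanup on failed init
            return err
        }
        // ...the app then registers file watchers and serves diagnostics;
        // on shutdown it calls client.Close(ctx).
        return nil
    }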
Signed-off-by: Carlos Alexandro Becker
chore: updates
Signed-off-by: Carlos Alexandro Becker
* fix: code review
Signed-off-by: Carlos Alexandro Becker
* revert: unwanted change
Signed-off-by: Carlos Alexandro Becker
* fix: improve code
Signed-off-by: Carlos Alexandro Becker
* fix: schema
Signed-off-by: Carlos Alexandro Becker
* fix: improve merge
Signed-off-by: Carlos Alexandro Becker
* fix: unneeded changes
Signed-off-by: Carlos Alexandro Becker
* fix: cleanup
Signed-off-by: Carlos Alexandro Becker
* fix: several fixes
Signed-off-by: Carlos Alexandro Becker
* fix: cleanup
Signed-off-by: Carlos Alexandro Becker
* fix: more cleanup
Signed-off-by: Carlos Alexandro Becker
* fix: use csync
Signed-off-by: Carlos Alexandro Becker
* fix: did close
Signed-off-by: Carlos Alexandro Becker
* fix: remove unused code
Signed-off-by: Carlos Alexandro Becker
* fix: cleanup
Signed-off-by: Carlos Alexandro Becker
* fix: cleanup
Signed-off-by: Carlos Alexandro Becker
* fix: clean
Signed-off-by: Carlos Alexandro Becker
* fix: cleanup
Signed-off-by: Carlos Alexandro Becker
* test: fix
Signed-off-by: Carlos Alexandro Becker
* refactor: improve func
Signed-off-by: Carlos Alexandro Becker
* fix: http client debug
Signed-off-by: Carlos Alexandro Becker
---------
Signed-off-by: Carlos Alexandro Becker
Co-authored-by: Andrey Nering
---
go.mod | 3 +
go.sum | 7 +
internal/app/app.go | 2 +-
internal/app/lsp.go | 19 +-
internal/config/config.go | 14 +-
internal/config/load.go | 57 +-
internal/config/lsp_defaults_test.go | 35 +
internal/llm/tools/diagnostics.go | 204 +-
internal/log/http.go | 4 -
internal/lsp/caps.go | 112 -
internal/lsp/client.go | 646 +-
internal/lsp/client_test.go | 125 +-
internal/lsp/handlers.go | 55 +-
internal/lsp/language.go | 2 +-
internal/lsp/methods.go | 554 --
internal/lsp/protocol.go | 48 -
internal/lsp/protocol/LICENSE | 27 -
internal/lsp/protocol/interface.go | 117 -
internal/lsp/protocol/pattern_interfaces.go | 73 -
internal/lsp/protocol/tables.go | 30 -
internal/lsp/protocol/tsdocument-changes.go | 81 -
internal/lsp/protocol/tsjson.go | 3073 --------
internal/lsp/protocol/tsprotocol.go | 6952 -----------------
internal/lsp/protocol/uri.go | 229 -
internal/lsp/rootmarkers_test.go | 37 +
internal/lsp/transport.go | 284 -
internal/lsp/util/edit.go | 2 +-
internal/lsp/watcher/global_watcher.go | 2 +-
internal/lsp/watcher/watcher.go | 3 +-
internal/tui/components/chat/header/header.go | 2 +-
internal/tui/components/lsp/lsp.go | 8 +-
schema.json | 25 +-
32 files changed, 517 insertions(+), 12315 deletions(-)
create mode 100644 internal/config/lsp_defaults_test.go
delete mode 100644 internal/lsp/caps.go
delete mode 100644 internal/lsp/methods.go
delete mode 100644 internal/lsp/protocol.go
delete mode 100644 internal/lsp/protocol/LICENSE
delete mode 100644 internal/lsp/protocol/interface.go
delete mode 100644 internal/lsp/protocol/pattern_interfaces.go
delete mode 100644 internal/lsp/protocol/tables.go
delete mode 100644 internal/lsp/protocol/tsdocument-changes.go
delete mode 100644 internal/lsp/protocol/tsjson.go
delete mode 100644 internal/lsp/protocol/tsprotocol.go
delete mode 100644 internal/lsp/protocol/uri.go
create mode 100644 internal/lsp/rootmarkers_test.go
delete mode 100644 internal/lsp/transport.go
diff --git a/go.mod b/go.mod
index 18c273da8af18dfcde5a1f6135e21d475d4ece34..b4d0015ef96fde5aa0105bac9c7a2dcbfe2d8d8b 100644
--- a/go.mod
+++ b/go.mod
@@ -76,6 +76,7 @@ require (
github.com/charmbracelet/ultraviolet v0.0.0-20250912143111-9785ff826cbf
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a // indirect
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d
+ github.com/charmbracelet/x/powernap v0.0.0-20250911135559-c589b77c25e6
github.com/charmbracelet/x/term v0.2.1
github.com/charmbracelet/x/termios v0.1.1 // indirect
github.com/charmbracelet/x/windows v0.2.2 // indirect
@@ -104,6 +105,7 @@ require (
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mfridman/interpolate v0.0.2 // indirect
github.com/microcosm-cc/bluemonday v1.0.27 // indirect
+ github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/muesli/cancelreader v0.2.2 // indirect
github.com/muesli/mango v0.1.0 // indirect
github.com/muesli/mango-cobra v1.2.0 // indirect
@@ -116,6 +118,7 @@ require (
github.com/rivo/uniseg v0.4.7
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect
+ github.com/sourcegraph/jsonrpc2 v0.2.1 // indirect
github.com/spf13/cast v1.7.1 // indirect
github.com/spf13/pflag v1.0.7 // indirect
github.com/tetratelabs/wazero v1.9.0 // indirect
diff --git a/go.sum b/go.sum
index 991099ad908150789fb917d1588f5b7a44073d01..dd9347e6e058b89170a4925f65507e01b69ab89c 100644
--- a/go.sum
+++ b/go.sum
@@ -104,6 +104,8 @@ github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a h1:FsHE
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d h1:H2oh4WlSsXy8qwLd7I3eAvPd/X3S40aM9l+h47WF1eA=
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d/go.mod h1:vI5nDVMWi6veaYH+0Fmvpbe/+cv/iJfMntdh+N0+Tms=
+github.com/charmbracelet/x/powernap v0.0.0-20250911135559-c589b77c25e6 h1:8XaGEZ453uu9IUBlEWu1I9U+Z7GmdwIzFFzBasRzDEk=
+github.com/charmbracelet/x/powernap v0.0.0-20250911135559-c589b77c25e6/go.mod h1:cmdl5zlP5mR8TF2Y68UKc7hdGUDiSJ2+4hk0h04Hsx4=
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
github.com/charmbracelet/x/termios v0.1.1 h1:o3Q2bT8eqzGnGPOYheoYS8eEleT5ZVNYNy8JawjaNZY=
@@ -157,6 +159,7 @@ github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrk
github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA=
github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
+github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
@@ -197,6 +200,8 @@ github.com/mfridman/interpolate v0.0.2 h1:pnuTK7MQIxxFz1Gr+rjSIx9u7qVjf5VOoM/u6B
github.com/mfridman/interpolate v0.0.2/go.mod h1:p+7uk6oE07mpE/Ik1b8EckO0O4ZXiGAfshKBWLUM9Xg=
github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk=
github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA=
+github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
+github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
github.com/muesli/mango v0.1.0 h1:DZQK45d2gGbql1arsYA4vfg4d7I9Hfx5rX/GCmzsAvI=
@@ -254,6 +259,8 @@ github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE=
github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas=
+github.com/sourcegraph/jsonrpc2 v0.2.1 h1:2GtljixMQYUYCmIg7W9aF2dFmniq/mOr2T9tFRh6zSQ=
+github.com/sourcegraph/jsonrpc2 v0.2.1/go.mod h1:ZafdZgk/axhT1cvZAPOhw+95nz2I/Ra5qMlU4gTRwIo=
github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y=
github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
diff --git a/internal/app/app.go b/internal/app/app.go
index d9bcfa091006a447bf9af9ef6178cec41f9781a3..f30df8b8adb4ef52c5ef93a5934b070454b29981 100644
--- a/internal/app/app.go
+++ b/internal/app/app.go
@@ -368,7 +368,7 @@ func (app *App) Shutdown() {
// Shutdown all LSP clients.
for name, client := range clients {
shutdownCtx, cancel := context.WithTimeout(app.globalCtx, 5*time.Second)
- if err := client.Shutdown(shutdownCtx); err != nil {
+ if err := client.Close(shutdownCtx); err != nil {
slog.Error("Failed to shutdown LSP client", "name", name, "error", err)
}
cancel()
diff --git a/internal/app/lsp.go b/internal/app/lsp.go
index 8a9b06c1e784770371bc4000a2101af11aa44d64..d273774620dad4ac3cfc4f79f9d7b5fd681cbfb2 100644
--- a/internal/app/lsp.go
+++ b/internal/app/lsp.go
@@ -14,6 +14,10 @@ import (
// initLSPClients initializes LSP clients.
func (app *App) initLSPClients(ctx context.Context) {
for name, clientConfig := range app.config.LSP {
+ if clientConfig.Disabled {
+ slog.Info("Skipping disabled LSP client", "name", name)
+ continue
+ }
go app.createAndStartLSPClient(ctx, name, clientConfig)
}
slog.Info("LSP clients initialization started in background")
@@ -23,11 +27,18 @@ func (app *App) initLSPClients(ctx context.Context) {
func (app *App) createAndStartLSPClient(ctx context.Context, name string, config config.LSPConfig) {
slog.Info("Creating LSP client", "name", name, "command", config.Command, "fileTypes", config.FileTypes, "args", config.Args)
+ // Check if any root markers exist in the working directory (config now has defaults)
+ if !lsp.HasRootMarkers(app.config.WorkingDir(), config.RootMarkers) {
+ slog.Info("Skipping LSP client - no root markers found", "name", name, "rootMarkers", config.RootMarkers)
+ updateLSPState(name, lsp.StateDisabled, nil, nil, 0)
+ return
+ }
+
// Update state to starting
updateLSPState(name, lsp.StateStarting, nil, nil, 0)
// Create LSP client.
- lspClient, err := lsp.NewClient(ctx, name, config)
+ lspClient, err := lsp.New(ctx, name, config)
if err != nil {
slog.Error("Failed to create LSP client for", name, err)
updateLSPState(name, lsp.StateError, err, nil, 0)
@@ -42,11 +53,11 @@ func (app *App) createAndStartLSPClient(ctx context.Context, name string, config
defer cancel()
// Initialize LSP client.
- _, err = lspClient.InitializeLSPClient(initCtx, app.config.WorkingDir())
+ _, err = lspClient.Initialize(initCtx, app.config.WorkingDir())
if err != nil {
slog.Error("Initialize failed", "name", name, "error", err)
updateLSPState(name, lsp.StateError, err, lspClient, 0)
- lspClient.Close()
+ lspClient.Close(ctx)
return
}
@@ -119,7 +130,7 @@ func (app *App) restartLSPClient(ctx context.Context, name string) {
if exists && oldClient != nil {
// Try to shut down client gracefully, but don't block on errors.
shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
- _ = oldClient.Shutdown(shutdownCtx)
+ _ = oldClient.Close(shutdownCtx)
cancel()
}
diff --git a/internal/config/config.go b/internal/config/config.go
index 17ed626838cb555db163ee6c4db47d9d1be61b2a..05f6f8a10209ca4c5ddb084c04eb873c043f3c2c 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -117,12 +117,14 @@ type MCPConfig struct {
}
type LSPConfig struct {
- Disabled bool `json:"enabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
- Command string `json:"command" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
- Args []string `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
- Env map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set to the LSP server command"`
- Options any `json:"options,omitempty" jsonschema:"description=LSP server-specific configuration options"`
- FileTypes []string `json:"filetypes,omitempty" jsonschema:"description=File types this LSP server handles,example=go,example=mod,example=rs,example=c,example=js,example=ts"`
+ Disabled bool `json:"disabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
+ Command string `json:"command" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
+ Args []string `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
+ Env map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set to the LSP server command"`
+ FileTypes []string `json:"filetypes,omitempty" jsonschema:"description=File types this LSP server handles,example=go,example=mod,example=rs,example=c,example=js,example=ts"`
+ RootMarkers []string `json:"root_markers,omitempty" jsonschema:"description=Files or directories that indicate the project root,example=go.mod,example=package.json,example=Cargo.toml"`
+ InitOptions map[string]any `json:"init_options,omitempty" jsonschema:"description=Initialization options passed to the LSP server during initialize request"`
+ Options map[string]any `json:"options,omitempty" jsonschema:"description=LSP server-specific settings passed during initialization"`
}
type TUIOptions struct {
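
Illustrative aside (not part of the patch): the reworked LSPConfig above can be exercised from Go roughly as follows. Only the field names come from the struct; the helper name and values are hypothetical.

package config

// exampleGoplsLSPConfig is a hypothetical helper showing how the new
// RootMarkers, InitOptions and Options fields might be populated for gopls.
func exampleGoplsLSPConfig() LSPConfig {
	return LSPConfig{
		Command:     "gopls",
		FileTypes:   []string{"go", "mod"},
		RootMarkers: []string{"go.mod", "go.work"},
		InitOptions: map[string]any{"staticcheck": true},
		Options:     map[string]any{"gofumpt": true},
	}
}
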
diff --git a/internal/config/load.go b/internal/config/load.go
index 9e1c9d0f7b739d7d6bdd974657b6efb5ea52d2ee..bdfd6328c8cabb1ea57afcd1f6e7e94645fd93e5 100644
--- a/internal/config/load.go
+++ b/internal/config/load.go
@@ -19,6 +19,7 @@ import (
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/home"
"github.com/charmbracelet/crush/internal/log"
+ powernapConfig "github.com/charmbracelet/x/powernap/pkg/config"
)
const defaultCatwalkURL = "https://catwalk.charm.sh"
@@ -334,8 +335,8 @@ func (c *Config) setDefaults(workingDir, dataDir string) {
c.LSP = make(map[string]LSPConfig)
}
- // Apply default file types for known LSP servers if not specified
- applyDefaultLSPFileTypes(c.LSP)
+ // Apply defaults to LSP configurations
+ c.applyLSPDefaults()
// Add the default context paths if they are not already present
c.Options.ContextPaths = append(defaultContextPaths, c.Options.ContextPaths...)
@@ -347,35 +348,33 @@ func (c *Config) setDefaults(workingDir, dataDir string) {
}
}
-var defaultLSPFileTypes = map[string][]string{
- "gopls": {"go", "mod", "sum", "work"},
- "typescript-language-server": {"ts", "tsx", "js", "jsx", "mjs", "cjs"},
- "vtsls": {"ts", "tsx", "js", "jsx", "mjs", "cjs"},
- "bash-language-server": {"sh", "bash", "zsh", "ksh"},
- "rust-analyzer": {"rs"},
- "pyright": {"py", "pyi"},
- "pylsp": {"py", "pyi"},
- "clangd": {"c", "cpp", "cc", "cxx", "h", "hpp"},
- "jdtls": {"java"},
- "vscode-html-languageserver": {"html", "htm"},
- "vscode-css-languageserver": {"css", "scss", "sass", "less"},
- "vscode-json-languageserver": {"json", "jsonc"},
- "yaml-language-server": {"yaml", "yml"},
- "lua-language-server": {"lua"},
- "solargraph": {"rb"},
- "elixir-ls": {"ex", "exs"},
- "zls": {"zig"},
-}
-
-// applyDefaultLSPFileTypes sets default file types for known LSP servers
-func applyDefaultLSPFileTypes(lspConfigs map[string]LSPConfig) {
- for name, config := range lspConfigs {
- if len(config.FileTypes) != 0 {
+// applyLSPDefaults applies default values from powernap to LSP configurations
+func (c *Config) applyLSPDefaults() {
+ // Get powernap's default configuration
+ configManager := powernapConfig.NewManager()
+ configManager.LoadDefaults()
+
+ // Apply defaults to each LSP configuration
+ for name, cfg := range c.LSP {
+ // Try to get defaults from powernap based on command name
+ base, ok := configManager.GetServer(cfg.Command)
+ if !ok {
continue
}
- bin := strings.ToLower(filepath.Base(config.Command))
- config.FileTypes = defaultLSPFileTypes[bin]
- lspConfigs[name] = config
+ if cfg.Options == nil {
+ cfg.Options = base.Settings
+ }
+ if cfg.InitOptions == nil {
+ cfg.InitOptions = base.InitOptions
+ }
+ if len(cfg.FileTypes) == 0 {
+ cfg.FileTypes = base.FileTypes
+ }
+ if len(cfg.RootMarkers) == 0 {
+ cfg.RootMarkers = base.RootMarkers
+ }
+ // Update the config in the map
+ c.LSP[name] = cfg
}
}
diff --git a/internal/config/lsp_defaults_test.go b/internal/config/lsp_defaults_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..f04666597803f390fe25699a3a8bf9aba44b68be
--- /dev/null
+++ b/internal/config/lsp_defaults_test.go
@@ -0,0 +1,35 @@
+package config
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestApplyLSPDefaults(t *testing.T) {
+ t.Parallel()
+
+ // Create a config with an LSP that should get defaults
+ config := &Config{
+ LSP: map[string]LSPConfig{
+ "gopls": {
+ Command: "gopls", // This should get defaults from powernap
+ },
+ "custom": {
+ Command: "custom-lsp",
+ RootMarkers: []string{"custom.toml"}, // This should keep its explicit config
+ },
+ },
+ }
+
+ // Apply defaults
+ config.applyLSPDefaults()
+
+ // Check that gopls got defaults (it should have some root markers now)
+ goplsConfig := config.LSP["gopls"]
+ require.NotEmpty(t, goplsConfig.RootMarkers, "gopls should have received default root markers")
+
+ // Check that custom LSP kept its explicit config
+ customConfig := config.LSP["custom"]
+ require.Equal(t, []string{"custom.toml"}, customConfig.RootMarkers, "custom LSP should keep its explicit root markers")
+}
diff --git a/internal/llm/tools/diagnostics.go b/internal/llm/tools/diagnostics.go
index b6773a8cf9de28b71cafca6fb45d3e2cb69d8c0a..68586023296c1b5763faefe609171e5c1759eb09 100644
--- a/internal/llm/tools/diagnostics.go
+++ b/internal/llm/tools/diagnostics.go
@@ -10,7 +10,7 @@ import (
"time"
"github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
type DiagnosticsParams struct {
@@ -110,7 +110,7 @@ func waitForLspDiagnostics(ctx context.Context, filePath string, lsps map[string
for _, client := range lsps {
originalDiags := client.GetDiagnostics()
- handler := func(params json.RawMessage) {
+ handler := func(_ context.Context, _ string, params json.RawMessage) {
lsp.HandleDiagnostics(client, params)
var diagParams protocol.PublishDiagnosticsParams
if err := json.Unmarshal(params, &diagParams); err != nil {
@@ -167,135 +167,123 @@ func getDiagnostics(filePath string, lsps map[string]*lsp.Client) string {
fileDiagnostics := []string{}
projectDiagnostics := []string{}
- formatDiagnostic := func(pth string, diagnostic protocol.Diagnostic, source string) string {
- severity := "Info"
- switch diagnostic.Severity {
- case protocol.SeverityError:
- severity = "Error"
- case protocol.SeverityWarning:
- severity = "Warn"
- case protocol.SeverityHint:
- severity = "Hint"
+ for lspName, client := range lsps {
+ for location, diags := range client.GetDiagnostics() {
+ path, err := location.Path()
+ if err != nil {
+ slog.Error("Failed to convert diagnostic location URI to path", "uri", location, "error", err)
+ continue
+ }
+ isCurrentFile := path == filePath
+ for _, diag := range diags {
+ formattedDiag := formatDiagnostic(path, diag, lspName)
+ if isCurrentFile {
+ fileDiagnostics = append(fileDiagnostics, formattedDiag)
+ } else {
+ projectDiagnostics = append(projectDiagnostics, formattedDiag)
+ }
+ }
}
+ }
- location := fmt.Sprintf("%s:%d:%d", pth, diagnostic.Range.Start.Line+1, diagnostic.Range.Start.Character+1)
+ sortDiagnostics(fileDiagnostics)
+ sortDiagnostics(projectDiagnostics)
- sourceInfo := ""
- if diagnostic.Source != "" {
- sourceInfo = diagnostic.Source
- } else if source != "" {
- sourceInfo = source
- }
-
- codeInfo := ""
- if diagnostic.Code != nil {
- codeInfo = fmt.Sprintf("[%v]", diagnostic.Code)
- }
+ var output strings.Builder
+ writeDiagnostics(&output, "file_diagnostics", fileDiagnostics)
+ writeDiagnostics(&output, "project_diagnostics", projectDiagnostics)
- tagsInfo := ""
- if len(diagnostic.Tags) > 0 {
- tags := []string{}
- for _, tag := range diagnostic.Tags {
- switch tag {
- case protocol.Unnecessary:
- tags = append(tags, "unnecessary")
- case protocol.Deprecated:
- tags = append(tags, "deprecated")
- }
- }
- if len(tags) > 0 {
- tagsInfo = fmt.Sprintf(" (%s)", strings.Join(tags, ", "))
- }
- }
+ if len(fileDiagnostics) > 0 || len(projectDiagnostics) > 0 {
+ fileErrors := countSeverity(fileDiagnostics, "Error")
+ fileWarnings := countSeverity(fileDiagnostics, "Warn")
+ projectErrors := countSeverity(projectDiagnostics, "Error")
+ projectWarnings := countSeverity(projectDiagnostics, "Warn")
- return fmt.Sprintf("%s: %s [%s]%s%s %s",
- severity,
- location,
- sourceInfo,
- codeInfo,
- tagsInfo,
- diagnostic.Message)
+ output.WriteString("\n\n")
+ fmt.Fprintf(&output, "Current file: %d errors, %d warnings\n", fileErrors, fileWarnings)
+ fmt.Fprintf(&output, "Project: %d errors, %d warnings\n", projectErrors, projectWarnings)
+ output.WriteString("\n")
}
- for lspName, client := range lsps {
- diagnostics := client.GetDiagnostics()
- if len(diagnostics) > 0 {
- for location, diags := range diagnostics {
- path, err := location.Path()
- if err != nil {
- slog.Error("Failed to convert diagnostic location URI to path", "uri", location, "error", err)
- continue
- }
- isCurrentFile := path == filePath
-
- for _, diag := range diags {
- formattedDiag := formatDiagnostic(path, diag, lspName)
+ out := output.String()
+ slog.Info("Diagnostics", "output", fmt.Sprintf("%q", out))
+ return out
+}
- if isCurrentFile {
- fileDiagnostics = append(fileDiagnostics, formattedDiag)
- } else {
- projectDiagnostics = append(projectDiagnostics, formattedDiag)
- }
- }
- }
- }
+func writeDiagnostics(output *strings.Builder, tag string, in []string) {
+ if len(in) == 0 {
+ return
+ }
+ output.WriteString("\n<" + tag + ">\n")
+ if len(in) > 10 {
+ output.WriteString(strings.Join(in[:10], "\n"))
+ fmt.Fprintf(output, "\n... and %d more diagnostics", len(in)-10)
+ } else {
+ output.WriteString(strings.Join(in, "\n"))
}
+ output.WriteString("\n" + tag + ">\n")
+}
- sort.Slice(fileDiagnostics, func(i, j int) bool {
- iIsError := strings.HasPrefix(fileDiagnostics[i], "Error")
- jIsError := strings.HasPrefix(fileDiagnostics[j], "Error")
+func sortDiagnostics(in []string) []string {
+ sort.Slice(in, func(i, j int) bool {
+ iIsError := strings.HasPrefix(in[i], "Error")
+ jIsError := strings.HasPrefix(in[j], "Error")
if iIsError != jIsError {
return iIsError // Errors come first
}
- return fileDiagnostics[i] < fileDiagnostics[j] // Then alphabetically
+ return in[i] < in[j] // Then alphabetically
})
+ return in
+}
- sort.Slice(projectDiagnostics, func(i, j int) bool {
- iIsError := strings.HasPrefix(projectDiagnostics[i], "Error")
- jIsError := strings.HasPrefix(projectDiagnostics[j], "Error")
- if iIsError != jIsError {
- return iIsError
- }
- return projectDiagnostics[i] < projectDiagnostics[j]
- })
+func formatDiagnostic(pth string, diagnostic protocol.Diagnostic, source string) string {
+ severity := "Info"
+ switch diagnostic.Severity {
+ case protocol.SeverityError:
+ severity = "Error"
+ case protocol.SeverityWarning:
+ severity = "Warn"
+ case protocol.SeverityHint:
+ severity = "Hint"
+ }
- var output strings.Builder
+ location := fmt.Sprintf("%s:%d:%d", pth, diagnostic.Range.Start.Line+1, diagnostic.Range.Start.Character+1)
- if len(fileDiagnostics) > 0 {
- output.WriteString("\n\n")
- if len(fileDiagnostics) > 10 {
- output.WriteString(strings.Join(fileDiagnostics[:10], "\n"))
- fmt.Fprintf(&output, "\n... and %d more diagnostics", len(fileDiagnostics)-10)
- } else {
- output.WriteString(strings.Join(fileDiagnostics, "\n"))
- }
- output.WriteString("\n\n")
+ sourceInfo := ""
+ if diagnostic.Source != "" {
+ sourceInfo = diagnostic.Source
+ } else if source != "" {
+ sourceInfo = source
}
- if len(projectDiagnostics) > 0 {
- output.WriteString("\n\n")
- if len(projectDiagnostics) > 10 {
- output.WriteString(strings.Join(projectDiagnostics[:10], "\n"))
- fmt.Fprintf(&output, "\n... and %d more diagnostics", len(projectDiagnostics)-10)
- } else {
- output.WriteString(strings.Join(projectDiagnostics, "\n"))
- }
- output.WriteString("\n\n")
+ codeInfo := ""
+ if diagnostic.Code != nil {
+ codeInfo = fmt.Sprintf("[%v]", diagnostic.Code)
}
- if len(fileDiagnostics) > 0 || len(projectDiagnostics) > 0 {
- fileErrors := countSeverity(fileDiagnostics, "Error")
- fileWarnings := countSeverity(fileDiagnostics, "Warn")
- projectErrors := countSeverity(projectDiagnostics, "Error")
- projectWarnings := countSeverity(projectDiagnostics, "Warn")
-
- output.WriteString("\n\n")
- fmt.Fprintf(&output, "Current file: %d errors, %d warnings\n", fileErrors, fileWarnings)
- fmt.Fprintf(&output, "Project: %d errors, %d warnings\n", projectErrors, projectWarnings)
- output.WriteString("\n")
+ tagsInfo := ""
+ if len(diagnostic.Tags) > 0 {
+ tags := []string{}
+ for _, tag := range diagnostic.Tags {
+ switch tag {
+ case protocol.Unnecessary:
+ tags = append(tags, "unnecessary")
+ case protocol.Deprecated:
+ tags = append(tags, "deprecated")
+ }
+ }
+ if len(tags) > 0 {
+ tagsInfo = fmt.Sprintf(" (%s)", strings.Join(tags, ", "))
+ }
}
- return output.String()
+ return fmt.Sprintf("%s: %s [%s]%s%s %s",
+ severity,
+ location,
+ sourceInfo,
+ codeInfo,
+ tagsInfo,
+ diagnostic.Message)
}
func countSeverity(diagnostics []string, severity string) int {
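
Illustrative aside (not part of the patch): a rough sketch of how the extracted helpers above compose, assuming package tools. The diagnostic strings and the exampleDiagnosticsOutput name are made up; formatDiagnostic normally produces strings of this shape.

package tools

import (
	"fmt"
	"strings"
)

// exampleDiagnosticsOutput is a hypothetical helper: it sorts two fabricated
// diagnostic strings (errors first), wraps them in <file_diagnostics> tags via
// writeDiagnostics, and appends a severity summary line like the tool output.
func exampleDiagnosticsOutput() string {
	fileDiags := sortDiagnostics([]string{
		"Warn: main.go:3:1 [gopls] x declared and not used",
		"Error: main.go:10:5 [gopls] undefined: foo",
	})
	var out strings.Builder
	writeDiagnostics(&out, "file_diagnostics", fileDiags)
	fmt.Fprintf(&out, "\nCurrent file: %d errors, %d warnings\n",
		countSeverity(fileDiags, "Error"), countSeverity(fileDiags, "Warn"))
	return out.String()
}
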
diff --git a/internal/log/http.go b/internal/log/http.go
index 2c74bd05201ad9cbd8d60c6e3c3db3f637fb99b3..46c4b42af599f1809478a5c3f083c6249a3e13d0 100644
--- a/internal/log/http.go
+++ b/internal/log/http.go
@@ -2,7 +2,6 @@ package log
import (
"bytes"
- "context"
"encoding/json"
"io"
"log/slog"
@@ -13,9 +12,6 @@ import (
// NewHTTPClient creates an HTTP client with debug logging enabled when debug mode is on.
func NewHTTPClient() *http.Client {
- if !slog.Default().Enabled(context.TODO(), slog.LevelDebug) {
- return http.DefaultClient
- }
return &http.Client{
Transport: &HTTPRoundTripLogger{
Transport: http.DefaultTransport,
diff --git a/internal/lsp/caps.go b/internal/lsp/caps.go
deleted file mode 100644
index 7edc0886f72a92183a8570e45db74218e3aead47..0000000000000000000000000000000000000000
--- a/internal/lsp/caps.go
+++ /dev/null
@@ -1,112 +0,0 @@
-package lsp
-
-import "github.com/charmbracelet/crush/internal/lsp/protocol"
-
-func (c *Client) setCapabilities(caps protocol.ServerCapabilities) {
- c.capsMu.Lock()
- defer c.capsMu.Unlock()
- c.caps = caps
- c.capsSet.Store(true)
-}
-
-func (c *Client) getCapabilities() (protocol.ServerCapabilities, bool) {
- c.capsMu.RLock()
- defer c.capsMu.RUnlock()
- return c.caps, c.capsSet.Load()
-}
-
-func (c *Client) IsMethodSupported(method string) bool {
- // Always allow core lifecycle and generic methods
- switch method {
- case "initialize", "shutdown", "exit", "$/cancelRequest":
- return true
- }
-
- caps, ok := c.getCapabilities()
- if !ok {
- // caps not set yet, be permissive
- return true
- }
-
- switch method {
- case "textDocument/hover":
- return caps.HoverProvider != nil
- case "textDocument/definition":
- return caps.DefinitionProvider != nil
- case "textDocument/references":
- return caps.ReferencesProvider != nil
- case "textDocument/implementation":
- return caps.ImplementationProvider != nil
- case "textDocument/typeDefinition":
- return caps.TypeDefinitionProvider != nil
- case "textDocument/documentColor", "textDocument/colorPresentation":
- return caps.ColorProvider != nil
- case "textDocument/foldingRange":
- return caps.FoldingRangeProvider != nil
- case "textDocument/declaration":
- return caps.DeclarationProvider != nil
- case "textDocument/selectionRange":
- return caps.SelectionRangeProvider != nil
- case "textDocument/prepareCallHierarchy", "callHierarchy/incomingCalls", "callHierarchy/outgoingCalls":
- return caps.CallHierarchyProvider != nil
- case "textDocument/semanticTokens/full", "textDocument/semanticTokens/full/delta", "textDocument/semanticTokens/range":
- return caps.SemanticTokensProvider != nil
- case "textDocument/linkedEditingRange":
- return caps.LinkedEditingRangeProvider != nil
- case "workspace/willCreateFiles":
- return caps.Workspace != nil && caps.Workspace.FileOperations != nil && caps.Workspace.FileOperations.WillCreate != nil
- case "workspace/willRenameFiles":
- return caps.Workspace != nil && caps.Workspace.FileOperations != nil && caps.Workspace.FileOperations.WillRename != nil
- case "workspace/willDeleteFiles":
- return caps.Workspace != nil && caps.Workspace.FileOperations != nil && caps.Workspace.FileOperations.WillDelete != nil
- case "textDocument/moniker":
- return caps.MonikerProvider != nil
- case "textDocument/prepareTypeHierarchy", "typeHierarchy/supertypes", "typeHierarchy/subtypes":
- return caps.TypeHierarchyProvider != nil
- case "textDocument/inlineValue":
- return caps.InlineValueProvider != nil
- case "textDocument/inlayHint", "inlayHint/resolve":
- return caps.InlayHintProvider != nil
- case "textDocument/diagnostic", "workspace/diagnostic":
- return caps.DiagnosticProvider != nil
- case "textDocument/inlineCompletion":
- return caps.InlineCompletionProvider != nil
- case "workspace/textDocumentContent":
- return caps.Workspace != nil && caps.Workspace.TextDocumentContent != nil
- case "textDocument/willSaveWaitUntil":
- if caps.TextDocumentSync == nil {
- return false
- }
- return true
- case "textDocument/completion", "completionItem/resolve":
- return caps.CompletionProvider != nil
- case "textDocument/signatureHelp":
- return caps.SignatureHelpProvider != nil
- case "textDocument/documentHighlight":
- return caps.DocumentHighlightProvider != nil
- case "textDocument/documentSymbol":
- return caps.DocumentSymbolProvider != nil
- case "textDocument/codeAction", "codeAction/resolve":
- return caps.CodeActionProvider != nil
- case "workspace/symbol", "workspaceSymbol/resolve":
- return caps.WorkspaceSymbolProvider != nil
- case "textDocument/codeLens", "codeLens/resolve":
- return caps.CodeLensProvider != nil
- case "textDocument/documentLink", "documentLink/resolve":
- return caps.DocumentLinkProvider != nil
- case "textDocument/formatting":
- return caps.DocumentFormattingProvider != nil
- case "textDocument/rangeFormatting":
- return caps.DocumentRangeFormattingProvider != nil
- case "textDocument/rangesFormatting":
- return caps.DocumentRangeFormattingProvider != nil
- case "textDocument/onTypeFormatting":
- return caps.DocumentOnTypeFormattingProvider != nil
- case "textDocument/rename", "textDocument/prepareRename":
- return caps.RenameProvider != nil
- case "workspace/executeCommand":
- return caps.ExecuteCommandProvider != nil
- default:
- return true
- }
-}
diff --git a/internal/lsp/client.go b/internal/lsp/client.go
index e09a6a446db2f62476e072c79daadd2d832f895b..08b5cc2cc438546d3f60674d4f1daf7906b7b21a 100644
--- a/internal/lsp/client.go
+++ b/internal/lsp/client.go
@@ -1,291 +1,160 @@
package lsp
import (
- "bufio"
"context"
"encoding/json"
"fmt"
- "io"
"log/slog"
"maps"
"os"
- "os/exec"
"path/filepath"
- "slices"
"strings"
- "sync"
"sync/atomic"
"time"
"github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/log"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
+ "github.com/charmbracelet/crush/internal/csync"
+ "github.com/charmbracelet/crush/internal/fsext"
+ powernap "github.com/charmbracelet/x/powernap/pkg/lsp"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
+ "github.com/charmbracelet/x/powernap/pkg/transport"
)
type Client struct {
- Cmd *exec.Cmd
- stdin io.WriteCloser
- stdout *bufio.Reader
- stderr io.ReadCloser
-
- // Client name for identification
- name string
+ client *powernap.Client
+ name string
// File types this LSP server handles (e.g., .go, .rs, .py)
fileTypes []string
+ // Configuration for this LSP client
+ config config.LSPConfig
+
// Diagnostic change callback
onDiagnosticsChanged func(name string, count int)
- // Request ID counter
- nextID atomic.Int32
-
- // Response handlers
- handlers map[int32]chan *Message
- handlersMu sync.RWMutex
-
- // Server request handlers
- serverRequestHandlers map[string]ServerRequestHandler
- serverHandlersMu sync.RWMutex
-
- // Notification handlers
- notificationHandlers map[string]NotificationHandler
- notificationMu sync.RWMutex
-
// Diagnostic cache
- diagnostics map[protocol.DocumentURI][]protocol.Diagnostic
- diagnosticsMu sync.RWMutex
+ diagnostics *csync.Map[protocol.DocumentURI, []protocol.Diagnostic]
// Files are currently opened by the LSP
- openFiles map[string]*OpenFileInfo
- openFilesMu sync.RWMutex
+ openFiles *csync.Map[string, *OpenFileInfo]
// Server state
serverState atomic.Value
-
- // Server capabilities as returned by initialize
- caps protocol.ServerCapabilities
- capsMu sync.RWMutex
- capsSet atomic.Bool
}
-// NewClient creates a new LSP client.
-func NewClient(ctx context.Context, name string, config config.LSPConfig) (*Client, error) {
- cmd := exec.CommandContext(ctx, config.Command, config.Args...)
-
- // Copy env
- cmd.Env = slices.Concat(os.Environ(), config.ResolvedEnv())
-
- stdin, err := cmd.StdinPipe()
- if err != nil {
- return nil, fmt.Errorf("failed to create stdin pipe: %w", err)
- }
-
- stdout, err := cmd.StdoutPipe()
+// New creates a new LSP client using the powernap implementation.
+func New(ctx context.Context, name string, config config.LSPConfig) (*Client, error) {
+ // Convert working directory to file URI
+ workDir, err := os.Getwd()
if err != nil {
- return nil, fmt.Errorf("failed to create stdout pipe: %w", err)
+ return nil, fmt.Errorf("failed to get working directory: %w", err)
+ }
+
+ rootURI := string(protocol.URIFromPath(workDir))
+
+ // Create powernap client config
+ clientConfig := powernap.ClientConfig{
+ Command: config.Command,
+ Args: config.Args,
+ RootURI: rootURI,
+ Environment: func() map[string]string {
+ env := make(map[string]string)
+ maps.Copy(env, config.Env)
+ return env
+ }(),
+ Settings: config.Options,
+ InitOptions: config.InitOptions,
+ WorkspaceFolders: []protocol.WorkspaceFolder{
+ {
+ URI: rootURI,
+ Name: filepath.Base(workDir),
+ },
+ },
}
- stderr, err := cmd.StderrPipe()
+ // Create the powernap client
+ powernapClient, err := powernap.NewClient(clientConfig)
if err != nil {
- return nil, fmt.Errorf("failed to create stderr pipe: %w", err)
+ return nil, fmt.Errorf("failed to create powernap client: %w", err)
}
client := &Client{
- Cmd: cmd,
- name: name,
- fileTypes: config.FileTypes,
- stdin: stdin,
- stdout: bufio.NewReader(stdout),
- stderr: stderr,
- handlers: make(map[int32]chan *Message),
- notificationHandlers: make(map[string]NotificationHandler),
- serverRequestHandlers: make(map[string]ServerRequestHandler),
- diagnostics: make(map[protocol.DocumentURI][]protocol.Diagnostic),
- openFiles: make(map[string]*OpenFileInfo),
+ client: powernapClient,
+ name: name,
+ fileTypes: config.FileTypes,
+ diagnostics: csync.NewMap[protocol.DocumentURI, []protocol.Diagnostic](),
+ openFiles: csync.NewMap[string, *OpenFileInfo](),
+ config: config,
}
// Initialize server state
client.serverState.Store(StateStarting)
- // Start the LSP server process
- if err := cmd.Start(); err != nil {
- return nil, fmt.Errorf("failed to start LSP server: %w", err)
- }
-
- // Handle stderr in a separate goroutine
- go func() {
- scanner := bufio.NewScanner(stderr)
- for scanner.Scan() {
- slog.Error("LSP Server", "err", scanner.Text())
- }
- if err := scanner.Err(); err != nil {
- slog.Error("Error reading", "err", err)
- }
- }()
-
- // Start message handling loop
- go func() {
- defer log.RecoverPanic("LSP-message-handler", func() {
- slog.Error("LSP message handler crashed, LSP functionality may be impaired")
- })
- client.handleMessages()
- }()
-
return client, nil
}
-func (c *Client) RegisterNotificationHandler(method string, handler NotificationHandler) {
- c.notificationMu.Lock()
- defer c.notificationMu.Unlock()
- c.notificationHandlers[method] = handler
-}
-
-func (c *Client) RegisterServerRequestHandler(method string, handler ServerRequestHandler) {
- c.serverHandlersMu.Lock()
- defer c.serverHandlersMu.Unlock()
- c.serverRequestHandlers[method] = handler
-}
-
-func (c *Client) InitializeLSPClient(ctx context.Context, workspaceDir string) (*protocol.InitializeResult, error) {
- initParams := protocol.ParamInitialize{
- WorkspaceFoldersInitializeParams: protocol.WorkspaceFoldersInitializeParams{
- WorkspaceFolders: []protocol.WorkspaceFolder{
- {
- URI: protocol.URI(protocol.URIFromPath(workspaceDir)),
- Name: workspaceDir,
- },
- },
- },
-
- XInitializeParams: protocol.XInitializeParams{
- ProcessID: int32(os.Getpid()),
- ClientInfo: &protocol.ClientInfo{
- Name: "mcp-language-server",
- Version: "0.1.0",
- },
- RootPath: workspaceDir,
- RootURI: protocol.URIFromPath(workspaceDir),
- Capabilities: protocol.ClientCapabilities{
- Workspace: protocol.WorkspaceClientCapabilities{
- Configuration: true,
- DidChangeConfiguration: protocol.DidChangeConfigurationClientCapabilities{
- DynamicRegistration: true,
- },
- DidChangeWatchedFiles: protocol.DidChangeWatchedFilesClientCapabilities{
- DynamicRegistration: true,
- RelativePatternSupport: true,
- },
- },
- TextDocument: protocol.TextDocumentClientCapabilities{
- Synchronization: &protocol.TextDocumentSyncClientCapabilities{
- DynamicRegistration: true,
- DidSave: true,
- },
- Completion: protocol.CompletionClientCapabilities{
- CompletionItem: protocol.ClientCompletionItemOptions{},
- },
- CodeLens: &protocol.CodeLensClientCapabilities{
- DynamicRegistration: true,
- },
- DocumentSymbol: protocol.DocumentSymbolClientCapabilities{},
- CodeAction: protocol.CodeActionClientCapabilities{
- CodeActionLiteralSupport: protocol.ClientCodeActionLiteralOptions{
- CodeActionKind: protocol.ClientCodeActionKindOptions{
- ValueSet: []protocol.CodeActionKind{},
- },
- },
- },
- PublishDiagnostics: protocol.PublishDiagnosticsClientCapabilities{
- VersionSupport: true,
- },
- SemanticTokens: protocol.SemanticTokensClientCapabilities{
- Requests: protocol.ClientSemanticTokensRequestOptions{
- Range: &protocol.Or_ClientSemanticTokensRequestOptions_range{},
- Full: &protocol.Or_ClientSemanticTokensRequestOptions_full{},
- },
- TokenTypes: []string{},
- TokenModifiers: []string{},
- Formats: []protocol.TokenFormat{},
- },
- },
- Window: protocol.WindowClientCapabilities{},
- },
- InitializationOptions: map[string]any{
- "codelenses": map[string]bool{
- "generate": true,
- "regenerate_cgo": true,
- "test": true,
- "tidy": true,
- "upgrade_dependency": true,
- "vendor": true,
- "vulncheck": false,
- },
- },
- },
- }
-
- result, err := c.Initialize(ctx, initParams)
- if err != nil {
- return nil, fmt.Errorf("initialize failed: %w", err)
+// Initialize initializes the LSP client and returns the server capabilities.
+func (c *Client) Initialize(ctx context.Context, workspaceDir string) (*protocol.InitializeResult, error) {
+ if err := c.client.Initialize(ctx, false); err != nil {
+ return nil, fmt.Errorf("failed to initialize powernap client: %w", err)
+ }
+
+ // Convert powernap capabilities to protocol capabilities
+ caps := c.client.GetCapabilities()
+ protocolCaps := protocol.ServerCapabilities{
+ TextDocumentSync: caps.TextDocumentSync,
+ CompletionProvider: func() *protocol.CompletionOptions {
+ if caps.CompletionProvider != nil {
+ return &protocol.CompletionOptions{
+ TriggerCharacters: caps.CompletionProvider.TriggerCharacters,
+ AllCommitCharacters: caps.CompletionProvider.AllCommitCharacters,
+ ResolveProvider: caps.CompletionProvider.ResolveProvider,
+ }
+ }
+ return nil
+ }(),
}
- c.setCapabilities(result.Capabilities)
-
- if err := c.Initialized(ctx, protocol.InitializedParams{}); err != nil {
- return nil, fmt.Errorf("initialized notification failed: %w", err)
+ result := &protocol.InitializeResult{
+ Capabilities: protocolCaps,
}
- // Register handlers
c.RegisterServerRequestHandler("workspace/applyEdit", HandleApplyEdit)
c.RegisterServerRequestHandler("workspace/configuration", HandleWorkspaceConfiguration)
c.RegisterServerRequestHandler("client/registerCapability", HandleRegisterCapability)
c.RegisterNotificationHandler("window/showMessage", HandleServerMessage)
- c.RegisterNotificationHandler("textDocument/publishDiagnostics", func(params json.RawMessage) {
+ c.RegisterNotificationHandler("textDocument/publishDiagnostics", func(_ context.Context, _ string, params json.RawMessage) {
HandleDiagnostics(c, params)
})
- return &result, nil
+ return result, nil
}
-func (c *Client) Close() error {
+// Close closes the LSP client.
+func (c *Client) Close(ctx context.Context) error {
// Try to close all open files first
- ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+ ctx, cancel := context.WithTimeout(ctx, 5*time.Second)
defer cancel()
- // Attempt to close files but continue shutdown regardless
c.CloseAllFiles(ctx)
- // Close stdin to signal the server
- if err := c.stdin.Close(); err != nil {
- return fmt.Errorf("failed to close stdin: %w", err)
+ // Shutdown and exit the client
+ if err := c.client.Shutdown(ctx); err != nil {
+ slog.Warn("Failed to shutdown LSP client", "error", err)
}
- // Use a channel to handle the Wait with timeout
- done := make(chan error, 1)
- go func() {
- done <- c.Cmd.Wait()
- }()
-
- // Wait for process to exit with timeout
- select {
- case err := <-done:
- return err
- case <-time.After(2 * time.Second):
- // If we timeout, try to kill the process
- if err := c.Cmd.Process.Kill(); err != nil {
- return fmt.Errorf("failed to kill process: %w", err)
- }
- return fmt.Errorf("process killed after timeout")
- }
+ return c.client.Exit()
}
+// ServerState represents the state of an LSP server
type ServerState int
const (
StateStarting ServerState = iota
StateReady
StateError
+ StateDisabled
)
// GetServerState returns the current state of the LSP server
@@ -311,8 +180,7 @@ func (c *Client) SetDiagnosticsCallback(callback func(name string, count int)) {
c.onDiagnosticsChanged = callback
}
-// WaitForServerReady waits for the server to be ready by polling the server
-// with a simple request until it responds successfully or times out
+// WaitForServerReady waits for the server to be ready
func (c *Client) WaitForServerReady(ctx context.Context) error {
cfg := config.Get()
@@ -327,7 +195,7 @@ func (c *Client) WaitForServerReady(ctx context.Context) error {
ticker := time.NewTicker(500 * time.Millisecond)
defer ticker.Stop()
- if cfg.Options.DebugLSP {
+ if cfg != nil && cfg.Options.DebugLSP {
slog.Debug("Waiting for LSP server to be ready...")
}
@@ -339,17 +207,17 @@ func (c *Client) WaitForServerReady(ctx context.Context) error {
c.SetServerState(StateError)
return fmt.Errorf("timeout waiting for LSP server to be ready")
case <-ticker.C:
- // Try a ping method appropriate for this server type
- if err := c.ping(ctx); err != nil {
- if cfg.Options.DebugLSP {
- slog.Debug("LSP server not ready yet", "error", err, "server", c.name)
+ // Check if client is running
+ if !c.client.IsRunning() {
+ if cfg != nil && cfg.Options.DebugLSP {
+ slog.Debug("LSP server not ready yet", "server", c.name)
}
continue
}
- // Server responded successfully
+ // Server is ready
c.SetServerState(StateReady)
- if cfg.Options.DebugLSP {
+ if cfg != nil && cfg.Options.DebugLSP {
slog.Debug("LSP server is ready")
}
return nil
@@ -357,171 +225,13 @@ func (c *Client) WaitForServerReady(ctx context.Context) error {
}
}
-// ServerType represents the type of LSP server
-type ServerType int
-
-const (
- ServerTypeUnknown ServerType = iota
- ServerTypeGo
- ServerTypeTypeScript
- ServerTypeRust
- ServerTypePython
- ServerTypeGeneric
-)
-
-// detectServerType tries to determine what type of LSP server we're dealing with
-func (c *Client) detectServerType() ServerType {
- if c.Cmd == nil {
- return ServerTypeUnknown
- }
-
- cmdPath := strings.ToLower(c.Cmd.Path)
-
- switch {
- case strings.Contains(cmdPath, "gopls"):
- return ServerTypeGo
- case strings.Contains(cmdPath, "typescript") || strings.Contains(cmdPath, "vtsls") || strings.Contains(cmdPath, "tsserver"):
- return ServerTypeTypeScript
- case strings.Contains(cmdPath, "rust-analyzer"):
- return ServerTypeRust
- case strings.Contains(cmdPath, "pyright") || strings.Contains(cmdPath, "pylsp") || strings.Contains(cmdPath, "python"):
- return ServerTypePython
- default:
- return ServerTypeGeneric
- }
-}
-
-// openKeyConfigFiles opens important configuration files that help initialize the server
-func (c *Client) openKeyConfigFiles(ctx context.Context) {
- workDir := config.Get().WorkingDir()
- serverType := c.detectServerType()
-
- var filesToOpen []string
-
- switch serverType {
- case ServerTypeTypeScript:
- // TypeScript servers need these config files to properly initialize
- filesToOpen = []string{
- filepath.Join(workDir, "tsconfig.json"),
- filepath.Join(workDir, "package.json"),
- filepath.Join(workDir, "jsconfig.json"),
- }
-
- // Also find and open a few TypeScript files to help the server initialize
- c.openTypeScriptFiles(ctx, workDir)
- case ServerTypeGo:
- filesToOpen = []string{
- filepath.Join(workDir, "go.mod"),
- filepath.Join(workDir, "go.sum"),
- }
- case ServerTypeRust:
- filesToOpen = []string{
- filepath.Join(workDir, "Cargo.toml"),
- filepath.Join(workDir, "Cargo.lock"),
- }
- }
-
- // Try to open each file, ignoring errors if they don't exist
- for _, file := range filesToOpen {
- if _, err := os.Stat(file); err == nil {
- // File exists, try to open it
- if err := c.OpenFile(ctx, file); err != nil {
- slog.Debug("Failed to open key config file", "file", file, "error", err)
- } else {
- slog.Debug("Opened key config file for initialization", "file", file)
- }
- }
- }
-}
-
-// ping sends a ping request...
-func (c *Client) ping(ctx context.Context) error {
- if _, err := c.Symbol(ctx, protocol.WorkspaceSymbolParams{}); err == nil {
- return nil
- }
- // This is a very lightweight request that should work for most servers
- return c.Notify(ctx, "$/cancelRequest", protocol.CancelParams{ID: "1"})
-}
-
-// openTypeScriptFiles finds and opens TypeScript files to help initialize the server
-func (c *Client) openTypeScriptFiles(ctx context.Context, workDir string) {
- cfg := config.Get()
- filesOpened := 0
- maxFilesToOpen := 5 // Limit to a reasonable number of files
-
- // Find and open TypeScript files
- err := filepath.WalkDir(workDir, func(path string, d os.DirEntry, err error) error {
- if err != nil {
- return err
- }
-
- // Skip directories and non-TypeScript files
- if d.IsDir() {
- // Skip common directories to avoid wasting time
- if shouldSkipDir(path) {
- return filepath.SkipDir
- }
- return nil
- }
-
- // Check if we've opened enough files
- if filesOpened >= maxFilesToOpen {
- return filepath.SkipAll
- }
-
- // Check file extension
- ext := filepath.Ext(path)
- if ext == ".ts" || ext == ".tsx" || ext == ".js" || ext == ".jsx" {
- // Try to open the file
- if err := c.OpenFile(ctx, path); err == nil {
- filesOpened++
- if cfg.Options.DebugLSP {
- slog.Debug("Opened TypeScript file for initialization", "file", path)
- }
- }
- }
-
- return nil
- })
-
- if err != nil && cfg.Options.DebugLSP {
- slog.Debug("Error walking directory for TypeScript files", "error", err)
- }
-
- if cfg.Options.DebugLSP {
- slog.Debug("Opened TypeScript files for initialization", "count", filesOpened)
- }
-}
-
-// shouldSkipDir returns true if the directory should be skipped during file search
-func shouldSkipDir(path string) bool {
- dirName := filepath.Base(path)
-
- // Skip hidden directories
- if strings.HasPrefix(dirName, ".") {
- return true
- }
-
- // Skip common directories that won't contain relevant source files
- skipDirs := map[string]bool{
- "node_modules": true,
- "dist": true,
- "build": true,
- "coverage": true,
- "vendor": true,
- "target": true,
- }
-
- return skipDirs[dirName]
-}
-
+// OpenFileInfo contains information about an open file
type OpenFileInfo struct {
Version int32
URI protocol.DocumentURI
}
-// HandlesFile checks if this LSP client handles the given file based on its
-// extension.
+// HandlesFile checks if this LSP client handles the given file based on its extension.
func (c *Client) HandlesFile(path string) bool {
// If no file types are specified, handle all files (backward compatibility)
if len(c.fileTypes) == 0 {
@@ -529,13 +239,13 @@ func (c *Client) HandlesFile(path string) bool {
}
name := strings.ToLower(filepath.Base(path))
- for _, filetpe := range c.fileTypes {
- suffix := strings.ToLower(filetpe)
+ for _, filetype := range c.fileTypes {
+ suffix := strings.ToLower(filetype)
if !strings.HasPrefix(suffix, ".") {
suffix = "." + suffix
}
if strings.HasSuffix(name, suffix) {
- slog.Debug("handles file", "name", c.name, "file", name, "filetype", filetpe)
+ slog.Debug("handles file", "name", c.name, "file", name, "filetype", filetype)
return true
}
}
@@ -543,6 +253,7 @@ func (c *Client) HandlesFile(path string) bool {
return false
}
+// OpenFile opens a file in the LSP server.
func (c *Client) OpenFile(ctx context.Context, filepath string) error {
if !c.HandlesFile(filepath) {
return nil
@@ -550,12 +261,9 @@ func (c *Client) OpenFile(ctx context.Context, filepath string) error {
uri := string(protocol.URIFromPath(filepath))
- c.openFilesMu.Lock()
- if _, exists := c.openFiles[uri]; exists {
- c.openFilesMu.Unlock()
+ if _, exists := c.openFiles.Get(uri); exists {
return nil // Already open
}
- c.openFilesMu.Unlock()
// Skip files that do not exist or cannot be read
content, err := os.ReadFile(filepath)
@@ -563,29 +271,20 @@ func (c *Client) OpenFile(ctx context.Context, filepath string) error {
return fmt.Errorf("error reading file: %w", err)
}
- params := protocol.DidOpenTextDocumentParams{
- TextDocument: protocol.TextDocumentItem{
- URI: protocol.DocumentURI(uri),
- LanguageID: DetectLanguageID(uri),
- Version: 1,
- Text: string(content),
- },
- }
-
- if err := c.DidOpen(ctx, params); err != nil {
+ // Notify the server about the opened document
+ if err = c.client.NotifyDidOpenTextDocument(ctx, uri, string(DetectLanguageID(uri)), 1, string(content)); err != nil {
return err
}
- c.openFilesMu.Lock()
- c.openFiles[uri] = &OpenFileInfo{
+ c.openFiles.Set(uri, &OpenFileInfo{
Version: 1,
URI: protocol.DocumentURI(uri),
- }
- c.openFilesMu.Unlock()
+ })
return nil
}
+// NotifyChange notifies the server about a file change.
func (c *Client) NotifyChange(ctx context.Context, filepath string) error {
uri := string(protocol.URIFromPath(filepath))
@@ -594,84 +293,62 @@ func (c *Client) NotifyChange(ctx context.Context, filepath string) error {
return fmt.Errorf("error reading file: %w", err)
}
- c.openFilesMu.Lock()
- fileInfo, isOpen := c.openFiles[uri]
+ fileInfo, isOpen := c.openFiles.Get(uri)
if !isOpen {
- c.openFilesMu.Unlock()
return fmt.Errorf("cannot notify change for unopened file: %s", filepath)
}
// Increment version
fileInfo.Version++
- version := fileInfo.Version
- c.openFilesMu.Unlock()
- params := protocol.DidChangeTextDocumentParams{
- TextDocument: protocol.VersionedTextDocumentIdentifier{
- TextDocumentIdentifier: protocol.TextDocumentIdentifier{
- URI: protocol.DocumentURI(uri),
- },
- Version: version,
- },
- ContentChanges: []protocol.TextDocumentContentChangeEvent{
- {
- Value: protocol.TextDocumentContentChangeWholeDocument{
- Text: string(content),
- },
+ // Create change event
+ changes := []protocol.TextDocumentContentChangeEvent{
+ {
+ Value: protocol.TextDocumentContentChangeWholeDocument{
+ Text: string(content),
},
},
}
- return c.DidChange(ctx, params)
+ return c.client.NotifyDidChangeTextDocument(ctx, uri, int(fileInfo.Version), changes)
}
+// CloseFile closes a file in the LSP server.
func (c *Client) CloseFile(ctx context.Context, filepath string) error {
cfg := config.Get()
uri := string(protocol.URIFromPath(filepath))
- c.openFilesMu.Lock()
- if _, exists := c.openFiles[uri]; !exists {
- c.openFilesMu.Unlock()
+ if _, exists := c.openFiles.Get(uri); !exists {
return nil // Already closed
}
- c.openFilesMu.Unlock()
-
- params := protocol.DidCloseTextDocumentParams{
- TextDocument: protocol.TextDocumentIdentifier{
- URI: protocol.DocumentURI(uri),
- },
- }
if cfg.Options.DebugLSP {
slog.Debug("Closing file", "file", filepath)
}
- if err := c.DidClose(ctx, params); err != nil {
+
+ if err := c.client.NotifyDidCloseTextDocument(ctx, uri); err != nil {
return err
}
- c.openFilesMu.Lock()
- delete(c.openFiles, uri)
- c.openFilesMu.Unlock()
+ c.openFiles.Del(uri)
return nil
}
+// IsFileOpen checks if a file is currently open.
func (c *Client) IsFileOpen(filepath string) bool {
uri := string(protocol.URIFromPath(filepath))
- c.openFilesMu.RLock()
- defer c.openFilesMu.RUnlock()
- _, exists := c.openFiles[uri]
+ _, exists := c.openFiles.Get(uri)
return exists
}
-// CloseAllFiles closes all currently open files
+// CloseAllFiles closes all currently open files.
func (c *Client) CloseAllFiles(ctx context.Context) {
cfg := config.Get()
- c.openFilesMu.Lock()
- filesToClose := make([]string, 0, len(c.openFiles))
+ filesToClose := make([]string, 0, c.openFiles.Len())
// First collect all URIs that need to be closed
- for uri := range c.openFiles {
+ for uri := range c.openFiles.Seq2() {
// Convert URI back to file path using proper URI handling
filePath, err := protocol.DocumentURI(uri).Path()
if err != nil {
@@ -680,38 +357,32 @@ func (c *Client) CloseAllFiles(ctx context.Context) {
}
filesToClose = append(filesToClose, filePath)
}
- c.openFilesMu.Unlock()
// Then close them all
for _, filePath := range filesToClose {
err := c.CloseFile(ctx, filePath)
- if err != nil && cfg.Options.DebugLSP {
+ if err != nil && cfg != nil && cfg.Options.DebugLSP {
slog.Warn("Error closing file", "file", filePath, "error", err)
}
}
- if cfg.Options.DebugLSP {
+ if cfg != nil && cfg.Options.DebugLSP {
slog.Debug("Closed all files", "files", filesToClose)
}
}
+// GetFileDiagnostics returns diagnostics for a specific file.
func (c *Client) GetFileDiagnostics(uri protocol.DocumentURI) []protocol.Diagnostic {
- c.diagnosticsMu.RLock()
- defer c.diagnosticsMu.RUnlock()
-
- return c.diagnostics[uri]
+ diags, _ := c.diagnostics.Get(uri)
+ return diags
}
-// GetDiagnostics returns all diagnostics for all files
+// GetDiagnostics returns all diagnostics for all files.
func (c *Client) GetDiagnostics() map[protocol.DocumentURI][]protocol.Diagnostic {
- c.diagnosticsMu.RLock()
- defer c.diagnosticsMu.RUnlock()
-
- return maps.Clone(c.diagnostics)
+ return maps.Collect(c.diagnostics.Seq2())
}
-// OpenFileOnDemand opens a file only if it's not already open
-// This is used for lazy-loading files when they're actually needed
+// OpenFileOnDemand opens a file only if it's not already open.
func (c *Client) OpenFileOnDemand(ctx context.Context, filepath string) error {
// Check if the file is already open
if c.IsFileOpen(filepath) {
@@ -722,8 +393,7 @@ func (c *Client) OpenFileOnDemand(ctx context.Context, filepath string) error {
return c.OpenFile(ctx, filepath)
}
-// GetDiagnosticsForFile ensures a file is open and returns its diagnostics
-// This is useful for on-demand diagnostics when using lazy loading
+// GetDiagnosticsForFile ensures a file is open and returns its diagnostics.
func (c *Client) GetDiagnosticsForFile(ctx context.Context, filepath string) ([]protocol.Diagnostic, error) {
documentURI := protocol.URIFromPath(filepath)
@@ -738,16 +408,64 @@ func (c *Client) GetDiagnosticsForFile(ctx context.Context, filepath string) ([]
}
// Get diagnostics
- c.diagnosticsMu.RLock()
- diagnostics := c.diagnostics[documentURI]
- c.diagnosticsMu.RUnlock()
+ diagnostics, _ := c.diagnostics.Get(documentURI)
return diagnostics, nil
}
-// ClearDiagnosticsForURI removes diagnostics for a specific URI from the cache
+// ClearDiagnosticsForURI removes diagnostics for a specific URI from the cache.
func (c *Client) ClearDiagnosticsForURI(uri protocol.DocumentURI) {
- c.diagnosticsMu.Lock()
- defer c.diagnosticsMu.Unlock()
- delete(c.diagnostics, uri)
+ c.diagnostics.Del(uri)
+}
+
+// RegisterNotificationHandler registers a notification handler.
+func (c *Client) RegisterNotificationHandler(method string, handler transport.NotificationHandler) {
+ c.client.RegisterNotificationHandler(method, handler)
+}
+
+// RegisterServerRequestHandler handles server requests.
+func (c *Client) RegisterServerRequestHandler(method string, handler transport.Handler) {
+ c.client.RegisterHandler(method, handler)
+}
+
+// DidChangeWatchedFiles sends a workspace/didChangeWatchedFiles notification to the server.
+func (c *Client) DidChangeWatchedFiles(ctx context.Context, params protocol.DidChangeWatchedFilesParams) error {
+ return c.client.NotifyDidChangeWatchedFiles(ctx, params.Changes)
+}
+
+// openKeyConfigFiles opens important configuration files that help initialize the server.
+func (c *Client) openKeyConfigFiles(ctx context.Context) {
+ wd, err := os.Getwd()
+ if err != nil {
+ return
+ }
+
+ // Try to open each file, ignoring errors if they don't exist
+ for _, file := range c.config.RootMarkers {
+ file = filepath.Join(wd, file)
+ if _, err := os.Stat(file); err == nil {
+ // File exists, try to open it
+ if err := c.OpenFile(ctx, file); err != nil {
+ slog.Debug("Failed to open key config file", "file", file, "error", err)
+ } else {
+ slog.Debug("Opened key config file for initialization", "file", file)
+ }
+ }
+ }
+}
+
+// HasRootMarkers checks if any of the specified root marker patterns exist in the given directory.
+// Uses glob patterns to match files, allowing for more flexible matching.
+func HasRootMarkers(dir string, rootMarkers []string) bool {
+ if len(rootMarkers) == 0 {
+ return true
+ }
+ for _, pattern := range rootMarkers {
+ // Use fsext.GlobWithDoubleStar to find matches
+ matches, _, err := fsext.GlobWithDoubleStar(pattern, dir, 1)
+ if err == nil && len(matches) > 0 {
+ return true
+ }
+ }
+ return false
}
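
Illustrative aside (not part of the patch): the expected lifecycle of the rewritten client, mirroring what createAndStartLSPClient does above. The exampleLifecycle helper, the "gopls" name, and the "." workspace directory are hypothetical.

package lsp

import (
	"context"
	"time"

	"github.com/charmbracelet/crush/internal/config"
)

// exampleLifecycle is a hypothetical sketch: skip servers whose root markers
// are absent, create and initialize the client, wait for readiness, and close.
func exampleLifecycle(ctx context.Context, cfg config.LSPConfig) error {
	if !HasRootMarkers(".", cfg.RootMarkers) {
		return nil // nothing to attach to in this workspace
	}
	client, err := New(ctx, "gopls", cfg)
	if err != nil {
		return err
	}
	initCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
	defer cancel()
	if _, err := client.Initialize(initCtx, "."); err != nil {
		_ = client.Close(ctx)
		return err
	}
	defer func() { _ = client.Close(ctx) }()
	return client.WaitForServerReady(ctx)
}
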
diff --git a/internal/lsp/client_test.go b/internal/lsp/client_test.go
index f97b9bdddba1e0fa5ab22cbe68635b4f1b9b02c3..99ef0ca3143e5b8689ba3b63fd5c172456a46c24 100644
--- a/internal/lsp/client_test.go
+++ b/internal/lsp/client_test.go
@@ -1,93 +1,54 @@
package lsp
import (
+ "context"
"testing"
- "github.com/stretchr/testify/require"
+ "github.com/charmbracelet/crush/internal/config"
)
-func TestHandlesFile(t *testing.T) {
- tests := []struct {
- name string
- fileTypes []string
- filepath string
- expected bool
- }{
- {
- name: "no file types specified - handles all files",
- fileTypes: nil,
- filepath: "test.go",
- expected: true,
- },
- {
- name: "empty file types - handles all files",
- fileTypes: []string{},
- filepath: "test.go",
- expected: true,
- },
- {
- name: "matches .go extension",
- fileTypes: []string{".go"},
- filepath: "main.go",
- expected: true,
- },
- {
- name: "matches go extension without dot",
- fileTypes: []string{"go"},
- filepath: "main.go",
- expected: true,
- },
- {
- name: "matches one of multiple extensions",
- fileTypes: []string{".js", ".ts", ".tsx"},
- filepath: "component.tsx",
- expected: true,
- },
- {
- name: "does not match extension",
- fileTypes: []string{".go", ".rs"},
- filepath: "script.sh",
- expected: false,
- },
- {
- name: "matches with full path",
- fileTypes: []string{".sh"},
- filepath: "/usr/local/bin/script.sh",
- expected: true,
- },
- {
- name: "case insensitive matching",
- fileTypes: []string{".GO"},
- filepath: "main.go",
- expected: true,
- },
- {
- name: "bash file types",
- fileTypes: []string{".sh", ".bash", ".zsh", ".ksh"},
- filepath: "script.sh",
- expected: true,
- },
- {
- name: "bash should not handle go files",
- fileTypes: []string{".sh", ".bash", ".zsh", ".ksh"},
- filepath: "main.go",
- expected: false,
- },
- {
- name: "bash should not handle json files",
- fileTypes: []string{".sh", ".bash", ".zsh", ".ksh"},
- filepath: "config.json",
- expected: false,
- },
+func TestPowernapClient(t *testing.T) {
+ ctx := context.Background()
+
+ // Create a simple config for testing
+ cfg := config.LSPConfig{
+ Command: "echo", // Use echo as a dummy command that won't fail
+ Args: []string{"hello"},
+ FileTypes: []string{"go"},
+ Env: map[string]string{},
+ }
+
+ // Test creating a powernap client - this will likely fail with echo
+ // but we can still test the basic structure
+ client, err := New(ctx, "test", cfg)
+ if err != nil {
+ // Expected to fail with echo command, skip the rest
+ t.Skipf("Powernap client creation failed as expected with dummy command: %v", err)
+ return
+ }
+
+ // If we get here, test basic interface methods
+ if client.GetName() != "test" {
+ t.Errorf("Expected name 'test', got '%s'", client.GetName())
+ }
+
+ if !client.HandlesFile("test.go") {
+ t.Error("Expected client to handle .go files")
+ }
+
+ if client.HandlesFile("test.py") {
+ t.Error("Expected client to not handle .py files")
+ }
+
+ // Test server state
+ client.SetServerState(StateReady)
+ if client.GetServerState() != StateReady {
+ t.Error("Expected server state to be StateReady")
}
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- client := &Client{
- fileTypes: tt.fileTypes,
- }
- result := client.HandlesFile(tt.filepath)
- require.Equal(t, tt.expected, result)
- })
+ // Clean up - expect this to fail with echo command
+ if err := client.Close(t.Context()); err != nil {
+ // Expected to fail with echo command
+ t.Logf("Close failed as expected with dummy command: %v", err)
}
}
diff --git a/internal/lsp/handlers.go b/internal/lsp/handlers.go
index 72f3018b3da969000672e5b4ba47f73f2b72df97..b386e0780f6f6db6db13be380496c60a6e3c457e 100644
--- a/internal/lsp/handlers.go
+++ b/internal/lsp/handlers.go
@@ -1,22 +1,22 @@
package lsp
import (
+ "context"
"encoding/json"
"log/slog"
"github.com/charmbracelet/crush/internal/config"
-
- "github.com/charmbracelet/crush/internal/lsp/protocol"
"github.com/charmbracelet/crush/internal/lsp/util"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
-// Requests
-
-func HandleWorkspaceConfiguration(params json.RawMessage) (any, error) {
+// HandleWorkspaceConfiguration handles workspace configuration requests
+func HandleWorkspaceConfiguration(_ context.Context, _ string, params json.RawMessage) (any, error) {
return []map[string]any{{}}, nil
}
-func HandleRegisterCapability(params json.RawMessage) (any, error) {
+// HandleRegisterCapability handles capability registration requests
+func HandleRegisterCapability(_ context.Context, _ string, params json.RawMessage) (any, error) {
var registerParams protocol.RegistrationParams
if err := json.Unmarshal(params, &registerParams); err != nil {
slog.Error("Error unmarshaling registration params", "error", err)
@@ -32,22 +32,20 @@ func HandleRegisterCapability(params json.RawMessage) (any, error) {
slog.Error("Error marshaling registration options", "error", err)
continue
}
-
var options protocol.DidChangeWatchedFilesRegistrationOptions
if err := json.Unmarshal(optionsJSON, &options); err != nil {
slog.Error("Error unmarshaling registration options", "error", err)
continue
}
-
// Store the file watchers registrations
notifyFileWatchRegistration(reg.ID, options.Watchers)
}
}
-
return nil, nil
}
-func HandleApplyEdit(params json.RawMessage) (any, error) {
+// HandleApplyEdit handles workspace edit requests
+func HandleApplyEdit(_ context.Context, _ string, params json.RawMessage) (any, error) {
var edit protocol.ApplyWorkspaceEditParams
if err := json.Unmarshal(params, &edit); err != nil {
return nil, err
@@ -80,21 +78,32 @@ func notifyFileWatchRegistration(id string, watchers []protocol.FileSystemWatche
}
}
-// Notifications
-
-func HandleServerMessage(params json.RawMessage) {
+// HandleServerMessage handles server messages
+func HandleServerMessage(_ context.Context, method string, params json.RawMessage) {
cfg := config.Get()
- var msg struct {
- Type int `json:"type"`
- Message string `json:"message"`
+ if !cfg.Options.DebugLSP {
+ return
}
- if err := json.Unmarshal(params, &msg); err == nil {
- if cfg.Options.DebugLSP {
- slog.Debug("Server message", "type", msg.Type, "message", msg.Message)
- }
+
+ var msg protocol.ShowMessageParams
+ if err := json.Unmarshal(params, &msg); err != nil {
+ slog.Debug("Server message", "type", msg.Type, "message", msg.Message)
+ return
+ }
+
+ switch msg.Type {
+ case protocol.Error:
+ slog.Error("LSP Server", "message", msg.Message)
+ case protocol.Warning:
+ slog.Warn("LSP Server", "message", msg.Message)
+ case protocol.Info:
+ slog.Info("LSP Server", "message", msg.Message)
+ case protocol.Log:
+ slog.Debug("LSP Server", "message", msg.Message)
}
}
+// HandleDiagnostics handles diagnostic notifications from the LSP server
func HandleDiagnostics(client *Client, params json.RawMessage) {
var diagParams protocol.PublishDiagnosticsParams
if err := json.Unmarshal(params, &diagParams); err != nil {
@@ -102,15 +111,13 @@ func HandleDiagnostics(client *Client, params json.RawMessage) {
return
}
- client.diagnosticsMu.Lock()
- client.diagnostics[diagParams.URI] = diagParams.Diagnostics
+ client.diagnostics.Set(diagParams.URI, diagParams.Diagnostics)
// Calculate total diagnostic count
totalCount := 0
- for _, diagnostics := range client.diagnostics {
+ for _, diagnostics := range client.diagnostics.Seq2() {
totalCount += len(diagnostics)
}
- client.diagnosticsMu.Unlock()
// Trigger callback if set
if client.onDiagnosticsChanged != nil {
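The handlers above now share one shape: context, JSON-RPC method name, raw params, optional result. A minimal sketch of an extra handler written against that shape (HandleWorkDoneProgressCreate itself is hypothetical; only the signature and the protocol types mirror the patch):

// Hypothetical handler following the same (ctx, method, params) contract as
// HandleWorkspaceConfiguration, HandleRegisterCapability and HandleApplyEdit.
func HandleWorkDoneProgressCreate(_ context.Context, _ string, params json.RawMessage) (any, error) {
    var p protocol.WorkDoneProgressCreateParams
    if err := json.Unmarshal(params, &p); err != nil {
        return nil, err
    }
    slog.Debug("work done progress created", "token", p.Token)
    return nil, nil
}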
diff --git a/internal/lsp/language.go b/internal/lsp/language.go
index 87d209f1dbc51eafbde4d85b0ce6001dd17729b5..7d6a1517e849b6f09352447b2acb05539b3220af 100644
--- a/internal/lsp/language.go
+++ b/internal/lsp/language.go
@@ -4,7 +4,7 @@ import (
"path/filepath"
"strings"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
func DetectLanguageID(uri string) protocol.LanguageKind {
diff --git a/internal/lsp/methods.go b/internal/lsp/methods.go
deleted file mode 100644
index afd087c1b86d5242e845e419c47234de11ce467f..0000000000000000000000000000000000000000
--- a/internal/lsp/methods.go
+++ /dev/null
@@ -1,554 +0,0 @@
-// Generated code. Do not edit
-package lsp
-
-import (
- "context"
-
- "github.com/charmbracelet/crush/internal/lsp/protocol"
-)
-
-// Implementation sends a textDocument/implementation request to the LSP server.
-// A request to resolve the implementation locations of a symbol at a given text document position. The request's parameter is of type TextDocumentPositionParams the response is of type Definition or a Thenable that resolves to such.
-func (c *Client) Implementation(ctx context.Context, params protocol.ImplementationParams) (protocol.Or_Result_textDocument_implementation, error) {
- var result protocol.Or_Result_textDocument_implementation
- err := c.Call(ctx, "textDocument/implementation", params, &result)
- return result, err
-}
-
-// TypeDefinition sends a textDocument/typeDefinition request to the LSP server.
-// A request to resolve the type definition locations of a symbol at a given text document position. The request's parameter is of type TextDocumentPositionParams the response is of type Definition or a Thenable that resolves to such.
-func (c *Client) TypeDefinition(ctx context.Context, params protocol.TypeDefinitionParams) (protocol.Or_Result_textDocument_typeDefinition, error) {
- var result protocol.Or_Result_textDocument_typeDefinition
- err := c.Call(ctx, "textDocument/typeDefinition", params, &result)
- return result, err
-}
-
-// DocumentColor sends a textDocument/documentColor request to the LSP server.
-// A request to list all color symbols found in a given text document. The request's parameter is of type DocumentColorParams the response is of type ColorInformation ColorInformation[] or a Thenable that resolves to such.
-func (c *Client) DocumentColor(ctx context.Context, params protocol.DocumentColorParams) ([]protocol.ColorInformation, error) {
- var result []protocol.ColorInformation
- err := c.Call(ctx, "textDocument/documentColor", params, &result)
- return result, err
-}
-
-// ColorPresentation sends a textDocument/colorPresentation request to the LSP server.
-// A request to list all presentation for a color. The request's parameter is of type ColorPresentationParams the response is of type ColorInformation ColorInformation[] or a Thenable that resolves to such.
-func (c *Client) ColorPresentation(ctx context.Context, params protocol.ColorPresentationParams) ([]protocol.ColorPresentation, error) {
- var result []protocol.ColorPresentation
- err := c.Call(ctx, "textDocument/colorPresentation", params, &result)
- return result, err
-}
-
-// FoldingRange sends a textDocument/foldingRange request to the LSP server.
-// A request to provide folding ranges in a document. The request's parameter is of type FoldingRangeParams, the response is of type FoldingRangeList or a Thenable that resolves to such.
-func (c *Client) FoldingRange(ctx context.Context, params protocol.FoldingRangeParams) ([]protocol.FoldingRange, error) {
- var result []protocol.FoldingRange
- err := c.Call(ctx, "textDocument/foldingRange", params, &result)
- return result, err
-}
-
-// Declaration sends a textDocument/declaration request to the LSP server.
-// A request to resolve the type definition locations of a symbol at a given text document position. The request's parameter is of type TextDocumentPositionParams the response is of type Declaration or a typed array of DeclarationLink or a Thenable that resolves to such.
-func (c *Client) Declaration(ctx context.Context, params protocol.DeclarationParams) (protocol.Or_Result_textDocument_declaration, error) {
- var result protocol.Or_Result_textDocument_declaration
- err := c.Call(ctx, "textDocument/declaration", params, &result)
- return result, err
-}
-
-// SelectionRange sends a textDocument/selectionRange request to the LSP server.
-// A request to provide selection ranges in a document. The request's parameter is of type SelectionRangeParams, the response is of type SelectionRange SelectionRange[] or a Thenable that resolves to such.
-func (c *Client) SelectionRange(ctx context.Context, params protocol.SelectionRangeParams) ([]protocol.SelectionRange, error) {
- var result []protocol.SelectionRange
- err := c.Call(ctx, "textDocument/selectionRange", params, &result)
- return result, err
-}
-
-// PrepareCallHierarchy sends a textDocument/prepareCallHierarchy request to the LSP server.
-// A request to result a CallHierarchyItem in a document at a given position. Can be used as an input to an incoming or outgoing call hierarchy. Since 3.16.0
-func (c *Client) PrepareCallHierarchy(ctx context.Context, params protocol.CallHierarchyPrepareParams) ([]protocol.CallHierarchyItem, error) {
- var result []protocol.CallHierarchyItem
- err := c.Call(ctx, "textDocument/prepareCallHierarchy", params, &result)
- return result, err
-}
-
-// IncomingCalls sends a callHierarchy/incomingCalls request to the LSP server.
-// A request to resolve the incoming calls for a given CallHierarchyItem. Since 3.16.0
-func (c *Client) IncomingCalls(ctx context.Context, params protocol.CallHierarchyIncomingCallsParams) ([]protocol.CallHierarchyIncomingCall, error) {
- var result []protocol.CallHierarchyIncomingCall
- err := c.Call(ctx, "callHierarchy/incomingCalls", params, &result)
- return result, err
-}
-
-// OutgoingCalls sends a callHierarchy/outgoingCalls request to the LSP server.
-// A request to resolve the outgoing calls for a given CallHierarchyItem. Since 3.16.0
-func (c *Client) OutgoingCalls(ctx context.Context, params protocol.CallHierarchyOutgoingCallsParams) ([]protocol.CallHierarchyOutgoingCall, error) {
- var result []protocol.CallHierarchyOutgoingCall
- err := c.Call(ctx, "callHierarchy/outgoingCalls", params, &result)
- return result, err
-}
-
-// SemanticTokensFull sends a textDocument/semanticTokens/full request to the LSP server.
-// Since 3.16.0
-func (c *Client) SemanticTokensFull(ctx context.Context, params protocol.SemanticTokensParams) (protocol.SemanticTokens, error) {
- var result protocol.SemanticTokens
- err := c.Call(ctx, "textDocument/semanticTokens/full", params, &result)
- return result, err
-}
-
-// SemanticTokensFullDelta sends a textDocument/semanticTokens/full/delta request to the LSP server.
-// Since 3.16.0
-func (c *Client) SemanticTokensFullDelta(ctx context.Context, params protocol.SemanticTokensDeltaParams) (protocol.Or_Result_textDocument_semanticTokens_full_delta, error) {
- var result protocol.Or_Result_textDocument_semanticTokens_full_delta
- err := c.Call(ctx, "textDocument/semanticTokens/full/delta", params, &result)
- return result, err
-}
-
-// SemanticTokensRange sends a textDocument/semanticTokens/range request to the LSP server.
-// Since 3.16.0
-func (c *Client) SemanticTokensRange(ctx context.Context, params protocol.SemanticTokensRangeParams) (protocol.SemanticTokens, error) {
- var result protocol.SemanticTokens
- err := c.Call(ctx, "textDocument/semanticTokens/range", params, &result)
- return result, err
-}
-
-// LinkedEditingRange sends a textDocument/linkedEditingRange request to the LSP server.
-// A request to provide ranges that can be edited together. Since 3.16.0
-func (c *Client) LinkedEditingRange(ctx context.Context, params protocol.LinkedEditingRangeParams) (protocol.LinkedEditingRanges, error) {
- var result protocol.LinkedEditingRanges
- err := c.Call(ctx, "textDocument/linkedEditingRange", params, &result)
- return result, err
-}
-
-// WillCreateFiles sends a workspace/willCreateFiles request to the LSP server.
-// The will create files request is sent from the client to the server before files are actually created as long as the creation is triggered from within the client. The request can return a WorkspaceEdit which will be applied to workspace before the files are created. Hence the WorkspaceEdit can not manipulate the content of the file to be created. Since 3.16.0
-func (c *Client) WillCreateFiles(ctx context.Context, params protocol.CreateFilesParams) (protocol.WorkspaceEdit, error) {
- var result protocol.WorkspaceEdit
- err := c.Call(ctx, "workspace/willCreateFiles", params, &result)
- return result, err
-}
-
-// WillRenameFiles sends a workspace/willRenameFiles request to the LSP server.
-// The will rename files request is sent from the client to the server before files are actually renamed as long as the rename is triggered from within the client. Since 3.16.0
-func (c *Client) WillRenameFiles(ctx context.Context, params protocol.RenameFilesParams) (protocol.WorkspaceEdit, error) {
- var result protocol.WorkspaceEdit
- err := c.Call(ctx, "workspace/willRenameFiles", params, &result)
- return result, err
-}
-
-// WillDeleteFiles sends a workspace/willDeleteFiles request to the LSP server.
-// The did delete files notification is sent from the client to the server when files were deleted from within the client. Since 3.16.0
-func (c *Client) WillDeleteFiles(ctx context.Context, params protocol.DeleteFilesParams) (protocol.WorkspaceEdit, error) {
- var result protocol.WorkspaceEdit
- err := c.Call(ctx, "workspace/willDeleteFiles", params, &result)
- return result, err
-}
-
-// Moniker sends a textDocument/moniker request to the LSP server.
-// A request to get the moniker of a symbol at a given text document position. The request parameter is of type TextDocumentPositionParams. The response is of type Moniker Moniker[] or null.
-func (c *Client) Moniker(ctx context.Context, params protocol.MonikerParams) ([]protocol.Moniker, error) {
- var result []protocol.Moniker
- err := c.Call(ctx, "textDocument/moniker", params, &result)
- return result, err
-}
-
-// PrepareTypeHierarchy sends a textDocument/prepareTypeHierarchy request to the LSP server.
-// A request to result a TypeHierarchyItem in a document at a given position. Can be used as an input to a subtypes or supertypes type hierarchy. Since 3.17.0
-func (c *Client) PrepareTypeHierarchy(ctx context.Context, params protocol.TypeHierarchyPrepareParams) ([]protocol.TypeHierarchyItem, error) {
- var result []protocol.TypeHierarchyItem
- err := c.Call(ctx, "textDocument/prepareTypeHierarchy", params, &result)
- return result, err
-}
-
-// Supertypes sends a typeHierarchy/supertypes request to the LSP server.
-// A request to resolve the supertypes for a given TypeHierarchyItem. Since 3.17.0
-func (c *Client) Supertypes(ctx context.Context, params protocol.TypeHierarchySupertypesParams) ([]protocol.TypeHierarchyItem, error) {
- var result []protocol.TypeHierarchyItem
- err := c.Call(ctx, "typeHierarchy/supertypes", params, &result)
- return result, err
-}
-
-// Subtypes sends a typeHierarchy/subtypes request to the LSP server.
-// A request to resolve the subtypes for a given TypeHierarchyItem. Since 3.17.0
-func (c *Client) Subtypes(ctx context.Context, params protocol.TypeHierarchySubtypesParams) ([]protocol.TypeHierarchyItem, error) {
- var result []protocol.TypeHierarchyItem
- err := c.Call(ctx, "typeHierarchy/subtypes", params, &result)
- return result, err
-}
-
-// InlineValue sends a textDocument/inlineValue request to the LSP server.
-// A request to provide inline values in a document. The request's parameter is of type InlineValueParams, the response is of type InlineValue InlineValue[] or a Thenable that resolves to such. Since 3.17.0
-func (c *Client) InlineValue(ctx context.Context, params protocol.InlineValueParams) ([]protocol.InlineValue, error) {
- var result []protocol.InlineValue
- err := c.Call(ctx, "textDocument/inlineValue", params, &result)
- return result, err
-}
-
-// InlayHint sends a textDocument/inlayHint request to the LSP server.
-// A request to provide inlay hints in a document. The request's parameter is of type InlayHintsParams, the response is of type InlayHint InlayHint[] or a Thenable that resolves to such. Since 3.17.0
-func (c *Client) InlayHint(ctx context.Context, params protocol.InlayHintParams) ([]protocol.InlayHint, error) {
- var result []protocol.InlayHint
- err := c.Call(ctx, "textDocument/inlayHint", params, &result)
- return result, err
-}
-
-// Resolve sends a inlayHint/resolve request to the LSP server.
-// A request to resolve additional properties for an inlay hint. The request's parameter is of type InlayHint, the response is of type InlayHint or a Thenable that resolves to such. Since 3.17.0
-func (c *Client) Resolve(ctx context.Context, params protocol.InlayHint) (protocol.InlayHint, error) {
- var result protocol.InlayHint
- err := c.Call(ctx, "inlayHint/resolve", params, &result)
- return result, err
-}
-
-// Diagnostic sends a textDocument/diagnostic request to the LSP server.
-// The document diagnostic request definition. Since 3.17.0
-func (c *Client) Diagnostic(ctx context.Context, params protocol.DocumentDiagnosticParams) (protocol.DocumentDiagnosticReport, error) {
- var result protocol.DocumentDiagnosticReport
- err := c.Call(ctx, "textDocument/diagnostic", params, &result)
- return result, err
-}
-
-// DiagnosticWorkspace sends a workspace/diagnostic request to the LSP server.
-// The workspace diagnostic request definition. Since 3.17.0
-func (c *Client) DiagnosticWorkspace(ctx context.Context, params protocol.WorkspaceDiagnosticParams) (protocol.WorkspaceDiagnosticReport, error) {
- var result protocol.WorkspaceDiagnosticReport
- err := c.Call(ctx, "workspace/diagnostic", params, &result)
- return result, err
-}
-
-// InlineCompletion sends a textDocument/inlineCompletion request to the LSP server.
-// A request to provide inline completions in a document. The request's parameter is of type InlineCompletionParams, the response is of type InlineCompletion InlineCompletion[] or a Thenable that resolves to such. Since 3.18.0 PROPOSED
-func (c *Client) InlineCompletion(ctx context.Context, params protocol.InlineCompletionParams) (protocol.Or_Result_textDocument_inlineCompletion, error) {
- var result protocol.Or_Result_textDocument_inlineCompletion
- err := c.Call(ctx, "textDocument/inlineCompletion", params, &result)
- return result, err
-}
-
-// TextDocumentContent sends a workspace/textDocumentContent request to the LSP server.
-// The workspace/textDocumentContent request is sent from the client to the server to request the content of a text document. Since 3.18.0 PROPOSED
-func (c *Client) TextDocumentContent(ctx context.Context, params protocol.TextDocumentContentParams) (string, error) {
- var result string
- err := c.Call(ctx, "workspace/textDocumentContent", params, &result)
- return result, err
-}
-
-// Initialize sends a initialize request to the LSP server.
-// The initialize request is sent from the client to the server. It is sent once as the request after starting up the server. The requests parameter is of type InitializeParams the response if of type InitializeResult of a Thenable that resolves to such.
-func (c *Client) Initialize(ctx context.Context, params protocol.ParamInitialize) (protocol.InitializeResult, error) {
- var result protocol.InitializeResult
- err := c.Call(ctx, "initialize", params, &result)
- return result, err
-}
-
-// Shutdown sends a shutdown request to the LSP server.
-// A shutdown request is sent from the client to the server. It is sent once when the client decides to shutdown the server. The only notification that is sent after a shutdown request is the exit event.
-func (c *Client) Shutdown(ctx context.Context) error {
- return c.Call(ctx, "shutdown", nil, nil)
-}
-
-// WillSaveWaitUntil sends a textDocument/willSaveWaitUntil request to the LSP server.
-// A document will save request is sent from the client to the server before the document is actually saved. The request can return an array of TextEdits which will be applied to the text document before it is saved. Please note that clients might drop results if computing the text edits took too long or if a server constantly fails on this request. This is done to keep the save fast and reliable.
-func (c *Client) WillSaveWaitUntil(ctx context.Context, params protocol.WillSaveTextDocumentParams) ([]protocol.TextEdit, error) {
- var result []protocol.TextEdit
- err := c.Call(ctx, "textDocument/willSaveWaitUntil", params, &result)
- return result, err
-}
-
-// Completion sends a textDocument/completion request to the LSP server.
-// Request to request completion at a given text document position. The request's parameter is of type TextDocumentPosition the response is of type CompletionItem CompletionItem[] or CompletionList or a Thenable that resolves to such. The request can delay the computation of the CompletionItem.detail detail and CompletionItem.documentation documentation properties to the completionItem/resolve request. However, properties that are needed for the initial sorting and filtering, like sortText, filterText, insertText, and textEdit, must not be changed during resolve.
-func (c *Client) Completion(ctx context.Context, params protocol.CompletionParams) (protocol.Or_Result_textDocument_completion, error) {
- var result protocol.Or_Result_textDocument_completion
- err := c.Call(ctx, "textDocument/completion", params, &result)
- return result, err
-}
-
-// ResolveCompletionItem sends a completionItem/resolve request to the LSP server.
-// Request to resolve additional information for a given completion item.The request's parameter is of type CompletionItem the response is of type CompletionItem or a Thenable that resolves to such.
-func (c *Client) ResolveCompletionItem(ctx context.Context, params protocol.CompletionItem) (protocol.CompletionItem, error) {
- var result protocol.CompletionItem
- err := c.Call(ctx, "completionItem/resolve", params, &result)
- return result, err
-}
-
-// Hover sends a textDocument/hover request to the LSP server.
-// Request to request hover information at a given text document position. The request's parameter is of type TextDocumentPosition the response is of type Hover or a Thenable that resolves to such.
-func (c *Client) Hover(ctx context.Context, params protocol.HoverParams) (protocol.Hover, error) {
- var result protocol.Hover
- err := c.Call(ctx, "textDocument/hover", params, &result)
- return result, err
-}
-
-// SignatureHelp sends a textDocument/signatureHelp request to the LSP server.
-func (c *Client) SignatureHelp(ctx context.Context, params protocol.SignatureHelpParams) (protocol.SignatureHelp, error) {
- var result protocol.SignatureHelp
- err := c.Call(ctx, "textDocument/signatureHelp", params, &result)
- return result, err
-}
-
-// Definition sends a textDocument/definition request to the LSP server.
-// A request to resolve the definition location of a symbol at a given text document position. The request's parameter is of type TextDocumentPosition the response is of either type Definition or a typed array of DefinitionLink or a Thenable that resolves to such.
-func (c *Client) Definition(ctx context.Context, params protocol.DefinitionParams) (protocol.Or_Result_textDocument_definition, error) {
- var result protocol.Or_Result_textDocument_definition
- err := c.Call(ctx, "textDocument/definition", params, &result)
- return result, err
-}
-
-// References sends a textDocument/references request to the LSP server.
-// A request to resolve project-wide references for the symbol denoted by the given text document position. The request's parameter is of type ReferenceParams the response is of type Location Location[] or a Thenable that resolves to such.
-func (c *Client) References(ctx context.Context, params protocol.ReferenceParams) ([]protocol.Location, error) {
- var result []protocol.Location
- err := c.Call(ctx, "textDocument/references", params, &result)
- return result, err
-}
-
-// DocumentHighlight sends a textDocument/documentHighlight request to the LSP server.
-// Request to resolve a DocumentHighlight for a given text document position. The request's parameter is of type TextDocumentPosition the request response is an array of type DocumentHighlight or a Thenable that resolves to such.
-func (c *Client) DocumentHighlight(ctx context.Context, params protocol.DocumentHighlightParams) ([]protocol.DocumentHighlight, error) {
- var result []protocol.DocumentHighlight
- err := c.Call(ctx, "textDocument/documentHighlight", params, &result)
- return result, err
-}
-
-// DocumentSymbol sends a textDocument/documentSymbol request to the LSP server.
-// A request to list all symbols found in a given text document. The request's parameter is of type TextDocumentIdentifier the response is of type SymbolInformation SymbolInformation[] or a Thenable that resolves to such.
-func (c *Client) DocumentSymbol(ctx context.Context, params protocol.DocumentSymbolParams) (protocol.Or_Result_textDocument_documentSymbol, error) {
- var result protocol.Or_Result_textDocument_documentSymbol
- err := c.Call(ctx, "textDocument/documentSymbol", params, &result)
- return result, err
-}
-
-// CodeAction sends a textDocument/codeAction request to the LSP server.
-// A request to provide commands for the given text document and range.
-func (c *Client) CodeAction(ctx context.Context, params protocol.CodeActionParams) ([]protocol.Or_Result_textDocument_codeAction_Item0_Elem, error) {
- var result []protocol.Or_Result_textDocument_codeAction_Item0_Elem
- err := c.Call(ctx, "textDocument/codeAction", params, &result)
- return result, err
-}
-
-// ResolveCodeAction sends a codeAction/resolve request to the LSP server.
-// Request to resolve additional information for a given code action.The request's parameter is of type CodeAction the response is of type CodeAction or a Thenable that resolves to such.
-func (c *Client) ResolveCodeAction(ctx context.Context, params protocol.CodeAction) (protocol.CodeAction, error) {
- var result protocol.CodeAction
- err := c.Call(ctx, "codeAction/resolve", params, &result)
- return result, err
-}
-
-// Symbol sends a workspace/symbol request to the LSP server.
-// A request to list project-wide symbols matching the query string given by the WorkspaceSymbolParams. The response is of type SymbolInformation SymbolInformation[] or a Thenable that resolves to such. Since 3.17.0 - support for WorkspaceSymbol in the returned data. Clients need to advertise support for WorkspaceSymbols via the client capability workspace.symbol.resolveSupport.
-func (c *Client) Symbol(ctx context.Context, params protocol.WorkspaceSymbolParams) (protocol.Or_Result_workspace_symbol, error) {
- var result protocol.Or_Result_workspace_symbol
- err := c.Call(ctx, "workspace/symbol", params, &result)
- return result, err
-}
-
-// ResolveWorkspaceSymbol sends a workspaceSymbol/resolve request to the LSP server.
-// A request to resolve the range inside the workspace symbol's location. Since 3.17.0
-func (c *Client) ResolveWorkspaceSymbol(ctx context.Context, params protocol.WorkspaceSymbol) (protocol.WorkspaceSymbol, error) {
- var result protocol.WorkspaceSymbol
- err := c.Call(ctx, "workspaceSymbol/resolve", params, &result)
- return result, err
-}
-
-// CodeLens sends a textDocument/codeLens request to the LSP server.
-// A request to provide code lens for the given text document.
-func (c *Client) CodeLens(ctx context.Context, params protocol.CodeLensParams) ([]protocol.CodeLens, error) {
- var result []protocol.CodeLens
- err := c.Call(ctx, "textDocument/codeLens", params, &result)
- return result, err
-}
-
-// ResolveCodeLens sends a codeLens/resolve request to the LSP server.
-// A request to resolve a command for a given code lens.
-func (c *Client) ResolveCodeLens(ctx context.Context, params protocol.CodeLens) (protocol.CodeLens, error) {
- var result protocol.CodeLens
- err := c.Call(ctx, "codeLens/resolve", params, &result)
- return result, err
-}
-
-// DocumentLink sends a textDocument/documentLink request to the LSP server.
-// A request to provide document links
-func (c *Client) DocumentLink(ctx context.Context, params protocol.DocumentLinkParams) ([]protocol.DocumentLink, error) {
- var result []protocol.DocumentLink
- err := c.Call(ctx, "textDocument/documentLink", params, &result)
- return result, err
-}
-
-// ResolveDocumentLink sends a documentLink/resolve request to the LSP server.
-// Request to resolve additional information for a given document link. The request's parameter is of type DocumentLink the response is of type DocumentLink or a Thenable that resolves to such.
-func (c *Client) ResolveDocumentLink(ctx context.Context, params protocol.DocumentLink) (protocol.DocumentLink, error) {
- var result protocol.DocumentLink
- err := c.Call(ctx, "documentLink/resolve", params, &result)
- return result, err
-}
-
-// Formatting sends a textDocument/formatting request to the LSP server.
-// A request to format a whole document.
-func (c *Client) Formatting(ctx context.Context, params protocol.DocumentFormattingParams) ([]protocol.TextEdit, error) {
- var result []protocol.TextEdit
- err := c.Call(ctx, "textDocument/formatting", params, &result)
- return result, err
-}
-
-// RangeFormatting sends a textDocument/rangeFormatting request to the LSP server.
-// A request to format a range in a document.
-func (c *Client) RangeFormatting(ctx context.Context, params protocol.DocumentRangeFormattingParams) ([]protocol.TextEdit, error) {
- var result []protocol.TextEdit
- err := c.Call(ctx, "textDocument/rangeFormatting", params, &result)
- return result, err
-}
-
-// RangesFormatting sends a textDocument/rangesFormatting request to the LSP server.
-// A request to format ranges in a document. Since 3.18.0 PROPOSED
-func (c *Client) RangesFormatting(ctx context.Context, params protocol.DocumentRangesFormattingParams) ([]protocol.TextEdit, error) {
- var result []protocol.TextEdit
- err := c.Call(ctx, "textDocument/rangesFormatting", params, &result)
- return result, err
-}
-
-// OnTypeFormatting sends a textDocument/onTypeFormatting request to the LSP server.
-// A request to format a document on type.
-func (c *Client) OnTypeFormatting(ctx context.Context, params protocol.DocumentOnTypeFormattingParams) ([]protocol.TextEdit, error) {
- var result []protocol.TextEdit
- err := c.Call(ctx, "textDocument/onTypeFormatting", params, &result)
- return result, err
-}
-
-// Rename sends a textDocument/rename request to the LSP server.
-// A request to rename a symbol.
-func (c *Client) Rename(ctx context.Context, params protocol.RenameParams) (protocol.WorkspaceEdit, error) {
- var result protocol.WorkspaceEdit
- err := c.Call(ctx, "textDocument/rename", params, &result)
- return result, err
-}
-
-// PrepareRename sends a textDocument/prepareRename request to the LSP server.
-// A request to test and perform the setup necessary for a rename. Since 3.16 - support for default behavior
-func (c *Client) PrepareRename(ctx context.Context, params protocol.PrepareRenameParams) (protocol.PrepareRenameResult, error) {
- var result protocol.PrepareRenameResult
- err := c.Call(ctx, "textDocument/prepareRename", params, &result)
- return result, err
-}
-
-// ExecuteCommand sends a workspace/executeCommand request to the LSP server.
-// A request send from the client to the server to execute a command. The request might return a workspace edit which the client will apply to the workspace.
-func (c *Client) ExecuteCommand(ctx context.Context, params protocol.ExecuteCommandParams) (any, error) {
- var result any
- err := c.Call(ctx, "workspace/executeCommand", params, &result)
- return result, err
-}
-
-// DidChangeWorkspaceFolders sends a workspace/didChangeWorkspaceFolders notification to the LSP server.
-// The workspace/didChangeWorkspaceFolders notification is sent from the client to the server when the workspace folder configuration changes.
-func (c *Client) DidChangeWorkspaceFolders(ctx context.Context, params protocol.DidChangeWorkspaceFoldersParams) error {
- return c.Notify(ctx, "workspace/didChangeWorkspaceFolders", params)
-}
-
-// WorkDoneProgressCancel sends a window/workDoneProgress/cancel notification to the LSP server.
-// The window/workDoneProgress/cancel notification is sent from the client to the server to cancel a progress initiated on the server side.
-func (c *Client) WorkDoneProgressCancel(ctx context.Context, params protocol.WorkDoneProgressCancelParams) error {
- return c.Notify(ctx, "window/workDoneProgress/cancel", params)
-}
-
-// DidCreateFiles sends a workspace/didCreateFiles notification to the LSP server.
-// The did create files notification is sent from the client to the server when files were created from within the client. Since 3.16.0
-func (c *Client) DidCreateFiles(ctx context.Context, params protocol.CreateFilesParams) error {
- return c.Notify(ctx, "workspace/didCreateFiles", params)
-}
-
-// DidRenameFiles sends a workspace/didRenameFiles notification to the LSP server.
-// The did rename files notification is sent from the client to the server when files were renamed from within the client. Since 3.16.0
-func (c *Client) DidRenameFiles(ctx context.Context, params protocol.RenameFilesParams) error {
- return c.Notify(ctx, "workspace/didRenameFiles", params)
-}
-
-// DidDeleteFiles sends a workspace/didDeleteFiles notification to the LSP server.
-// The will delete files request is sent from the client to the server before files are actually deleted as long as the deletion is triggered from within the client. Since 3.16.0
-func (c *Client) DidDeleteFiles(ctx context.Context, params protocol.DeleteFilesParams) error {
- return c.Notify(ctx, "workspace/didDeleteFiles", params)
-}
-
-// DidOpenNotebookDocument sends a notebookDocument/didOpen notification to the LSP server.
-// A notification sent when a notebook opens. Since 3.17.0
-func (c *Client) DidOpenNotebookDocument(ctx context.Context, params protocol.DidOpenNotebookDocumentParams) error {
- return c.Notify(ctx, "notebookDocument/didOpen", params)
-}
-
-// DidChangeNotebookDocument sends a notebookDocument/didChange notification to the LSP server.
-func (c *Client) DidChangeNotebookDocument(ctx context.Context, params protocol.DidChangeNotebookDocumentParams) error {
- return c.Notify(ctx, "notebookDocument/didChange", params)
-}
-
-// DidSaveNotebookDocument sends a notebookDocument/didSave notification to the LSP server.
-// A notification sent when a notebook document is saved. Since 3.17.0
-func (c *Client) DidSaveNotebookDocument(ctx context.Context, params protocol.DidSaveNotebookDocumentParams) error {
- return c.Notify(ctx, "notebookDocument/didSave", params)
-}
-
-// DidCloseNotebookDocument sends a notebookDocument/didClose notification to the LSP server.
-// A notification sent when a notebook closes. Since 3.17.0
-func (c *Client) DidCloseNotebookDocument(ctx context.Context, params protocol.DidCloseNotebookDocumentParams) error {
- return c.Notify(ctx, "notebookDocument/didClose", params)
-}
-
-// Initialized sends a initialized notification to the LSP server.
-// The initialized notification is sent from the client to the server after the client is fully initialized and the server is allowed to send requests from the server to the client.
-func (c *Client) Initialized(ctx context.Context, params protocol.InitializedParams) error {
- return c.Notify(ctx, "initialized", params)
-}
-
-// Exit sends a exit notification to the LSP server.
-// The exit event is sent from the client to the server to ask the server to exit its process.
-func (c *Client) Exit(ctx context.Context) error {
- return c.Notify(ctx, "exit", nil)
-}
-
-// DidChangeConfiguration sends a workspace/didChangeConfiguration notification to the LSP server.
-// The configuration change notification is sent from the client to the server when the client's configuration has changed. The notification contains the changed configuration as defined by the language client.
-func (c *Client) DidChangeConfiguration(ctx context.Context, params protocol.DidChangeConfigurationParams) error {
- return c.Notify(ctx, "workspace/didChangeConfiguration", params)
-}
-
-// DidOpen sends a textDocument/didOpen notification to the LSP server.
-// The document open notification is sent from the client to the server to signal newly opened text documents. The document's truth is now managed by the client and the server must not try to read the document's truth using the document's uri. Open in this sense means it is managed by the client. It doesn't necessarily mean that its content is presented in an editor. An open notification must not be sent more than once without a corresponding close notification send before. This means open and close notification must be balanced and the max open count is one.
-func (c *Client) DidOpen(ctx context.Context, params protocol.DidOpenTextDocumentParams) error {
- return c.Notify(ctx, "textDocument/didOpen", params)
-}
-
-// DidChange sends a textDocument/didChange notification to the LSP server.
-// The document change notification is sent from the client to the server to signal changes to a text document.
-func (c *Client) DidChange(ctx context.Context, params protocol.DidChangeTextDocumentParams) error {
- return c.Notify(ctx, "textDocument/didChange", params)
-}
-
-// DidClose sends a textDocument/didClose notification to the LSP server.
-// The document close notification is sent from the client to the server when the document got closed in the client. The document's truth now exists where the document's uri points to (e.g. if the document's uri is a file uri the truth now exists on disk). As with the open notification the close notification is about managing the document's content. Receiving a close notification doesn't mean that the document was open in an editor before. A close notification requires a previous open notification to be sent.
-func (c *Client) DidClose(ctx context.Context, params protocol.DidCloseTextDocumentParams) error {
- return c.Notify(ctx, "textDocument/didClose", params)
-}
-
-// DidSave sends a textDocument/didSave notification to the LSP server.
-// The document save notification is sent from the client to the server when the document got saved in the client.
-func (c *Client) DidSave(ctx context.Context, params protocol.DidSaveTextDocumentParams) error {
- return c.Notify(ctx, "textDocument/didSave", params)
-}
-
-// WillSave sends a textDocument/willSave notification to the LSP server.
-// A document will save notification is sent from the client to the server before the document is actually saved.
-func (c *Client) WillSave(ctx context.Context, params protocol.WillSaveTextDocumentParams) error {
- return c.Notify(ctx, "textDocument/willSave", params)
-}
-
-// DidChangeWatchedFiles sends a workspace/didChangeWatchedFiles notification to the LSP server.
-// The watched files notification is sent from the client to the server when the client detects changes to file watched by the language client.
-func (c *Client) DidChangeWatchedFiles(ctx context.Context, params protocol.DidChangeWatchedFilesParams) error {
- return c.Notify(ctx, "workspace/didChangeWatchedFiles", params)
-}
-
-// SetTrace sends a $/setTrace notification to the LSP server.
-func (c *Client) SetTrace(ctx context.Context, params protocol.SetTraceParams) error {
- return c.Notify(ctx, "$/setTrace", params)
-}
-
-// Progress sends a $/progress notification to the LSP server.
-func (c *Client) Progress(ctx context.Context, params protocol.ProgressParams) error {
- return c.Notify(ctx, "$/progress", params)
-}
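Every wrapper in the deleted file reduces to the same two primitives: c.Call for requests and c.Notify for notifications. A sketch of that pattern as a single generic helper (not part of the codebase; it only summarizes what the generated methods did):

// callTyped captures the shape shared by all generated request wrappers:
// send method+params, decode the result into the requested type.
func callTyped[T any](ctx context.Context, c *Client, method string, params any) (T, error) {
    var result T
    err := c.Call(ctx, method, params, &result)
    return result, err
}

// The old Hover wrapper, for example, was equivalent to:
//   hover, err := callTyped[protocol.Hover](ctx, client, "textDocument/hover", params)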
diff --git a/internal/lsp/protocol.go b/internal/lsp/protocol.go
deleted file mode 100644
index e70e2824b5fbdfdb2055b9bb827cce8c4d3ed850..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol.go
+++ /dev/null
@@ -1,48 +0,0 @@
-package lsp
-
-import (
- "encoding/json"
-)
-
-// Message represents a JSON-RPC 2.0 message
-type Message struct {
- JSONRPC string `json:"jsonrpc"`
- ID int32 `json:"id,omitempty"`
- Method string `json:"method,omitempty"`
- Params json.RawMessage `json:"params,omitempty"`
- Result json.RawMessage `json:"result,omitempty"`
- Error *ResponseError `json:"error,omitempty"`
-}
-
-// ResponseError represents a JSON-RPC 2.0 error
-type ResponseError struct {
- Code int `json:"code"`
- Message string `json:"message"`
-}
-
-func NewRequest(id int32, method string, params any) (*Message, error) {
- paramsJSON, err := json.Marshal(params)
- if err != nil {
- return nil, err
- }
-
- return &Message{
- JSONRPC: "2.0",
- ID: id,
- Method: method,
- Params: paramsJSON,
- }, nil
-}
-
-func NewNotification(method string, params any) (*Message, error) {
- paramsJSON, err := json.Marshal(params)
- if err != nil {
- return nil, err
- }
-
- return &Message{
- JSONRPC: "2.0",
- Method: method,
- Params: paramsJSON,
- }, nil
-}
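The removed constructors produced plain JSON-RPC 2.0 frames. A sketch of the payloads they serialized to (values illustrative; assumes the Message helpers deleted above):

req, _ := NewRequest(1, "shutdown", nil)
b, _ := json.Marshal(req)
// b ≈ {"jsonrpc":"2.0","id":1,"method":"shutdown","params":null}

note, _ := NewNotification("exit", nil)
b, _ = json.Marshal(note)
// b ≈ {"jsonrpc":"2.0","method":"exit","params":null}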
diff --git a/internal/lsp/protocol/LICENSE b/internal/lsp/protocol/LICENSE
deleted file mode 100644
index 2a7cf70da6e498df9c11ab6a5eaa2ddd7af34da4..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-Copyright 2009 The Go Authors.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
- * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
- * Neither the name of Google LLC nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/internal/lsp/protocol/interface.go b/internal/lsp/protocol/interface.go
deleted file mode 100644
index 89311b31c8398db1d2da63fc15961294ff79badd..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol/interface.go
+++ /dev/null
@@ -1,117 +0,0 @@
-package protocol
-
-import "fmt"
-
-// WorkspaceSymbolResult is an interface for types that represent workspace symbols
-type WorkspaceSymbolResult interface {
- GetName() string
- GetLocation() Location
- isWorkspaceSymbol() // marker method
-}
-
-func (ws *WorkspaceSymbol) GetName() string { return ws.Name }
-func (ws *WorkspaceSymbol) GetLocation() Location {
- switch v := ws.Location.Value.(type) {
- case Location:
- return v
- case LocationUriOnly:
- return Location{URI: v.URI}
- }
- return Location{}
-}
-func (ws *WorkspaceSymbol) isWorkspaceSymbol() {}
-
-func (si *SymbolInformation) GetName() string { return si.Name }
-func (si *SymbolInformation) GetLocation() Location { return si.Location }
-func (si *SymbolInformation) isWorkspaceSymbol() {}
-
-// Results converts the Value to a slice of WorkspaceSymbolResult
-func (r Or_Result_workspace_symbol) Results() ([]WorkspaceSymbolResult, error) {
- if r.Value == nil {
- return make([]WorkspaceSymbolResult, 0), nil
- }
- switch v := r.Value.(type) {
- case []WorkspaceSymbol:
- results := make([]WorkspaceSymbolResult, len(v))
- for i := range v {
- results[i] = &v[i]
- }
- return results, nil
- case []SymbolInformation:
- results := make([]WorkspaceSymbolResult, len(v))
- for i := range v {
- results[i] = &v[i]
- }
- return results, nil
- default:
- return nil, fmt.Errorf("unknown symbol type: %T", r.Value)
- }
-}
-
-// DocumentSymbolResult is an interface for types that represent document symbols
-type DocumentSymbolResult interface {
- GetRange() Range
- GetName() string
- isDocumentSymbol() // marker method
-}
-
-func (ds *DocumentSymbol) GetRange() Range { return ds.Range }
-func (ds *DocumentSymbol) GetName() string { return ds.Name }
-func (ds *DocumentSymbol) isDocumentSymbol() {}
-
-func (si *SymbolInformation) GetRange() Range { return si.Location.Range }
-
-// Note: SymbolInformation already has GetName() implemented above
-func (si *SymbolInformation) isDocumentSymbol() {}
-
-// Results converts the Value to a slice of DocumentSymbolResult
-func (r Or_Result_textDocument_documentSymbol) Results() ([]DocumentSymbolResult, error) {
- if r.Value == nil {
- return make([]DocumentSymbolResult, 0), nil
- }
- switch v := r.Value.(type) {
- case []DocumentSymbol:
- results := make([]DocumentSymbolResult, len(v))
- for i := range v {
- results[i] = &v[i]
- }
- return results, nil
- case []SymbolInformation:
- results := make([]DocumentSymbolResult, len(v))
- for i := range v {
- results[i] = &v[i]
- }
- return results, nil
- default:
- return nil, fmt.Errorf("unknown document symbol type: %T", v)
- }
-}
-
-// TextEditResult is an interface for types that can be used as text edits
-type TextEditResult interface {
- GetRange() Range
- GetNewText() string
- isTextEdit() // marker method
-}
-
-func (te *TextEdit) GetRange() Range { return te.Range }
-func (te *TextEdit) GetNewText() string { return te.NewText }
-func (te *TextEdit) isTextEdit() {}
-
-// AsTextEdit converts Or_TextDocumentEdit_edits_Elem to TextEdit
-func (e Or_TextDocumentEdit_edits_Elem) AsTextEdit() (TextEdit, error) {
- if e.Value == nil {
- return TextEdit{}, fmt.Errorf("nil text edit")
- }
- switch v := e.Value.(type) {
- case TextEdit:
- return v, nil
- case AnnotatedTextEdit:
- return TextEdit{
- Range: v.Range,
- NewText: v.NewText,
- }, nil
- default:
- return TextEdit{}, fmt.Errorf("unknown text edit type: %T", e.Value)
- }
-}
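The removed accessors let callers treat both union arms uniformly. A sketch of typical consumption, assuming a populated Or_Result_workspace_symbol value named res:

symbols, err := res.Results()
if err != nil {
    return err
}
for _, s := range symbols {
    // Both arms (WorkspaceSymbol and SymbolInformation) satisfy the same
    // interface, so no type switch is needed at the call site.
    slog.Debug("workspace symbol", "name", s.GetName(), "uri", s.GetLocation().URI)
}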
diff --git a/internal/lsp/protocol/pattern_interfaces.go b/internal/lsp/protocol/pattern_interfaces.go
deleted file mode 100644
index 5cb5dbb84ea385d96ac33fa2075d6590872da3cd..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol/pattern_interfaces.go
+++ /dev/null
@@ -1,73 +0,0 @@
-package protocol
-
-import (
- "fmt"
- "log/slog"
-)
-
-// PatternInfo is an interface for types that represent glob patterns
-type PatternInfo interface {
- GetPattern() string
- GetBasePath() string
- isPattern() // marker method
-}
-
-// StringPattern implements PatternInfo for string patterns
-type StringPattern struct {
- Pattern string
-}
-
-func (p StringPattern) GetPattern() string { return p.Pattern }
-func (p StringPattern) GetBasePath() string { return "" }
-func (p StringPattern) isPattern() {}
-
-// RelativePatternInfo implements PatternInfo for RelativePattern
-type RelativePatternInfo struct {
- RP RelativePattern
- BasePath string
-}
-
-func (p RelativePatternInfo) GetPattern() string { return string(p.RP.Pattern) }
-func (p RelativePatternInfo) GetBasePath() string { return p.BasePath }
-func (p RelativePatternInfo) isPattern() {}
-
-// AsPattern converts GlobPattern to a PatternInfo object
-func (g *GlobPattern) AsPattern() (PatternInfo, error) {
- if g.Value == nil {
- return nil, fmt.Errorf("nil pattern")
- }
-
- var err error
-
- switch v := g.Value.(type) {
- case string:
- return StringPattern{Pattern: v}, nil
-
- case RelativePattern:
- // Handle BaseURI which could be string or DocumentUri
- basePath := ""
- switch baseURI := v.BaseURI.Value.(type) {
- case string:
- basePath, err = DocumentURI(baseURI).Path()
- if err != nil {
- slog.Error("Failed to convert URI to path", "uri", baseURI, "error", err)
- return nil, fmt.Errorf("invalid URI: %s", baseURI)
- }
-
- case DocumentURI:
- basePath, err = baseURI.Path()
- if err != nil {
- slog.Error("Failed to convert DocumentURI to path", "uri", baseURI, "error", err)
- return nil, fmt.Errorf("invalid DocumentURI: %s", baseURI)
- }
-
- default:
- return nil, fmt.Errorf("unknown BaseURI type: %T", v.BaseURI.Value)
- }
-
- return RelativePatternInfo{RP: v, BasePath: basePath}, nil
-
- default:
- return nil, fmt.Errorf("unknown pattern type: %T", g.Value)
- }
-}
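AsPattern normalized the two GlobPattern arms into one PatternInfo. A sketch of how a file-watcher registration consumed it (g is an incoming protocol.GlobPattern; the log lines are illustrative):

info, err := g.AsPattern()
if err != nil {
    slog.Error("Ignoring watcher with unsupported pattern", "error", err)
    return
}
// GetBasePath is "" for plain string patterns, the resolved folder path for relative ones.
slog.Debug("registering watcher", "pattern", info.GetPattern(), "base", info.GetBasePath())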
diff --git a/internal/lsp/protocol/tables.go b/internal/lsp/protocol/tables.go
deleted file mode 100644
index 6a8fb99e0a27ce77906ead6f04b71539b741f181..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol/tables.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package protocol
-
-var TableKindMap = map[SymbolKind]string{
- File: "File",
- Module: "Module",
- Namespace: "Namespace",
- Package: "Package",
- Class: "Class",
- Method: "Method",
- Property: "Property",
- Field: "Field",
- Constructor: "Constructor",
- Enum: "Enum",
- Interface: "Interface",
- Function: "Function",
- Variable: "Variable",
- Constant: "Constant",
- String: "String",
- Number: "Number",
- Boolean: "Boolean",
- Array: "Array",
- Object: "Object",
- Key: "Key",
- Null: "Null",
- EnumMember: "EnumMember",
- Struct: "Struct",
- Event: "Event",
- Operator: "Operator",
- TypeParameter: "TypeParameter",
-}
diff --git a/internal/lsp/protocol/tsdocument-changes.go b/internal/lsp/protocol/tsdocument-changes.go
deleted file mode 100644
index f18825719efad72e04502094931280e78ccbad59..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol/tsdocument-changes.go
+++ /dev/null
@@ -1,81 +0,0 @@
-// Copyright 2022 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package protocol
-
-import (
- "encoding/json"
- "fmt"
-)
-
-// DocumentChange is a union of various file edit operations.
-//
-// Exactly one field of this struct is non-nil; see [DocumentChange.Valid].
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#resourceChanges
-type DocumentChange struct {
- TextDocumentEdit *TextDocumentEdit
- CreateFile *CreateFile
- RenameFile *RenameFile
- DeleteFile *DeleteFile
-}
-
-// Valid reports whether the DocumentChange sum-type value is valid,
-// that is, exactly one of create, delete, edit, or rename.
-func (d DocumentChange) Valid() bool {
- n := 0
- if d.TextDocumentEdit != nil {
- n++
- }
- if d.CreateFile != nil {
- n++
- }
- if d.RenameFile != nil {
- n++
- }
- if d.DeleteFile != nil {
- n++
- }
- return n == 1
-}
-
-func (d *DocumentChange) UnmarshalJSON(data []byte) error {
- var m map[string]any
- if err := json.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if _, ok := m["textDocument"]; ok {
- d.TextDocumentEdit = new(TextDocumentEdit)
- return json.Unmarshal(data, d.TextDocumentEdit)
- }
-
- // The {Create,Rename,Delete}File types all share a 'kind' field.
- kind := m["kind"]
- switch kind {
- case "create":
- d.CreateFile = new(CreateFile)
- return json.Unmarshal(data, d.CreateFile)
- case "rename":
- d.RenameFile = new(RenameFile)
- return json.Unmarshal(data, d.RenameFile)
- case "delete":
- d.DeleteFile = new(DeleteFile)
- return json.Unmarshal(data, d.DeleteFile)
- }
- return fmt.Errorf("DocumentChanges: unexpected kind: %q", kind)
-}
-
-func (d *DocumentChange) MarshalJSON() ([]byte, error) {
- if d.TextDocumentEdit != nil {
- return json.Marshal(d.TextDocumentEdit)
- } else if d.CreateFile != nil {
- return json.Marshal(d.CreateFile)
- } else if d.RenameFile != nil {
- return json.Marshal(d.RenameFile)
- } else if d.DeleteFile != nil {
- return json.Marshal(d.DeleteFile)
- }
- return nil, fmt.Errorf("empty DocumentChanges union value")
-}
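DocumentChange dispatched on the presence of "textDocument" or on the "kind" field. A sketch of the behaviour the removed UnmarshalJSON implemented (payload illustrative):

var dc DocumentChange
raw := []byte(`{"kind":"create","uri":"file:///tmp/new.go"}`)
if err := json.Unmarshal(raw, &dc); err != nil {
    return err
}
// The "kind":"create" arm was selected, and exactly one field is set.
fmt.Println(dc.CreateFile != nil, dc.Valid()) // true true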
diff --git a/internal/lsp/protocol/tsjson.go b/internal/lsp/protocol/tsjson.go
deleted file mode 100644
index 3cf7275245a5dc532c52e03024652fceda6e713a..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol/tsjson.go
+++ /dev/null
@@ -1,3073 +0,0 @@
-// Copyright 2023 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Code generated for LSP. DO NOT EDIT.
-
-package protocol
-
-// Code generated from protocol/metaModel.json at ref release/protocol/3.17.6-next.9 (hash c94395b5da53729e6dff931293b051009ccaaaa4).
-// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.6-next.9/protocol/metaModel.json
-// LSP metaData.version = 3.17.0.
-
-import (
- "bytes"
- "encoding/json"
- "fmt"
-)
-
-// UnmarshalError indicates that a JSON value did not conform to
-// one of the expected cases of an LSP union type.
-type UnmarshalError struct {
- msg string
-}
-
-func (e UnmarshalError) Error() string {
- return e.msg
-}
-func (t Or_CancelParams_id) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case int32:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [int32 string]", t)
-}
-
-func (t *Or_CancelParams_id) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder41 := json.NewDecoder(bytes.NewReader(x))
- decoder41.DisallowUnknownFields()
- var int32Val int32
- if err := decoder41.Decode(&int32Val); err == nil {
- t.Value = int32Val
- return nil
- }
- decoder42 := json.NewDecoder(bytes.NewReader(x))
- decoder42.DisallowUnknownFields()
- var stringVal string
- if err := decoder42.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [int32 string]"}
-}
-
-func (t Or_ClientSemanticTokensRequestOptions_full) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case ClientSemanticTokensRequestFullDelta:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [ClientSemanticTokensRequestFullDelta bool]", t)
-}
-
-func (t *Or_ClientSemanticTokensRequestOptions_full) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder220 := json.NewDecoder(bytes.NewReader(x))
- decoder220.DisallowUnknownFields()
- var boolVal bool
- if err := decoder220.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder221 := json.NewDecoder(bytes.NewReader(x))
- decoder221.DisallowUnknownFields()
- var h221 ClientSemanticTokensRequestFullDelta
- if err := decoder221.Decode(&h221); err == nil {
- t.Value = h221
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [ClientSemanticTokensRequestFullDelta bool]"}
-}
-
-func (t Or_ClientSemanticTokensRequestOptions_range) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Lit_ClientSemanticTokensRequestOptions_range_Item1:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Lit_ClientSemanticTokensRequestOptions_range_Item1 bool]", t)
-}
-
-func (t *Or_ClientSemanticTokensRequestOptions_range) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder217 := json.NewDecoder(bytes.NewReader(x))
- decoder217.DisallowUnknownFields()
- var boolVal bool
- if err := decoder217.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder218 := json.NewDecoder(bytes.NewReader(x))
- decoder218.DisallowUnknownFields()
- var h218 Lit_ClientSemanticTokensRequestOptions_range_Item1
- if err := decoder218.Decode(&h218); err == nil {
- t.Value = h218
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Lit_ClientSemanticTokensRequestOptions_range_Item1 bool]"}
-}
-
-func (t Or_CompletionItemDefaults_editRange) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case EditRangeWithInsertReplace:
- return json.Marshal(x)
- case Range:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [EditRangeWithInsertReplace Range]", t)
-}
-
-func (t *Or_CompletionItemDefaults_editRange) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder183 := json.NewDecoder(bytes.NewReader(x))
- decoder183.DisallowUnknownFields()
- var h183 EditRangeWithInsertReplace
- if err := decoder183.Decode(&h183); err == nil {
- t.Value = h183
- return nil
- }
- decoder184 := json.NewDecoder(bytes.NewReader(x))
- decoder184.DisallowUnknownFields()
- var h184 Range
- if err := decoder184.Decode(&h184); err == nil {
- t.Value = h184
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [EditRangeWithInsertReplace Range]"}
-}
-
-func (t Or_CompletionItem_documentation) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkupContent:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t)
-}
-
-func (t *Or_CompletionItem_documentation) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder25 := json.NewDecoder(bytes.NewReader(x))
- decoder25.DisallowUnknownFields()
- var stringVal string
- if err := decoder25.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder26 := json.NewDecoder(bytes.NewReader(x))
- decoder26.DisallowUnknownFields()
- var h26 MarkupContent
- if err := decoder26.Decode(&h26); err == nil {
- t.Value = h26
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"}
-}
-
-func (t Or_CompletionItem_textEdit) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InsertReplaceEdit:
- return json.Marshal(x)
- case TextEdit:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InsertReplaceEdit TextEdit]", t)
-}
-
-func (t *Or_CompletionItem_textEdit) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder29 := json.NewDecoder(bytes.NewReader(x))
- decoder29.DisallowUnknownFields()
- var h29 InsertReplaceEdit
- if err := decoder29.Decode(&h29); err == nil {
- t.Value = h29
- return nil
- }
- decoder30 := json.NewDecoder(bytes.NewReader(x))
- decoder30.DisallowUnknownFields()
- var h30 TextEdit
- if err := decoder30.Decode(&h30); err == nil {
- t.Value = h30
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InsertReplaceEdit TextEdit]"}
-}
-
-func (t Or_Declaration) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Location:
- return json.Marshal(x)
- case []Location:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Location []Location]", t)
-}
-
-func (t *Or_Declaration) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder237 := json.NewDecoder(bytes.NewReader(x))
- decoder237.DisallowUnknownFields()
- var h237 Location
- if err := decoder237.Decode(&h237); err == nil {
- t.Value = h237
- return nil
- }
- decoder238 := json.NewDecoder(bytes.NewReader(x))
- decoder238.DisallowUnknownFields()
- var h238 []Location
- if err := decoder238.Decode(&h238); err == nil {
- t.Value = h238
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Location []Location]"}
-}
-
-func (t Or_Definition) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Location:
- return json.Marshal(x)
- case []Location:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Location []Location]", t)
-}
-
-func (t *Or_Definition) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder224 := json.NewDecoder(bytes.NewReader(x))
- decoder224.DisallowUnknownFields()
- var h224 Location
- if err := decoder224.Decode(&h224); err == nil {
- t.Value = h224
- return nil
- }
- decoder225 := json.NewDecoder(bytes.NewReader(x))
- decoder225.DisallowUnknownFields()
- var h225 []Location
- if err := decoder225.Decode(&h225); err == nil {
- t.Value = h225
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Location []Location]"}
-}
-
-func (t Or_Diagnostic_code) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case int32:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [int32 string]", t)
-}
-
-func (t *Or_Diagnostic_code) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder179 := json.NewDecoder(bytes.NewReader(x))
- decoder179.DisallowUnknownFields()
- var int32Val int32
- if err := decoder179.Decode(&int32Val); err == nil {
- t.Value = int32Val
- return nil
- }
- decoder180 := json.NewDecoder(bytes.NewReader(x))
- decoder180.DisallowUnknownFields()
- var stringVal string
- if err := decoder180.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [int32 string]"}
-}
-
-func (t Or_DidChangeConfigurationRegistrationOptions_section) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case []string:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [[]string string]", t)
-}
-
-func (t *Or_DidChangeConfigurationRegistrationOptions_section) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder22 := json.NewDecoder(bytes.NewReader(x))
- decoder22.DisallowUnknownFields()
- var stringVal string
- if err := decoder22.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder23 := json.NewDecoder(bytes.NewReader(x))
- decoder23.DisallowUnknownFields()
- var h23 []string
- if err := decoder23.Decode(&h23); err == nil {
- t.Value = h23
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [[]string string]"}
-}
-
-func (t Or_DocumentDiagnosticReport) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case RelatedFullDocumentDiagnosticReport:
- return json.Marshal(x)
- case RelatedUnchangedDocumentDiagnosticReport:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [RelatedFullDocumentDiagnosticReport RelatedUnchangedDocumentDiagnosticReport]", t)
-}
-
-func (t *Or_DocumentDiagnosticReport) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder247 := json.NewDecoder(bytes.NewReader(x))
- decoder247.DisallowUnknownFields()
- var h247 RelatedFullDocumentDiagnosticReport
- if err := decoder247.Decode(&h247); err == nil {
- t.Value = h247
- return nil
- }
- decoder248 := json.NewDecoder(bytes.NewReader(x))
- decoder248.DisallowUnknownFields()
- var h248 RelatedUnchangedDocumentDiagnosticReport
- if err := decoder248.Decode(&h248); err == nil {
- t.Value = h248
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [RelatedFullDocumentDiagnosticReport RelatedUnchangedDocumentDiagnosticReport]"}
-}
-
-func (t Or_DocumentDiagnosticReportPartialResult_relatedDocuments_Value) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case FullDocumentDiagnosticReport:
- return json.Marshal(x)
- case UnchangedDocumentDiagnosticReport:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]", t)
-}
-
-func (t *Or_DocumentDiagnosticReportPartialResult_relatedDocuments_Value) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder16 := json.NewDecoder(bytes.NewReader(x))
- decoder16.DisallowUnknownFields()
- var h16 FullDocumentDiagnosticReport
- if err := decoder16.Decode(&h16); err == nil {
- t.Value = h16
- return nil
- }
- decoder17 := json.NewDecoder(bytes.NewReader(x))
- decoder17.DisallowUnknownFields()
- var h17 UnchangedDocumentDiagnosticReport
- if err := decoder17.Decode(&h17); err == nil {
- t.Value = h17
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]"}
-}
-
-func (t Or_DocumentFilter) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookCellTextDocumentFilter:
- return json.Marshal(x)
- case TextDocumentFilter:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookCellTextDocumentFilter TextDocumentFilter]", t)
-}
-
-func (t *Or_DocumentFilter) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder270 := json.NewDecoder(bytes.NewReader(x))
- decoder270.DisallowUnknownFields()
- var h270 NotebookCellTextDocumentFilter
- if err := decoder270.Decode(&h270); err == nil {
- t.Value = h270
- return nil
- }
- decoder271 := json.NewDecoder(bytes.NewReader(x))
- decoder271.DisallowUnknownFields()
- var h271 TextDocumentFilter
- if err := decoder271.Decode(&h271); err == nil {
- t.Value = h271
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookCellTextDocumentFilter TextDocumentFilter]"}
-}
-
-func (t Or_GlobPattern) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Pattern:
- return json.Marshal(x)
- case RelativePattern:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Pattern RelativePattern]", t)
-}
-
-func (t *Or_GlobPattern) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder274 := json.NewDecoder(bytes.NewReader(x))
- decoder274.DisallowUnknownFields()
- var h274 Pattern
- if err := decoder274.Decode(&h274); err == nil {
- t.Value = h274
- return nil
- }
- decoder275 := json.NewDecoder(bytes.NewReader(x))
- decoder275.DisallowUnknownFields()
- var h275 RelativePattern
- if err := decoder275.Decode(&h275); err == nil {
- t.Value = h275
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Pattern RelativePattern]"}
-}
-
-func (t Or_Hover_contents) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkedString:
- return json.Marshal(x)
- case MarkupContent:
- return json.Marshal(x)
- case []MarkedString:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkedString MarkupContent []MarkedString]", t)
-}
-
-func (t *Or_Hover_contents) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder34 := json.NewDecoder(bytes.NewReader(x))
- decoder34.DisallowUnknownFields()
- var h34 MarkedString
- if err := decoder34.Decode(&h34); err == nil {
- t.Value = h34
- return nil
- }
- decoder35 := json.NewDecoder(bytes.NewReader(x))
- decoder35.DisallowUnknownFields()
- var h35 MarkupContent
- if err := decoder35.Decode(&h35); err == nil {
- t.Value = h35
- return nil
- }
- decoder36 := json.NewDecoder(bytes.NewReader(x))
- decoder36.DisallowUnknownFields()
- var h36 []MarkedString
- if err := decoder36.Decode(&h36); err == nil {
- t.Value = h36
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkedString MarkupContent []MarkedString]"}
-}
-
-func (t Or_InlayHintLabelPart_tooltip) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkupContent:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t)
-}
-
-func (t *Or_InlayHintLabelPart_tooltip) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder56 := json.NewDecoder(bytes.NewReader(x))
- decoder56.DisallowUnknownFields()
- var stringVal string
- if err := decoder56.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder57 := json.NewDecoder(bytes.NewReader(x))
- decoder57.DisallowUnknownFields()
- var h57 MarkupContent
- if err := decoder57.Decode(&h57); err == nil {
- t.Value = h57
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"}
-}
-
-func (t Or_InlayHint_label) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case []InlayHintLabelPart:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [[]InlayHintLabelPart string]", t)
-}
-
-func (t *Or_InlayHint_label) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder9 := json.NewDecoder(bytes.NewReader(x))
- decoder9.DisallowUnknownFields()
- var stringVal string
- if err := decoder9.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder10 := json.NewDecoder(bytes.NewReader(x))
- decoder10.DisallowUnknownFields()
- var h10 []InlayHintLabelPart
- if err := decoder10.Decode(&h10); err == nil {
- t.Value = h10
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [[]InlayHintLabelPart string]"}
-}
-
-func (t Or_InlayHint_tooltip) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkupContent:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t)
-}
-
-func (t *Or_InlayHint_tooltip) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder12 := json.NewDecoder(bytes.NewReader(x))
- decoder12.DisallowUnknownFields()
- var stringVal string
- if err := decoder12.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder13 := json.NewDecoder(bytes.NewReader(x))
- decoder13.DisallowUnknownFields()
- var h13 MarkupContent
- if err := decoder13.Decode(&h13); err == nil {
- t.Value = h13
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"}
-}
-
-func (t Or_InlineCompletionItem_insertText) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case StringValue:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [StringValue string]", t)
-}
-
-func (t *Or_InlineCompletionItem_insertText) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder19 := json.NewDecoder(bytes.NewReader(x))
- decoder19.DisallowUnknownFields()
- var stringVal string
- if err := decoder19.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder20 := json.NewDecoder(bytes.NewReader(x))
- decoder20.DisallowUnknownFields()
- var h20 StringValue
- if err := decoder20.Decode(&h20); err == nil {
- t.Value = h20
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [StringValue string]"}
-}
-
-func (t Or_InlineValue) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InlineValueEvaluatableExpression:
- return json.Marshal(x)
- case InlineValueText:
- return json.Marshal(x)
- case InlineValueVariableLookup:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InlineValueEvaluatableExpression InlineValueText InlineValueVariableLookup]", t)
-}
-
-func (t *Or_InlineValue) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder242 := json.NewDecoder(bytes.NewReader(x))
- decoder242.DisallowUnknownFields()
- var h242 InlineValueEvaluatableExpression
- if err := decoder242.Decode(&h242); err == nil {
- t.Value = h242
- return nil
- }
- decoder243 := json.NewDecoder(bytes.NewReader(x))
- decoder243.DisallowUnknownFields()
- var h243 InlineValueText
- if err := decoder243.Decode(&h243); err == nil {
- t.Value = h243
- return nil
- }
- decoder244 := json.NewDecoder(bytes.NewReader(x))
- decoder244.DisallowUnknownFields()
- var h244 InlineValueVariableLookup
- if err := decoder244.Decode(&h244); err == nil {
- t.Value = h244
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InlineValueEvaluatableExpression InlineValueText InlineValueVariableLookup]"}
-}
-
-func (t Or_LSPAny) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case LSPArray:
- return json.Marshal(x)
- case LSPObject:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case float64:
- return json.Marshal(x)
- case int32:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case uint32:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [LSPArray LSPObject bool float64 int32 string uint32]", t)
-}
-
-func (t *Or_LSPAny) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder228 := json.NewDecoder(bytes.NewReader(x))
- decoder228.DisallowUnknownFields()
- var boolVal bool
- if err := decoder228.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder229 := json.NewDecoder(bytes.NewReader(x))
- decoder229.DisallowUnknownFields()
- var float64Val float64
- if err := decoder229.Decode(&float64Val); err == nil {
- t.Value = float64Val
- return nil
- }
- decoder230 := json.NewDecoder(bytes.NewReader(x))
- decoder230.DisallowUnknownFields()
- var int32Val int32
- if err := decoder230.Decode(&int32Val); err == nil {
- t.Value = int32Val
- return nil
- }
- decoder231 := json.NewDecoder(bytes.NewReader(x))
- decoder231.DisallowUnknownFields()
- var stringVal string
- if err := decoder231.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder232 := json.NewDecoder(bytes.NewReader(x))
- decoder232.DisallowUnknownFields()
- var uint32Val uint32
- if err := decoder232.Decode(&uint32Val); err == nil {
- t.Value = uint32Val
- return nil
- }
- decoder233 := json.NewDecoder(bytes.NewReader(x))
- decoder233.DisallowUnknownFields()
- var h233 LSPArray
- if err := decoder233.Decode(&h233); err == nil {
- t.Value = h233
- return nil
- }
- decoder234 := json.NewDecoder(bytes.NewReader(x))
- decoder234.DisallowUnknownFields()
- var h234 LSPObject
- if err := decoder234.Decode(&h234); err == nil {
- t.Value = h234
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [LSPArray LSPObject bool float64 int32 string uint32]"}
-}
-
-func (t Or_MarkedString) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkedStringWithLanguage:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkedStringWithLanguage string]", t)
-}
-
-func (t *Or_MarkedString) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder266 := json.NewDecoder(bytes.NewReader(x))
- decoder266.DisallowUnknownFields()
- var stringVal string
- if err := decoder266.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder267 := json.NewDecoder(bytes.NewReader(x))
- decoder267.DisallowUnknownFields()
- var h267 MarkedStringWithLanguage
- if err := decoder267.Decode(&h267); err == nil {
- t.Value = h267
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkedStringWithLanguage string]"}
-}
-
-func (t Or_NotebookCellTextDocumentFilter_notebook) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentFilter:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilter string]", t)
-}
-
-func (t *Or_NotebookCellTextDocumentFilter_notebook) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder208 := json.NewDecoder(bytes.NewReader(x))
- decoder208.DisallowUnknownFields()
- var stringVal string
- if err := decoder208.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder209 := json.NewDecoder(bytes.NewReader(x))
- decoder209.DisallowUnknownFields()
- var h209 NotebookDocumentFilter
- if err := decoder209.Decode(&h209); err == nil {
- t.Value = h209
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilter string]"}
-}
-
-func (t Or_NotebookDocumentFilter) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentFilterNotebookType:
- return json.Marshal(x)
- case NotebookDocumentFilterPattern:
- return json.Marshal(x)
- case NotebookDocumentFilterScheme:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilterNotebookType NotebookDocumentFilterPattern NotebookDocumentFilterScheme]", t)
-}
-
-func (t *Or_NotebookDocumentFilter) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder285 := json.NewDecoder(bytes.NewReader(x))
- decoder285.DisallowUnknownFields()
- var h285 NotebookDocumentFilterNotebookType
- if err := decoder285.Decode(&h285); err == nil {
- t.Value = h285
- return nil
- }
- decoder286 := json.NewDecoder(bytes.NewReader(x))
- decoder286.DisallowUnknownFields()
- var h286 NotebookDocumentFilterPattern
- if err := decoder286.Decode(&h286); err == nil {
- t.Value = h286
- return nil
- }
- decoder287 := json.NewDecoder(bytes.NewReader(x))
- decoder287.DisallowUnknownFields()
- var h287 NotebookDocumentFilterScheme
- if err := decoder287.Decode(&h287); err == nil {
- t.Value = h287
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilterNotebookType NotebookDocumentFilterPattern NotebookDocumentFilterScheme]"}
-}
-
-func (t Or_NotebookDocumentFilterWithCells_notebook) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentFilter:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilter string]", t)
-}
-
-func (t *Or_NotebookDocumentFilterWithCells_notebook) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder192 := json.NewDecoder(bytes.NewReader(x))
- decoder192.DisallowUnknownFields()
- var stringVal string
- if err := decoder192.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder193 := json.NewDecoder(bytes.NewReader(x))
- decoder193.DisallowUnknownFields()
- var h193 NotebookDocumentFilter
- if err := decoder193.Decode(&h193); err == nil {
- t.Value = h193
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilter string]"}
-}
-
-func (t Or_NotebookDocumentFilterWithNotebook_notebook) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentFilter:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilter string]", t)
-}
-
-func (t *Or_NotebookDocumentFilterWithNotebook_notebook) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder189 := json.NewDecoder(bytes.NewReader(x))
- decoder189.DisallowUnknownFields()
- var stringVal string
- if err := decoder189.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder190 := json.NewDecoder(bytes.NewReader(x))
- decoder190.DisallowUnknownFields()
- var h190 NotebookDocumentFilter
- if err := decoder190.Decode(&h190); err == nil {
- t.Value = h190
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilter string]"}
-}
-
-func (t Or_NotebookDocumentSyncOptions_notebookSelector_Elem) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentFilterWithCells:
- return json.Marshal(x)
- case NotebookDocumentFilterWithNotebook:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilterWithCells NotebookDocumentFilterWithNotebook]", t)
-}
-
-func (t *Or_NotebookDocumentSyncOptions_notebookSelector_Elem) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder68 := json.NewDecoder(bytes.NewReader(x))
- decoder68.DisallowUnknownFields()
- var h68 NotebookDocumentFilterWithCells
- if err := decoder68.Decode(&h68); err == nil {
- t.Value = h68
- return nil
- }
- decoder69 := json.NewDecoder(bytes.NewReader(x))
- decoder69.DisallowUnknownFields()
- var h69 NotebookDocumentFilterWithNotebook
- if err := decoder69.Decode(&h69); err == nil {
- t.Value = h69
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilterWithCells NotebookDocumentFilterWithNotebook]"}
-}
-
-func (t Or_ParameterInformation_documentation) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkupContent:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t)
-}
-
-func (t *Or_ParameterInformation_documentation) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder205 := json.NewDecoder(bytes.NewReader(x))
- decoder205.DisallowUnknownFields()
- var stringVal string
- if err := decoder205.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder206 := json.NewDecoder(bytes.NewReader(x))
- decoder206.DisallowUnknownFields()
- var h206 MarkupContent
- if err := decoder206.Decode(&h206); err == nil {
- t.Value = h206
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"}
-}
-
-func (t Or_ParameterInformation_label) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Tuple_ParameterInformation_label_Item1:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Tuple_ParameterInformation_label_Item1 string]", t)
-}
-
-func (t *Or_ParameterInformation_label) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder202 := json.NewDecoder(bytes.NewReader(x))
- decoder202.DisallowUnknownFields()
- var stringVal string
- if err := decoder202.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder203 := json.NewDecoder(bytes.NewReader(x))
- decoder203.DisallowUnknownFields()
- var h203 Tuple_ParameterInformation_label_Item1
- if err := decoder203.Decode(&h203); err == nil {
- t.Value = h203
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Tuple_ParameterInformation_label_Item1 string]"}
-}
-
-func (t Or_PrepareRenameResult) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case PrepareRenameDefaultBehavior:
- return json.Marshal(x)
- case PrepareRenamePlaceholder:
- return json.Marshal(x)
- case Range:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [PrepareRenameDefaultBehavior PrepareRenamePlaceholder Range]", t)
-}
-
-func (t *Or_PrepareRenameResult) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder252 := json.NewDecoder(bytes.NewReader(x))
- decoder252.DisallowUnknownFields()
- var h252 PrepareRenameDefaultBehavior
- if err := decoder252.Decode(&h252); err == nil {
- t.Value = h252
- return nil
- }
- decoder253 := json.NewDecoder(bytes.NewReader(x))
- decoder253.DisallowUnknownFields()
- var h253 PrepareRenamePlaceholder
- if err := decoder253.Decode(&h253); err == nil {
- t.Value = h253
- return nil
- }
- decoder254 := json.NewDecoder(bytes.NewReader(x))
- decoder254.DisallowUnknownFields()
- var h254 Range
- if err := decoder254.Decode(&h254); err == nil {
- t.Value = h254
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [PrepareRenameDefaultBehavior PrepareRenamePlaceholder Range]"}
-}
-
-func (t Or_ProgressToken) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case int32:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [int32 string]", t)
-}
-
-func (t *Or_ProgressToken) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder255 := json.NewDecoder(bytes.NewReader(x))
- decoder255.DisallowUnknownFields()
- var int32Val int32
- if err := decoder255.Decode(&int32Val); err == nil {
- t.Value = int32Val
- return nil
- }
- decoder256 := json.NewDecoder(bytes.NewReader(x))
- decoder256.DisallowUnknownFields()
- var stringVal string
- if err := decoder256.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [int32 string]"}
-}
-
-func (t Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case FullDocumentDiagnosticReport:
- return json.Marshal(x)
- case UnchangedDocumentDiagnosticReport:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]", t)
-}
-
-func (t *Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder60 := json.NewDecoder(bytes.NewReader(x))
- decoder60.DisallowUnknownFields()
- var h60 FullDocumentDiagnosticReport
- if err := decoder60.Decode(&h60); err == nil {
- t.Value = h60
- return nil
- }
- decoder61 := json.NewDecoder(bytes.NewReader(x))
- decoder61.DisallowUnknownFields()
- var h61 UnchangedDocumentDiagnosticReport
- if err := decoder61.Decode(&h61); err == nil {
- t.Value = h61
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]"}
-}
-
-func (t Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case FullDocumentDiagnosticReport:
- return json.Marshal(x)
- case UnchangedDocumentDiagnosticReport:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]", t)
-}
-
-func (t *Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder64 := json.NewDecoder(bytes.NewReader(x))
- decoder64.DisallowUnknownFields()
- var h64 FullDocumentDiagnosticReport
- if err := decoder64.Decode(&h64); err == nil {
- t.Value = h64
- return nil
- }
- decoder65 := json.NewDecoder(bytes.NewReader(x))
- decoder65.DisallowUnknownFields()
- var h65 UnchangedDocumentDiagnosticReport
- if err := decoder65.Decode(&h65); err == nil {
- t.Value = h65
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]"}
-}
-
-func (t Or_RelativePattern_baseUri) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case URI:
- return json.Marshal(x)
- case WorkspaceFolder:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [URI WorkspaceFolder]", t)
-}
-
-func (t *Or_RelativePattern_baseUri) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder214 := json.NewDecoder(bytes.NewReader(x))
- decoder214.DisallowUnknownFields()
- var h214 URI
- if err := decoder214.Decode(&h214); err == nil {
- t.Value = h214
- return nil
- }
- decoder215 := json.NewDecoder(bytes.NewReader(x))
- decoder215.DisallowUnknownFields()
- var h215 WorkspaceFolder
- if err := decoder215.Decode(&h215); err == nil {
- t.Value = h215
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [URI WorkspaceFolder]"}
-}
-
-func (t Or_Result_textDocument_codeAction_Item0_Elem) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case CodeAction:
- return json.Marshal(x)
- case Command:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [CodeAction Command]", t)
-}
-
-func (t *Or_Result_textDocument_codeAction_Item0_Elem) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder322 := json.NewDecoder(bytes.NewReader(x))
- decoder322.DisallowUnknownFields()
- var h322 CodeAction
- if err := decoder322.Decode(&h322); err == nil {
- t.Value = h322
- return nil
- }
- decoder323 := json.NewDecoder(bytes.NewReader(x))
- decoder323.DisallowUnknownFields()
- var h323 Command
- if err := decoder323.Decode(&h323); err == nil {
- t.Value = h323
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [CodeAction Command]"}
-}
-
-func (t Or_Result_textDocument_completion) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case CompletionList:
- return json.Marshal(x)
- case []CompletionItem:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [CompletionList []CompletionItem]", t)
-}
-
-func (t *Or_Result_textDocument_completion) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder310 := json.NewDecoder(bytes.NewReader(x))
- decoder310.DisallowUnknownFields()
- var h310 CompletionList
- if err := decoder310.Decode(&h310); err == nil {
- t.Value = h310
- return nil
- }
- decoder311 := json.NewDecoder(bytes.NewReader(x))
- decoder311.DisallowUnknownFields()
- var h311 []CompletionItem
- if err := decoder311.Decode(&h311); err == nil {
- t.Value = h311
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [CompletionList []CompletionItem]"}
-}
-
-func (t Or_Result_textDocument_declaration) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Declaration:
- return json.Marshal(x)
- case []DeclarationLink:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Declaration []DeclarationLink]", t)
-}
-
-func (t *Or_Result_textDocument_declaration) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder298 := json.NewDecoder(bytes.NewReader(x))
- decoder298.DisallowUnknownFields()
- var h298 Declaration
- if err := decoder298.Decode(&h298); err == nil {
- t.Value = h298
- return nil
- }
- decoder299 := json.NewDecoder(bytes.NewReader(x))
- decoder299.DisallowUnknownFields()
- var h299 []DeclarationLink
- if err := decoder299.Decode(&h299); err == nil {
- t.Value = h299
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Declaration []DeclarationLink]"}
-}
-
-func (t Or_Result_textDocument_definition) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Definition:
- return json.Marshal(x)
- case []DefinitionLink:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Definition []DefinitionLink]", t)
-}
-
-func (t *Or_Result_textDocument_definition) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder314 := json.NewDecoder(bytes.NewReader(x))
- decoder314.DisallowUnknownFields()
- var h314 Definition
- if err := decoder314.Decode(&h314); err == nil {
- t.Value = h314
- return nil
- }
- decoder315 := json.NewDecoder(bytes.NewReader(x))
- decoder315.DisallowUnknownFields()
- var h315 []DefinitionLink
- if err := decoder315.Decode(&h315); err == nil {
- t.Value = h315
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Definition []DefinitionLink]"}
-}
-
-func (t Or_Result_textDocument_documentSymbol) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case []DocumentSymbol:
- return json.Marshal(x)
- case []SymbolInformation:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [[]DocumentSymbol []SymbolInformation]", t)
-}
-
-func (t *Or_Result_textDocument_documentSymbol) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder318 := json.NewDecoder(bytes.NewReader(x))
- decoder318.DisallowUnknownFields()
- var h318 []DocumentSymbol
- if err := decoder318.Decode(&h318); err == nil {
- t.Value = h318
- return nil
- }
- decoder319 := json.NewDecoder(bytes.NewReader(x))
- decoder319.DisallowUnknownFields()
- var h319 []SymbolInformation
- if err := decoder319.Decode(&h319); err == nil {
- t.Value = h319
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [[]DocumentSymbol []SymbolInformation]"}
-}
-
-func (t Or_Result_textDocument_implementation) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Definition:
- return json.Marshal(x)
- case []DefinitionLink:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Definition []DefinitionLink]", t)
-}
-
-func (t *Or_Result_textDocument_implementation) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder290 := json.NewDecoder(bytes.NewReader(x))
- decoder290.DisallowUnknownFields()
- var h290 Definition
- if err := decoder290.Decode(&h290); err == nil {
- t.Value = h290
- return nil
- }
- decoder291 := json.NewDecoder(bytes.NewReader(x))
- decoder291.DisallowUnknownFields()
- var h291 []DefinitionLink
- if err := decoder291.Decode(&h291); err == nil {
- t.Value = h291
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Definition []DefinitionLink]"}
-}
-
-func (t Or_Result_textDocument_inlineCompletion) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InlineCompletionList:
- return json.Marshal(x)
- case []InlineCompletionItem:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InlineCompletionList []InlineCompletionItem]", t)
-}
-
-func (t *Or_Result_textDocument_inlineCompletion) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder306 := json.NewDecoder(bytes.NewReader(x))
- decoder306.DisallowUnknownFields()
- var h306 InlineCompletionList
- if err := decoder306.Decode(&h306); err == nil {
- t.Value = h306
- return nil
- }
- decoder307 := json.NewDecoder(bytes.NewReader(x))
- decoder307.DisallowUnknownFields()
- var h307 []InlineCompletionItem
- if err := decoder307.Decode(&h307); err == nil {
- t.Value = h307
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InlineCompletionList []InlineCompletionItem]"}
-}
-
-func (t Or_Result_textDocument_semanticTokens_full_delta) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case SemanticTokens:
- return json.Marshal(x)
- case SemanticTokensDelta:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [SemanticTokens SemanticTokensDelta]", t)
-}
-
-func (t *Or_Result_textDocument_semanticTokens_full_delta) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder302 := json.NewDecoder(bytes.NewReader(x))
- decoder302.DisallowUnknownFields()
- var h302 SemanticTokens
- if err := decoder302.Decode(&h302); err == nil {
- t.Value = h302
- return nil
- }
- decoder303 := json.NewDecoder(bytes.NewReader(x))
- decoder303.DisallowUnknownFields()
- var h303 SemanticTokensDelta
- if err := decoder303.Decode(&h303); err == nil {
- t.Value = h303
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [SemanticTokens SemanticTokensDelta]"}
-}
-
-func (t Or_Result_textDocument_typeDefinition) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Definition:
- return json.Marshal(x)
- case []DefinitionLink:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Definition []DefinitionLink]", t)
-}
-
-func (t *Or_Result_textDocument_typeDefinition) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder294 := json.NewDecoder(bytes.NewReader(x))
- decoder294.DisallowUnknownFields()
- var h294 Definition
- if err := decoder294.Decode(&h294); err == nil {
- t.Value = h294
- return nil
- }
- decoder295 := json.NewDecoder(bytes.NewReader(x))
- decoder295.DisallowUnknownFields()
- var h295 []DefinitionLink
- if err := decoder295.Decode(&h295); err == nil {
- t.Value = h295
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Definition []DefinitionLink]"}
-}
-
-func (t Or_Result_workspace_symbol) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case []SymbolInformation:
- return json.Marshal(x)
- case []WorkspaceSymbol:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [[]SymbolInformation []WorkspaceSymbol]", t)
-}
-
-func (t *Or_Result_workspace_symbol) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder326 := json.NewDecoder(bytes.NewReader(x))
- decoder326.DisallowUnknownFields()
- var h326 []SymbolInformation
- if err := decoder326.Decode(&h326); err == nil {
- t.Value = h326
- return nil
- }
- decoder327 := json.NewDecoder(bytes.NewReader(x))
- decoder327.DisallowUnknownFields()
- var h327 []WorkspaceSymbol
- if err := decoder327.Decode(&h327); err == nil {
- t.Value = h327
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [[]SymbolInformation []WorkspaceSymbol]"}
-}
-
-func (t Or_SemanticTokensOptions_full) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case SemanticTokensFullDelta:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [SemanticTokensFullDelta bool]", t)
-}
-
-func (t *Or_SemanticTokensOptions_full) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder47 := json.NewDecoder(bytes.NewReader(x))
- decoder47.DisallowUnknownFields()
- var boolVal bool
- if err := decoder47.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder48 := json.NewDecoder(bytes.NewReader(x))
- decoder48.DisallowUnknownFields()
- var h48 SemanticTokensFullDelta
- if err := decoder48.Decode(&h48); err == nil {
- t.Value = h48
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [SemanticTokensFullDelta bool]"}
-}
-
-func (t Or_SemanticTokensOptions_range) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Lit_SemanticTokensOptions_range_Item1:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Lit_SemanticTokensOptions_range_Item1 bool]", t)
-}
-
-func (t *Or_SemanticTokensOptions_range) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder44 := json.NewDecoder(bytes.NewReader(x))
- decoder44.DisallowUnknownFields()
- var boolVal bool
- if err := decoder44.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder45 := json.NewDecoder(bytes.NewReader(x))
- decoder45.DisallowUnknownFields()
- var h45 Lit_SemanticTokensOptions_range_Item1
- if err := decoder45.Decode(&h45); err == nil {
- t.Value = h45
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Lit_SemanticTokensOptions_range_Item1 bool]"}
-}
-
-func (t Or_ServerCapabilities_callHierarchyProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case CallHierarchyOptions:
- return json.Marshal(x)
- case CallHierarchyRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [CallHierarchyOptions CallHierarchyRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_callHierarchyProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder140 := json.NewDecoder(bytes.NewReader(x))
- decoder140.DisallowUnknownFields()
- var boolVal bool
- if err := decoder140.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder141 := json.NewDecoder(bytes.NewReader(x))
- decoder141.DisallowUnknownFields()
- var h141 CallHierarchyOptions
- if err := decoder141.Decode(&h141); err == nil {
- t.Value = h141
- return nil
- }
- decoder142 := json.NewDecoder(bytes.NewReader(x))
- decoder142.DisallowUnknownFields()
- var h142 CallHierarchyRegistrationOptions
- if err := decoder142.Decode(&h142); err == nil {
- t.Value = h142
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [CallHierarchyOptions CallHierarchyRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_codeActionProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case CodeActionOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [CodeActionOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_codeActionProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder109 := json.NewDecoder(bytes.NewReader(x))
- decoder109.DisallowUnknownFields()
- var boolVal bool
- if err := decoder109.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder110 := json.NewDecoder(bytes.NewReader(x))
- decoder110.DisallowUnknownFields()
- var h110 CodeActionOptions
- if err := decoder110.Decode(&h110); err == nil {
- t.Value = h110
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [CodeActionOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_colorProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DocumentColorOptions:
- return json.Marshal(x)
- case DocumentColorRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DocumentColorOptions DocumentColorRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_colorProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder113 := json.NewDecoder(bytes.NewReader(x))
- decoder113.DisallowUnknownFields()
- var boolVal bool
- if err := decoder113.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder114 := json.NewDecoder(bytes.NewReader(x))
- decoder114.DisallowUnknownFields()
- var h114 DocumentColorOptions
- if err := decoder114.Decode(&h114); err == nil {
- t.Value = h114
- return nil
- }
- decoder115 := json.NewDecoder(bytes.NewReader(x))
- decoder115.DisallowUnknownFields()
- var h115 DocumentColorRegistrationOptions
- if err := decoder115.Decode(&h115); err == nil {
- t.Value = h115
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DocumentColorOptions DocumentColorRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_declarationProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DeclarationOptions:
- return json.Marshal(x)
- case DeclarationRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DeclarationOptions DeclarationRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_declarationProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder83 := json.NewDecoder(bytes.NewReader(x))
- decoder83.DisallowUnknownFields()
- var boolVal bool
- if err := decoder83.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder84 := json.NewDecoder(bytes.NewReader(x))
- decoder84.DisallowUnknownFields()
- var h84 DeclarationOptions
- if err := decoder84.Decode(&h84); err == nil {
- t.Value = h84
- return nil
- }
- decoder85 := json.NewDecoder(bytes.NewReader(x))
- decoder85.DisallowUnknownFields()
- var h85 DeclarationRegistrationOptions
- if err := decoder85.Decode(&h85); err == nil {
- t.Value = h85
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DeclarationOptions DeclarationRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_definitionProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DefinitionOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DefinitionOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_definitionProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder87 := json.NewDecoder(bytes.NewReader(x))
- decoder87.DisallowUnknownFields()
- var boolVal bool
- if err := decoder87.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder88 := json.NewDecoder(bytes.NewReader(x))
- decoder88.DisallowUnknownFields()
- var h88 DefinitionOptions
- if err := decoder88.Decode(&h88); err == nil {
- t.Value = h88
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DefinitionOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_diagnosticProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DiagnosticOptions:
- return json.Marshal(x)
- case DiagnosticRegistrationOptions:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DiagnosticOptions DiagnosticRegistrationOptions]", t)
-}
-
-func (t *Or_ServerCapabilities_diagnosticProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder174 := json.NewDecoder(bytes.NewReader(x))
- decoder174.DisallowUnknownFields()
- var h174 DiagnosticOptions
- if err := decoder174.Decode(&h174); err == nil {
- t.Value = h174
- return nil
- }
- decoder175 := json.NewDecoder(bytes.NewReader(x))
- decoder175.DisallowUnknownFields()
- var h175 DiagnosticRegistrationOptions
- if err := decoder175.Decode(&h175); err == nil {
- t.Value = h175
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DiagnosticOptions DiagnosticRegistrationOptions]"}
-}
-
-func (t Or_ServerCapabilities_documentFormattingProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DocumentFormattingOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DocumentFormattingOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_documentFormattingProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder120 := json.NewDecoder(bytes.NewReader(x))
- decoder120.DisallowUnknownFields()
- var boolVal bool
- if err := decoder120.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder121 := json.NewDecoder(bytes.NewReader(x))
- decoder121.DisallowUnknownFields()
- var h121 DocumentFormattingOptions
- if err := decoder121.Decode(&h121); err == nil {
- t.Value = h121
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DocumentFormattingOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_documentHighlightProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DocumentHighlightOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DocumentHighlightOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_documentHighlightProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder103 := json.NewDecoder(bytes.NewReader(x))
- decoder103.DisallowUnknownFields()
- var boolVal bool
- if err := decoder103.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder104 := json.NewDecoder(bytes.NewReader(x))
- decoder104.DisallowUnknownFields()
- var h104 DocumentHighlightOptions
- if err := decoder104.Decode(&h104); err == nil {
- t.Value = h104
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DocumentHighlightOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_documentRangeFormattingProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DocumentRangeFormattingOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DocumentRangeFormattingOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_documentRangeFormattingProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder123 := json.NewDecoder(bytes.NewReader(x))
- decoder123.DisallowUnknownFields()
- var boolVal bool
- if err := decoder123.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder124 := json.NewDecoder(bytes.NewReader(x))
- decoder124.DisallowUnknownFields()
- var h124 DocumentRangeFormattingOptions
- if err := decoder124.Decode(&h124); err == nil {
- t.Value = h124
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DocumentRangeFormattingOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_documentSymbolProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DocumentSymbolOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DocumentSymbolOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_documentSymbolProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder106 := json.NewDecoder(bytes.NewReader(x))
- decoder106.DisallowUnknownFields()
- var boolVal bool
- if err := decoder106.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder107 := json.NewDecoder(bytes.NewReader(x))
- decoder107.DisallowUnknownFields()
- var h107 DocumentSymbolOptions
- if err := decoder107.Decode(&h107); err == nil {
- t.Value = h107
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DocumentSymbolOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_foldingRangeProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case FoldingRangeOptions:
- return json.Marshal(x)
- case FoldingRangeRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [FoldingRangeOptions FoldingRangeRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_foldingRangeProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder130 := json.NewDecoder(bytes.NewReader(x))
- decoder130.DisallowUnknownFields()
- var boolVal bool
- if err := decoder130.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder131 := json.NewDecoder(bytes.NewReader(x))
- decoder131.DisallowUnknownFields()
- var h131 FoldingRangeOptions
- if err := decoder131.Decode(&h131); err == nil {
- t.Value = h131
- return nil
- }
- decoder132 := json.NewDecoder(bytes.NewReader(x))
- decoder132.DisallowUnknownFields()
- var h132 FoldingRangeRegistrationOptions
- if err := decoder132.Decode(&h132); err == nil {
- t.Value = h132
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [FoldingRangeOptions FoldingRangeRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_hoverProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case HoverOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [HoverOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_hoverProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder79 := json.NewDecoder(bytes.NewReader(x))
- decoder79.DisallowUnknownFields()
- var boolVal bool
- if err := decoder79.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder80 := json.NewDecoder(bytes.NewReader(x))
- decoder80.DisallowUnknownFields()
- var h80 HoverOptions
- if err := decoder80.Decode(&h80); err == nil {
- t.Value = h80
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [HoverOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_implementationProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case ImplementationOptions:
- return json.Marshal(x)
- case ImplementationRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [ImplementationOptions ImplementationRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_implementationProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder96 := json.NewDecoder(bytes.NewReader(x))
- decoder96.DisallowUnknownFields()
- var boolVal bool
- if err := decoder96.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder97 := json.NewDecoder(bytes.NewReader(x))
- decoder97.DisallowUnknownFields()
- var h97 ImplementationOptions
- if err := decoder97.Decode(&h97); err == nil {
- t.Value = h97
- return nil
- }
- decoder98 := json.NewDecoder(bytes.NewReader(x))
- decoder98.DisallowUnknownFields()
- var h98 ImplementationRegistrationOptions
- if err := decoder98.Decode(&h98); err == nil {
- t.Value = h98
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [ImplementationOptions ImplementationRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_inlayHintProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InlayHintOptions:
- return json.Marshal(x)
- case InlayHintRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InlayHintOptions InlayHintRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_inlayHintProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder169 := json.NewDecoder(bytes.NewReader(x))
- decoder169.DisallowUnknownFields()
- var boolVal bool
- if err := decoder169.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder170 := json.NewDecoder(bytes.NewReader(x))
- decoder170.DisallowUnknownFields()
- var h170 InlayHintOptions
- if err := decoder170.Decode(&h170); err == nil {
- t.Value = h170
- return nil
- }
- decoder171 := json.NewDecoder(bytes.NewReader(x))
- decoder171.DisallowUnknownFields()
- var h171 InlayHintRegistrationOptions
- if err := decoder171.Decode(&h171); err == nil {
- t.Value = h171
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InlayHintOptions InlayHintRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_inlineCompletionProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InlineCompletionOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InlineCompletionOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_inlineCompletionProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder177 := json.NewDecoder(bytes.NewReader(x))
- decoder177.DisallowUnknownFields()
- var boolVal bool
- if err := decoder177.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder178 := json.NewDecoder(bytes.NewReader(x))
- decoder178.DisallowUnknownFields()
- var h178 InlineCompletionOptions
- if err := decoder178.Decode(&h178); err == nil {
- t.Value = h178
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InlineCompletionOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_inlineValueProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InlineValueOptions:
- return json.Marshal(x)
- case InlineValueRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InlineValueOptions InlineValueRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_inlineValueProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder164 := json.NewDecoder(bytes.NewReader(x))
- decoder164.DisallowUnknownFields()
- var boolVal bool
- if err := decoder164.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder165 := json.NewDecoder(bytes.NewReader(x))
- decoder165.DisallowUnknownFields()
- var h165 InlineValueOptions
- if err := decoder165.Decode(&h165); err == nil {
- t.Value = h165
- return nil
- }
- decoder166 := json.NewDecoder(bytes.NewReader(x))
- decoder166.DisallowUnknownFields()
- var h166 InlineValueRegistrationOptions
- if err := decoder166.Decode(&h166); err == nil {
- t.Value = h166
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InlineValueOptions InlineValueRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_linkedEditingRangeProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case LinkedEditingRangeOptions:
- return json.Marshal(x)
- case LinkedEditingRangeRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [LinkedEditingRangeOptions LinkedEditingRangeRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_linkedEditingRangeProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder145 := json.NewDecoder(bytes.NewReader(x))
- decoder145.DisallowUnknownFields()
- var boolVal bool
- if err := decoder145.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder146 := json.NewDecoder(bytes.NewReader(x))
- decoder146.DisallowUnknownFields()
- var h146 LinkedEditingRangeOptions
- if err := decoder146.Decode(&h146); err == nil {
- t.Value = h146
- return nil
- }
- decoder147 := json.NewDecoder(bytes.NewReader(x))
- decoder147.DisallowUnknownFields()
- var h147 LinkedEditingRangeRegistrationOptions
- if err := decoder147.Decode(&h147); err == nil {
- t.Value = h147
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [LinkedEditingRangeOptions LinkedEditingRangeRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_monikerProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MonikerOptions:
- return json.Marshal(x)
- case MonikerRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MonikerOptions MonikerRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_monikerProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder154 := json.NewDecoder(bytes.NewReader(x))
- decoder154.DisallowUnknownFields()
- var boolVal bool
- if err := decoder154.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder155 := json.NewDecoder(bytes.NewReader(x))
- decoder155.DisallowUnknownFields()
- var h155 MonikerOptions
- if err := decoder155.Decode(&h155); err == nil {
- t.Value = h155
- return nil
- }
- decoder156 := json.NewDecoder(bytes.NewReader(x))
- decoder156.DisallowUnknownFields()
- var h156 MonikerRegistrationOptions
- if err := decoder156.Decode(&h156); err == nil {
- t.Value = h156
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MonikerOptions MonikerRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_notebookDocumentSync) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentSyncOptions:
- return json.Marshal(x)
- case NotebookDocumentSyncRegistrationOptions:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentSyncOptions NotebookDocumentSyncRegistrationOptions]", t)
-}
-
-func (t *Or_ServerCapabilities_notebookDocumentSync) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder76 := json.NewDecoder(bytes.NewReader(x))
- decoder76.DisallowUnknownFields()
- var h76 NotebookDocumentSyncOptions
- if err := decoder76.Decode(&h76); err == nil {
- t.Value = h76
- return nil
- }
- decoder77 := json.NewDecoder(bytes.NewReader(x))
- decoder77.DisallowUnknownFields()
- var h77 NotebookDocumentSyncRegistrationOptions
- if err := decoder77.Decode(&h77); err == nil {
- t.Value = h77
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentSyncOptions NotebookDocumentSyncRegistrationOptions]"}
-}
-
-func (t Or_ServerCapabilities_referencesProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case ReferenceOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [ReferenceOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_referencesProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder100 := json.NewDecoder(bytes.NewReader(x))
- decoder100.DisallowUnknownFields()
- var boolVal bool
- if err := decoder100.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder101 := json.NewDecoder(bytes.NewReader(x))
- decoder101.DisallowUnknownFields()
- var h101 ReferenceOptions
- if err := decoder101.Decode(&h101); err == nil {
- t.Value = h101
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [ReferenceOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_renameProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case RenameOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [RenameOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_renameProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder126 := json.NewDecoder(bytes.NewReader(x))
- decoder126.DisallowUnknownFields()
- var boolVal bool
- if err := decoder126.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder127 := json.NewDecoder(bytes.NewReader(x))
- decoder127.DisallowUnknownFields()
- var h127 RenameOptions
- if err := decoder127.Decode(&h127); err == nil {
- t.Value = h127
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [RenameOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_selectionRangeProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case SelectionRangeOptions:
- return json.Marshal(x)
- case SelectionRangeRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [SelectionRangeOptions SelectionRangeRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_selectionRangeProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder135 := json.NewDecoder(bytes.NewReader(x))
- decoder135.DisallowUnknownFields()
- var boolVal bool
- if err := decoder135.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder136 := json.NewDecoder(bytes.NewReader(x))
- decoder136.DisallowUnknownFields()
- var h136 SelectionRangeOptions
- if err := decoder136.Decode(&h136); err == nil {
- t.Value = h136
- return nil
- }
- decoder137 := json.NewDecoder(bytes.NewReader(x))
- decoder137.DisallowUnknownFields()
- var h137 SelectionRangeRegistrationOptions
- if err := decoder137.Decode(&h137); err == nil {
- t.Value = h137
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [SelectionRangeOptions SelectionRangeRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_semanticTokensProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case SemanticTokensOptions:
- return json.Marshal(x)
- case SemanticTokensRegistrationOptions:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [SemanticTokensOptions SemanticTokensRegistrationOptions]", t)
-}
-
-func (t *Or_ServerCapabilities_semanticTokensProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder150 := json.NewDecoder(bytes.NewReader(x))
- decoder150.DisallowUnknownFields()
- var h150 SemanticTokensOptions
- if err := decoder150.Decode(&h150); err == nil {
- t.Value = h150
- return nil
- }
- decoder151 := json.NewDecoder(bytes.NewReader(x))
- decoder151.DisallowUnknownFields()
- var h151 SemanticTokensRegistrationOptions
- if err := decoder151.Decode(&h151); err == nil {
- t.Value = h151
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [SemanticTokensOptions SemanticTokensRegistrationOptions]"}
-}
-
-func (t Or_ServerCapabilities_textDocumentSync) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TextDocumentSyncKind:
- return json.Marshal(x)
- case TextDocumentSyncOptions:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TextDocumentSyncKind TextDocumentSyncOptions]", t)
-}
-
-func (t *Or_ServerCapabilities_textDocumentSync) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder72 := json.NewDecoder(bytes.NewReader(x))
- decoder72.DisallowUnknownFields()
- var h72 TextDocumentSyncKind
- if err := decoder72.Decode(&h72); err == nil {
- t.Value = h72
- return nil
- }
- decoder73 := json.NewDecoder(bytes.NewReader(x))
- decoder73.DisallowUnknownFields()
- var h73 TextDocumentSyncOptions
- if err := decoder73.Decode(&h73); err == nil {
- t.Value = h73
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TextDocumentSyncKind TextDocumentSyncOptions]"}
-}
-
-func (t Or_ServerCapabilities_typeDefinitionProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TypeDefinitionOptions:
- return json.Marshal(x)
- case TypeDefinitionRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TypeDefinitionOptions TypeDefinitionRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_typeDefinitionProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder91 := json.NewDecoder(bytes.NewReader(x))
- decoder91.DisallowUnknownFields()
- var boolVal bool
- if err := decoder91.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder92 := json.NewDecoder(bytes.NewReader(x))
- decoder92.DisallowUnknownFields()
- var h92 TypeDefinitionOptions
- if err := decoder92.Decode(&h92); err == nil {
- t.Value = h92
- return nil
- }
- decoder93 := json.NewDecoder(bytes.NewReader(x))
- decoder93.DisallowUnknownFields()
- var h93 TypeDefinitionRegistrationOptions
- if err := decoder93.Decode(&h93); err == nil {
- t.Value = h93
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TypeDefinitionOptions TypeDefinitionRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_typeHierarchyProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TypeHierarchyOptions:
- return json.Marshal(x)
- case TypeHierarchyRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TypeHierarchyOptions TypeHierarchyRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_typeHierarchyProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder159 := json.NewDecoder(bytes.NewReader(x))
- decoder159.DisallowUnknownFields()
- var boolVal bool
- if err := decoder159.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder160 := json.NewDecoder(bytes.NewReader(x))
- decoder160.DisallowUnknownFields()
- var h160 TypeHierarchyOptions
- if err := decoder160.Decode(&h160); err == nil {
- t.Value = h160
- return nil
- }
- decoder161 := json.NewDecoder(bytes.NewReader(x))
- decoder161.DisallowUnknownFields()
- var h161 TypeHierarchyRegistrationOptions
- if err := decoder161.Decode(&h161); err == nil {
- t.Value = h161
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TypeHierarchyOptions TypeHierarchyRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_workspaceSymbolProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case WorkspaceSymbolOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [WorkspaceSymbolOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_workspaceSymbolProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder117 := json.NewDecoder(bytes.NewReader(x))
- decoder117.DisallowUnknownFields()
- var boolVal bool
- if err := decoder117.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder118 := json.NewDecoder(bytes.NewReader(x))
- decoder118.DisallowUnknownFields()
- var h118 WorkspaceSymbolOptions
- if err := decoder118.Decode(&h118); err == nil {
- t.Value = h118
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [WorkspaceSymbolOptions bool]"}
-}
-
-func (t Or_SignatureInformation_documentation) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkupContent:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t)
-}
-
-func (t *Or_SignatureInformation_documentation) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder186 := json.NewDecoder(bytes.NewReader(x))
- decoder186.DisallowUnknownFields()
- var stringVal string
- if err := decoder186.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder187 := json.NewDecoder(bytes.NewReader(x))
- decoder187.DisallowUnknownFields()
- var h187 MarkupContent
- if err := decoder187.Decode(&h187); err == nil {
- t.Value = h187
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"}
-}
-
-func (t Or_TextDocumentContentChangeEvent) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TextDocumentContentChangePartial:
- return json.Marshal(x)
- case TextDocumentContentChangeWholeDocument:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TextDocumentContentChangePartial TextDocumentContentChangeWholeDocument]", t)
-}
-
-func (t *Or_TextDocumentContentChangeEvent) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder263 := json.NewDecoder(bytes.NewReader(x))
- decoder263.DisallowUnknownFields()
- var h263 TextDocumentContentChangePartial
- if err := decoder263.Decode(&h263); err == nil {
- t.Value = h263
- return nil
- }
- decoder264 := json.NewDecoder(bytes.NewReader(x))
- decoder264.DisallowUnknownFields()
- var h264 TextDocumentContentChangeWholeDocument
- if err := decoder264.Decode(&h264); err == nil {
- t.Value = h264
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TextDocumentContentChangePartial TextDocumentContentChangeWholeDocument]"}
-}
-
-func (t Or_TextDocumentEdit_edits_Elem) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case AnnotatedTextEdit:
- return json.Marshal(x)
- case SnippetTextEdit:
- return json.Marshal(x)
- case TextEdit:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [AnnotatedTextEdit SnippetTextEdit TextEdit]", t)
-}
-
-func (t *Or_TextDocumentEdit_edits_Elem) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder52 := json.NewDecoder(bytes.NewReader(x))
- decoder52.DisallowUnknownFields()
- var h52 AnnotatedTextEdit
- if err := decoder52.Decode(&h52); err == nil {
- t.Value = h52
- return nil
- }
- decoder53 := json.NewDecoder(bytes.NewReader(x))
- decoder53.DisallowUnknownFields()
- var h53 SnippetTextEdit
- if err := decoder53.Decode(&h53); err == nil {
- t.Value = h53
- return nil
- }
- decoder54 := json.NewDecoder(bytes.NewReader(x))
- decoder54.DisallowUnknownFields()
- var h54 TextEdit
- if err := decoder54.Decode(&h54); err == nil {
- t.Value = h54
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [AnnotatedTextEdit SnippetTextEdit TextEdit]"}
-}
-
-func (t Or_TextDocumentFilter) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TextDocumentFilterLanguage:
- return json.Marshal(x)
- case TextDocumentFilterPattern:
- return json.Marshal(x)
- case TextDocumentFilterScheme:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TextDocumentFilterLanguage TextDocumentFilterPattern TextDocumentFilterScheme]", t)
-}
-
-func (t *Or_TextDocumentFilter) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder279 := json.NewDecoder(bytes.NewReader(x))
- decoder279.DisallowUnknownFields()
- var h279 TextDocumentFilterLanguage
- if err := decoder279.Decode(&h279); err == nil {
- t.Value = h279
- return nil
- }
- decoder280 := json.NewDecoder(bytes.NewReader(x))
- decoder280.DisallowUnknownFields()
- var h280 TextDocumentFilterPattern
- if err := decoder280.Decode(&h280); err == nil {
- t.Value = h280
- return nil
- }
- decoder281 := json.NewDecoder(bytes.NewReader(x))
- decoder281.DisallowUnknownFields()
- var h281 TextDocumentFilterScheme
- if err := decoder281.Decode(&h281); err == nil {
- t.Value = h281
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TextDocumentFilterLanguage TextDocumentFilterPattern TextDocumentFilterScheme]"}
-}
-
-func (t Or_TextDocumentSyncOptions_save) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case SaveOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [SaveOptions bool]", t)
-}
-
-func (t *Or_TextDocumentSyncOptions_save) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder195 := json.NewDecoder(bytes.NewReader(x))
- decoder195.DisallowUnknownFields()
- var boolVal bool
- if err := decoder195.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder196 := json.NewDecoder(bytes.NewReader(x))
- decoder196.DisallowUnknownFields()
- var h196 SaveOptions
- if err := decoder196.Decode(&h196); err == nil {
- t.Value = h196
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [SaveOptions bool]"}
-}
-
-func (t Or_WorkspaceDocumentDiagnosticReport) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case WorkspaceFullDocumentDiagnosticReport:
- return json.Marshal(x)
- case WorkspaceUnchangedDocumentDiagnosticReport:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [WorkspaceFullDocumentDiagnosticReport WorkspaceUnchangedDocumentDiagnosticReport]", t)
-}
-
-func (t *Or_WorkspaceDocumentDiagnosticReport) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder259 := json.NewDecoder(bytes.NewReader(x))
- decoder259.DisallowUnknownFields()
- var h259 WorkspaceFullDocumentDiagnosticReport
- if err := decoder259.Decode(&h259); err == nil {
- t.Value = h259
- return nil
- }
- decoder260 := json.NewDecoder(bytes.NewReader(x))
- decoder260.DisallowUnknownFields()
- var h260 WorkspaceUnchangedDocumentDiagnosticReport
- if err := decoder260.Decode(&h260); err == nil {
- t.Value = h260
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [WorkspaceFullDocumentDiagnosticReport WorkspaceUnchangedDocumentDiagnosticReport]"}
-}
-
-func (t Or_WorkspaceEdit_documentChanges_Elem) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case CreateFile:
- return json.Marshal(x)
- case DeleteFile:
- return json.Marshal(x)
- case RenameFile:
- return json.Marshal(x)
- case TextDocumentEdit:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [CreateFile DeleteFile RenameFile TextDocumentEdit]", t)
-}
-
-func (t *Or_WorkspaceEdit_documentChanges_Elem) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder4 := json.NewDecoder(bytes.NewReader(x))
- decoder4.DisallowUnknownFields()
- var h4 CreateFile
- if err := decoder4.Decode(&h4); err == nil {
- t.Value = h4
- return nil
- }
- decoder5 := json.NewDecoder(bytes.NewReader(x))
- decoder5.DisallowUnknownFields()
- var h5 DeleteFile
- if err := decoder5.Decode(&h5); err == nil {
- t.Value = h5
- return nil
- }
- decoder6 := json.NewDecoder(bytes.NewReader(x))
- decoder6.DisallowUnknownFields()
- var h6 RenameFile
- if err := decoder6.Decode(&h6); err == nil {
- t.Value = h6
- return nil
- }
- decoder7 := json.NewDecoder(bytes.NewReader(x))
- decoder7.DisallowUnknownFields()
- var h7 TextDocumentEdit
- if err := decoder7.Decode(&h7); err == nil {
- t.Value = h7
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [CreateFile DeleteFile RenameFile TextDocumentEdit]"}
-}
-
-func (t Or_WorkspaceFoldersServerCapabilities_changeNotifications) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case bool:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [bool string]", t)
-}
-
-func (t *Or_WorkspaceFoldersServerCapabilities_changeNotifications) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder210 := json.NewDecoder(bytes.NewReader(x))
- decoder210.DisallowUnknownFields()
- var boolVal bool
- if err := decoder210.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder211 := json.NewDecoder(bytes.NewReader(x))
- decoder211.DisallowUnknownFields()
- var stringVal string
- if err := decoder211.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [bool string]"}
-}
-
-func (t Or_WorkspaceOptions_textDocumentContent) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TextDocumentContentOptions:
- return json.Marshal(x)
- case TextDocumentContentRegistrationOptions:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TextDocumentContentOptions TextDocumentContentRegistrationOptions]", t)
-}
-
-func (t *Or_WorkspaceOptions_textDocumentContent) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder199 := json.NewDecoder(bytes.NewReader(x))
- decoder199.DisallowUnknownFields()
- var h199 TextDocumentContentOptions
- if err := decoder199.Decode(&h199); err == nil {
- t.Value = h199
- return nil
- }
- decoder200 := json.NewDecoder(bytes.NewReader(x))
- decoder200.DisallowUnknownFields()
- var h200 TextDocumentContentRegistrationOptions
- if err := decoder200.Decode(&h200); err == nil {
- t.Value = h200
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TextDocumentContentOptions TextDocumentContentRegistrationOptions]"}
-}
-
-func (t Or_WorkspaceSymbol_location) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Location:
- return json.Marshal(x)
- case LocationUriOnly:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Location LocationUriOnly]", t)
-}
-
-func (t *Or_WorkspaceSymbol_location) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder39 := json.NewDecoder(bytes.NewReader(x))
- decoder39.DisallowUnknownFields()
- var h39 Location
- if err := decoder39.Decode(&h39); err == nil {
- t.Value = h39
- return nil
- }
- decoder40 := json.NewDecoder(bytes.NewReader(x))
- decoder40.DisallowUnknownFields()
- var h40 LocationUriOnly
- if err := decoder40.Decode(&h40); err == nil {
- t.Value = h40
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Location LocationUriOnly]"}
-}
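
The deleted tsjson.go above repeats one pattern for every Or_* union wrapper: marshal by switching on the concrete type stored in Value, and unmarshal by trying each candidate type in turn with DisallowUnknownFields, keeping the first decode that succeeds (with the scalar variants such as bool tried before the struct variants). As a rough, self-contained sketch of that technique — ProviderOptions and OrProvider below are simplified stand-ins, not the generated LSP types, and the real code returns a dedicated *UnmarshalError rather than a plain error:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// Simplified stand-in for a generated options struct such as HoverOptions.
type ProviderOptions struct {
	WorkDoneProgress bool `json:"workDoneProgress,omitempty"`
}

// OrProvider mirrors the shape of the generated Or_* wrappers: Value holds
// either a bool or a ProviderOptions.
type OrProvider struct {
	Value any
}

func (t OrProvider) MarshalJSON() ([]byte, error) {
	switch x := t.Value.(type) {
	case ProviderOptions:
		return json.Marshal(x)
	case bool:
		return json.Marshal(x)
	case nil:
		return []byte("null"), nil
	}
	return nil, fmt.Errorf("type %T not one of [ProviderOptions bool]", t.Value)
}

func (t *OrProvider) UnmarshalJSON(x []byte) error {
	if string(x) == "null" {
		t.Value = nil
		return nil
	}
	// Try the primitive variant first, then the struct variant; the first
	// candidate that decodes without unknown fields wins.
	var b bool
	dec := json.NewDecoder(bytes.NewReader(x))
	dec.DisallowUnknownFields()
	if err := dec.Decode(&b); err == nil {
		t.Value = b
		return nil
	}
	var o ProviderOptions
	dec = json.NewDecoder(bytes.NewReader(x))
	dec.DisallowUnknownFields()
	if err := dec.Decode(&o); err == nil {
		t.Value = o
		return nil
	}
	return fmt.Errorf("unmarshal failed to match one of [ProviderOptions bool]")
}

func main() {
	var p OrProvider
	if err := json.Unmarshal([]byte(`{"workDoneProgress":true}`), &p); err != nil {
		panic(err)
	}
	fmt.Printf("%#v\n", p.Value) // main.ProviderOptions{WorkDoneProgress:true}
}

DisallowUnknownFields is what keeps the struct decoders from greedily matching arbitrary JSON objects, which is why the ordering of candidates in the generated code is largely a matter of trying the cheap scalar decodes first.
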
diff --git a/internal/lsp/protocol/tsprotocol.go b/internal/lsp/protocol/tsprotocol.go
deleted file mode 100644
index 0e6356cdfe6203882c208fe9447fd5d9ee56923f..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol/tsprotocol.go
+++ /dev/null
@@ -1,6952 +0,0 @@
-// Copyright 2023 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Code generated for LSP. DO NOT EDIT.
-
-package protocol
-
-// Code generated from protocol/metaModel.json at ref release/protocol/3.17.6-next.9 (hash c94395b5da53729e6dff931293b051009ccaaaa4).
-// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.6-next.9/protocol/metaModel.json
-// LSP metaData.version = 3.17.0.
-
-import "encoding/json"
-
-// created for And
-type And_RegOpt_textDocument_colorPresentation struct {
- WorkDoneProgressOptions
- TextDocumentRegistrationOptions
-}
-
-// A special text edit with an additional change annotation.
-//
-// @since 3.16.0.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#annotatedTextEdit
-type AnnotatedTextEdit struct {
- // The actual identifier of the change annotation
- AnnotationID *ChangeAnnotationIdentifier `json:"annotationId,omitempty"`
- TextEdit
-}
-
-// The parameters passed via an apply workspace edit request.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#applyWorkspaceEditParams
-type ApplyWorkspaceEditParams struct {
- // An optional label of the workspace edit. This label is
- // presented in the user interface for example on an undo
- // stack to undo the workspace edit.
- Label string `json:"label,omitempty"`
- // The edits to apply.
- Edit WorkspaceEdit `json:"edit"`
- // Additional data about the edit.
- //
- // @since 3.18.0
- // @proposed
- Metadata *WorkspaceEditMetadata `json:"metadata,omitempty"`
-}
-
-// The result returned from the apply workspace edit request.
-//
-// @since 3.17 renamed from ApplyWorkspaceEditResponse
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#applyWorkspaceEditResult
-type ApplyWorkspaceEditResult struct {
- // Indicates whether the edit was applied or not.
- Applied bool `json:"applied"`
- // An optional textual description for why the edit was not applied.
- // This may be used by the server for diagnostic logging or to provide
- // a suitable error for a request that triggered the edit.
- FailureReason string `json:"failureReason,omitempty"`
- // Depending on the client's failure handling strategy `failedChange` might
- // contain the index of the change that failed. This property is only available
- // if the client signals a `failureHandlingStrategy` in its client capabilities.
- FailedChange uint32 `json:"failedChange,omitempty"`
-}
-
-// A base for all symbol information.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#baseSymbolInformation
-type BaseSymbolInformation struct {
- // The name of this symbol.
- Name string `json:"name"`
- // The kind of this symbol.
- Kind SymbolKind `json:"kind"`
- // Tags for this symbol.
- //
- // @since 3.16.0
- Tags []SymbolTag `json:"tags,omitempty"`
- // The name of the symbol containing this symbol. This information is for
- // user interface purposes (e.g. to render a qualifier in the user interface
- // if necessary). It can't be used to re-infer a hierarchy for the document
- // symbols.
- ContainerName string `json:"containerName,omitempty"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyClientCapabilities
-type CallHierarchyClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// Represents an incoming call, e.g. a caller of a method or constructor.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyIncomingCall
-type CallHierarchyIncomingCall struct {
- // The item that makes the call.
- From CallHierarchyItem `json:"from"`
- // The ranges at which the calls appear. This is relative to the caller
- // denoted by {@link CallHierarchyIncomingCall.from `this.from`}.
- FromRanges []Range `json:"fromRanges"`
-}
-
-// The parameter of a `callHierarchy/incomingCalls` request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyIncomingCallsParams
-type CallHierarchyIncomingCallsParams struct {
- Item CallHierarchyItem `json:"item"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Represents programming constructs like functions or constructors in the context
-// of call hierarchy.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyItem
-type CallHierarchyItem struct {
- // The name of this item.
- Name string `json:"name"`
- // The kind of this item.
- Kind SymbolKind `json:"kind"`
- // Tags for this item.
- Tags []SymbolTag `json:"tags,omitempty"`
- // More detail for this item, e.g. the signature of a function.
- Detail string `json:"detail,omitempty"`
- // The resource identifier of this item.
- URI DocumentURI `json:"uri"`
- // The range enclosing this symbol not including leading/trailing whitespace but everything else, e.g. comments and code.
- Range Range `json:"range"`
- // The range that should be selected and revealed when this symbol is being picked, e.g. the name of a function.
- // Must be contained by the {@link CallHierarchyItem.range `range`}.
- SelectionRange Range `json:"selectionRange"`
- // A data entry field that is preserved between a call hierarchy prepare and
- // incoming calls or outgoing calls requests.
- Data interface{} `json:"data,omitempty"`
-}
-
-// Call hierarchy options used during static registration.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyOptions
-type CallHierarchyOptions struct {
- WorkDoneProgressOptions
-}
-
-// Represents an outgoing call, e.g. calling a getter from a method or a method from a constructor etc.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyOutgoingCall
-type CallHierarchyOutgoingCall struct {
- // The item that is called.
- To CallHierarchyItem `json:"to"`
- // The range at which this item is called. This is the range relative to the caller, e.g. the item
- // passed to {@link CallHierarchyItemProvider.provideCallHierarchyOutgoingCalls `provideCallHierarchyOutgoingCalls`}
- // and not {@link CallHierarchyOutgoingCall.to `this.to`}.
- FromRanges []Range `json:"fromRanges"`
-}
-
-// The parameter of a `callHierarchy/outgoingCalls` request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyOutgoingCallsParams
-type CallHierarchyOutgoingCallsParams struct {
- Item CallHierarchyItem `json:"item"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// The parameter of a `textDocument/prepareCallHierarchy` request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyPrepareParams
-type CallHierarchyPrepareParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// Call hierarchy options used during static or dynamic registration.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyRegistrationOptions
-type CallHierarchyRegistrationOptions struct {
- TextDocumentRegistrationOptions
- CallHierarchyOptions
- StaticRegistrationOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#cancelParams
-type CancelParams struct {
- // The request id to cancel.
- ID interface{} `json:"id"`
-}
-
-// Additional information that describes document changes.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#changeAnnotation
-type ChangeAnnotation struct {
- // A human-readable string describing the actual change. The string
- // is rendered prominent in the user interface.
- Label string `json:"label"`
- // A flag which indicates that user confirmation is needed
- // before applying the change.
- NeedsConfirmation bool `json:"needsConfirmation,omitempty"`
- // A human-readable string which is rendered less prominent in
- // the user interface.
- Description string `json:"description,omitempty"`
-}
-
-// An identifier to refer to a change annotation stored with a workspace edit.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#changeAnnotationIdentifier
-type (
- ChangeAnnotationIdentifier = string // (alias)
- // @since 3.18.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#changeAnnotationsSupportOptions
- ChangeAnnotationsSupportOptions struct {
- // Whether the client groups edits with equal labels into tree nodes,
- // for instance all edits labelled with "Changes in Strings" would
- // be a tree node.
- GroupsOnLabel bool `json:"groupsOnLabel,omitempty"`
- }
-)
-
-// Defines the capabilities provided by the client.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCapabilities
-type ClientCapabilities struct {
- // Workspace specific client capabilities.
- Workspace WorkspaceClientCapabilities `json:"workspace,omitempty"`
- // Text document specific client capabilities.
- TextDocument TextDocumentClientCapabilities `json:"textDocument,omitempty"`
- // Capabilities specific to the notebook document support.
- //
- // @since 3.17.0
- NotebookDocument *NotebookDocumentClientCapabilities `json:"notebookDocument,omitempty"`
- // Window specific client capabilities.
- Window WindowClientCapabilities `json:"window,omitempty"`
- // General client capabilities.
- //
- // @since 3.16.0
- General *GeneralClientCapabilities `json:"general,omitempty"`
- // Experimental client capabilities.
- Experimental interface{} `json:"experimental,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCodeActionKindOptions
-type ClientCodeActionKindOptions struct {
- // The code action kind values the client supports. When this
- // property exists the client also guarantees that it will
- // handle values outside its set gracefully and falls back
- // to a default value when unknown.
- ValueSet []CodeActionKind `json:"valueSet"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCodeActionLiteralOptions
-type ClientCodeActionLiteralOptions struct {
- // The code action kind is supported with the following value
- // set.
- CodeActionKind ClientCodeActionKindOptions `json:"codeActionKind"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCodeActionResolveOptions
-type ClientCodeActionResolveOptions struct {
- // The properties that a client can resolve lazily.
- Properties []string `json:"properties"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCodeLensResolveOptions
-type ClientCodeLensResolveOptions struct {
- // The properties that a client can resolve lazily.
- Properties []string `json:"properties"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCompletionItemInsertTextModeOptions
-type ClientCompletionItemInsertTextModeOptions struct {
- ValueSet []InsertTextMode `json:"valueSet"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCompletionItemOptions
-type ClientCompletionItemOptions struct {
- // Client supports snippets as insert text.
- //
- // A snippet can define tab stops and placeholders with `$1`, `$2`
- // and `${3:foo}`. `$0` defines the final tab stop, it defaults to
- // the end of the snippet. Placeholders with equal identifiers are linked,
- // that is typing in one will update others too.
- SnippetSupport bool `json:"snippetSupport,omitempty"`
- // Client supports commit characters on a completion item.
- CommitCharactersSupport bool `json:"commitCharactersSupport,omitempty"`
- // Client supports the following content formats for the documentation
- // property. The order describes the preferred format of the client.
- DocumentationFormat []MarkupKind `json:"documentationFormat,omitempty"`
- // Client supports the deprecated property on a completion item.
- DeprecatedSupport bool `json:"deprecatedSupport,omitempty"`
- // Client supports the preselect property on a completion item.
- PreselectSupport bool `json:"preselectSupport,omitempty"`
- // Client supports the tag property on a completion item. Clients supporting
- // tags have to handle unknown tags gracefully. Clients especially need to
- // preserve unknown tags when sending a completion item back to the server in
- // a resolve call.
- //
- // @since 3.15.0
- TagSupport *CompletionItemTagOptions `json:"tagSupport,omitempty"`
- // Client supports insert replace edits to control different behavior if a
- // completion item is inserted in the text or should replace text.
- //
- // @since 3.16.0
- InsertReplaceSupport bool `json:"insertReplaceSupport,omitempty"`
- // Indicates which properties a client can resolve lazily on a completion
- // item. Before version 3.16.0 only the predefined properties `documentation`
- // and `details` could be resolved lazily.
- //
- // @since 3.16.0
- ResolveSupport *ClientCompletionItemResolveOptions `json:"resolveSupport,omitempty"`
- // The client supports the `insertTextMode` property on
- // a completion item to override the whitespace handling mode
- // as defined by the client (see `insertTextMode`).
- //
- // @since 3.16.0
- InsertTextModeSupport *ClientCompletionItemInsertTextModeOptions `json:"insertTextModeSupport,omitempty"`
- // The client has support for completion item label
- // details (see also `CompletionItemLabelDetails`).
- //
- // @since 3.17.0
- LabelDetailsSupport bool `json:"labelDetailsSupport,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCompletionItemOptionsKind
-type ClientCompletionItemOptionsKind struct {
- // The completion item kind values the client supports. When this
- // property exists the client also guarantees that it will
- // handle values outside its set gracefully and falls back
- // to a default value when unknown.
- //
- // If this property is not present the client only supports
- // the completion items kinds from `Text` to `Reference` as defined in
- // the initial version of the protocol.
- ValueSet []CompletionItemKind `json:"valueSet,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCompletionItemResolveOptions
-type ClientCompletionItemResolveOptions struct {
- // The properties that a client can resolve lazily.
- Properties []string `json:"properties"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientDiagnosticsTagOptions
-type ClientDiagnosticsTagOptions struct {
- // The tags supported by the client.
- ValueSet []DiagnosticTag `json:"valueSet"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientFoldingRangeKindOptions
-type ClientFoldingRangeKindOptions struct {
- // The folding range kind values the client supports. When this
- // property exists the client also guarantees that it will
- // handle values outside its set gracefully and falls back
- // to a default value when unknown.
- ValueSet []FoldingRangeKind `json:"valueSet,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientFoldingRangeOptions
-type ClientFoldingRangeOptions struct {
- // If set, the client signals that it supports setting collapsedText on
- // folding ranges to display custom labels instead of the default text.
- //
- // @since 3.17.0
- CollapsedText bool `json:"collapsedText,omitempty"`
-}
-
-// Information about the client
-//
-// @since 3.15.0
-// @since 3.18.0 ClientInfo type name added.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientInfo
-type ClientInfo struct {
- // The name of the client as defined by the client.
- Name string `json:"name"`
- // The client's version as defined by the client.
- Version string `json:"version,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientInlayHintResolveOptions
-type ClientInlayHintResolveOptions struct {
- // The properties that a client can resolve lazily.
- Properties []string `json:"properties"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSemanticTokensRequestFullDelta
-type ClientSemanticTokensRequestFullDelta struct {
- // The client will send the `textDocument/semanticTokens/full/delta` request if
- // the server provides a corresponding handler.
- Delta bool `json:"delta,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSemanticTokensRequestOptions
-type ClientSemanticTokensRequestOptions struct {
- // The client will send the `textDocument/semanticTokens/range` request if
- // the server provides a corresponding handler.
- Range *Or_ClientSemanticTokensRequestOptions_range `json:"range,omitempty"`
- // The client will send the `textDocument/semanticTokens/full` request if
- // the server provides a corresponding handler.
- Full *Or_ClientSemanticTokensRequestOptions_full `json:"full,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientShowMessageActionItemOptions
-type ClientShowMessageActionItemOptions struct {
- // Whether the client supports additional attributes which
- // are preserved and sent back to the server in the
- // request's response.
- AdditionalPropertiesSupport bool `json:"additionalPropertiesSupport,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSignatureInformationOptions
-type ClientSignatureInformationOptions struct {
- // Client supports the following content formats for the documentation
- // property. The order describes the preferred format of the client.
- DocumentationFormat []MarkupKind `json:"documentationFormat,omitempty"`
- // Client capabilities specific to parameter information.
- ParameterInformation *ClientSignatureParameterInformationOptions `json:"parameterInformation,omitempty"`
- // The client supports the `activeParameter` property on `SignatureInformation`
- // literal.
- //
- // @since 3.16.0
- ActiveParameterSupport bool `json:"activeParameterSupport,omitempty"`
- // The client supports the `activeParameter` property on
- // `SignatureHelp`/`SignatureInformation` being set to `null` to
- // indicate that no parameter should be active.
- //
- // @since 3.18.0
- // @proposed
- NoActiveParameterSupport bool `json:"noActiveParameterSupport,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSignatureParameterInformationOptions
-type ClientSignatureParameterInformationOptions struct {
- // The client supports processing label offsets instead of a
- // simple label string.
- //
- // @since 3.14.0
- LabelOffsetSupport bool `json:"labelOffsetSupport,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSymbolKindOptions
-type ClientSymbolKindOptions struct {
- // The symbol kind values the client supports. When this
- // property exists the client also guarantees that it will
- // handle values outside its set gracefully and falls back
- // to a default value when unknown.
- //
- // If this property is not present the client only supports
- // the symbol kinds from `File` to `Array` as defined in
- // the initial version of the protocol.
- ValueSet []SymbolKind `json:"valueSet,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSymbolResolveOptions
-type ClientSymbolResolveOptions struct {
- // The properties that a client can resolve lazily. Usually
- // `location.range`
- Properties []string `json:"properties"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSymbolTagOptions
-type ClientSymbolTagOptions struct {
- // The tags supported by the client.
- ValueSet []SymbolTag `json:"valueSet"`
-}
-
-// A code action represents a change that can be performed in code, e.g. to fix a problem or
-// to refactor code.
-//
-// A CodeAction must set either `edit` and/or a `command`. If both are supplied, the `edit` is applied first, then the `command` is executed.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeAction
-type CodeAction struct {
- // A short, human-readable, title for this code action.
- Title string `json:"title"`
- // The kind of the code action.
- //
- // Used to filter code actions.
- Kind CodeActionKind `json:"kind,omitempty"`
- // The diagnostics that this code action resolves.
- Diagnostics []Diagnostic `json:"diagnostics,omitempty"`
- // Marks this as a preferred action. Preferred actions are used by the `auto fix` command and can be targeted
- // by keybindings.
- //
- // A quick fix should be marked preferred if it properly addresses the underlying error.
- // A refactoring should be marked preferred if it is the most reasonable choice of actions to take.
- //
- // @since 3.15.0
- IsPreferred bool `json:"isPreferred,omitempty"`
- // Marks that the code action cannot currently be applied.
- //
- // Clients should follow the following guidelines regarding disabled code actions:
- //
- // - Disabled code actions are not shown in automatic [lightbulbs](https://code.visualstudio.com/docs/editor/editingevolved#_code-action)
- // code action menus.
- //
- // - Disabled actions are shown as faded out in the code action menu when the user requests a more specific type
- // of code action, such as refactorings.
- //
- // - If the user has a [keybinding](https://code.visualstudio.com/docs/editor/refactoring#_keybindings-for-code-actions)
- // that auto applies a code action and only disabled code actions are returned, the client should show the user an
- // error message with `reason` in the editor.
- //
- // @since 3.16.0
- Disabled *CodeActionDisabled `json:"disabled,omitempty"`
- // The workspace edit this code action performs.
- Edit *WorkspaceEdit `json:"edit,omitempty"`
- // A command this code action executes. If a code action
- // provides an edit and a command, first the edit is
- // executed and then the command.
- Command *Command `json:"command,omitempty"`
- // A data entry field that is preserved on a code action between
- // a `textDocument/codeAction` and a `codeAction/resolve` request.
- //
- // @since 3.16.0
- Data *json.RawMessage `json:"data,omitempty"`
-}
-
-// The Client Capabilities of a {@link CodeActionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionClientCapabilities
-type CodeActionClientCapabilities struct {
- // Whether code action supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports code action literals of type `CodeAction` as a valid
- // response of the `textDocument/codeAction` request. If the property is not
- // set the request can only return `Command` literals.
- //
- // @since 3.8.0
- CodeActionLiteralSupport ClientCodeActionLiteralOptions `json:"codeActionLiteralSupport,omitempty"`
- // Whether code action supports the `isPreferred` property.
- //
- // @since 3.15.0
- IsPreferredSupport bool `json:"isPreferredSupport,omitempty"`
- // Whether code action supports the `disabled` property.
- //
- // @since 3.16.0
- DisabledSupport bool `json:"disabledSupport,omitempty"`
- // Whether code action supports the `data` property which is
- // preserved between a `textDocument/codeAction` and a
- // `codeAction/resolve` request.
- //
- // @since 3.16.0
- DataSupport bool `json:"dataSupport,omitempty"`
- // Whether the client supports resolving additional code action
- // properties via a separate `codeAction/resolve` request.
- //
- // @since 3.16.0
- ResolveSupport *ClientCodeActionResolveOptions `json:"resolveSupport,omitempty"`
- // Whether the client honors the change annotations in
- // text edits and resource operations returned via the
- // `CodeAction#edit` property by for example presenting
- // the workspace edit in the user interface and asking
- // for confirmation.
- //
- // @since 3.16.0
- HonorsChangeAnnotations bool `json:"honorsChangeAnnotations,omitempty"`
- // Whether the client supports documentation for a class of
- // code actions.
- //
- // @since 3.18.0
- // @proposed
- DocumentationSupport bool `json:"documentationSupport,omitempty"`
-}
-
-// Contains additional diagnostic information about the context in which
-// a {@link CodeActionProvider.provideCodeActions code action} is run.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionContext
-type CodeActionContext struct {
- // An array of diagnostics known on the client side overlapping the range provided to the
- // `textDocument/codeAction` request. They are provided so that the server knows which
- // errors are currently presented to the user for the given range. There is no guarantee
- // that these accurately reflect the error state of the resource. The primary parameter
- // to compute code actions is the provided range.
- Diagnostics []Diagnostic `json:"diagnostics"`
- // Requested kind of actions to return.
- //
- // Actions not of this kind are filtered out by the client before being shown. So servers
- // can omit computing them.
- Only []CodeActionKind `json:"only,omitempty"`
- // The reason why code actions were requested.
- //
- // @since 3.17.0
- TriggerKind *CodeActionTriggerKind `json:"triggerKind,omitempty"`
-}
-
-// Captures why the code action is currently disabled.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionDisabled
-type CodeActionDisabled struct {
- // Human readable description of why the code action is currently disabled.
- //
- // This is displayed in the code actions UI.
- Reason string `json:"reason"`
-}
-
-// A set of predefined code action kinds
-type CodeActionKind string
-
-// Documentation for a class of code actions.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionKindDocumentation
-type CodeActionKindDocumentation struct {
- // The kind of the code action being documented.
- //
- // If the kind is generic, such as `CodeActionKind.Refactor`, the documentation will be shown whenever any
- // refactorings are returned. If the kind is more specific, such as `CodeActionKind.RefactorExtract`, the
- // documentation will only be shown when extract refactoring code actions are returned.
- Kind CodeActionKind `json:"kind"`
- // Command that is used to display the documentation to the user.
- //
- // The title of this documentation code action is taken from {@linkcode Command.title}
- Command Command `json:"command"`
-}
-
-// Provider options for a {@link CodeActionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionOptions
-type CodeActionOptions struct {
- // CodeActionKinds that this server may return.
- //
- // The list of kinds may be generic, such as `CodeActionKind.Refactor`, or the server
- // may list out every specific kind they provide.
- CodeActionKinds []CodeActionKind `json:"codeActionKinds,omitempty"`
- // Static documentation for a class of code actions.
- //
- // Documentation from the provider should be shown in the code actions menu if either:
- //
- //
- // - Code actions of `kind` are requested by the editor. In this case, the editor will show the documentation that
- // most closely matches the requested code action kind. For example, if a provider has documentation for
- // both `Refactor` and `RefactorExtract`, when the user requests code actions for `RefactorExtract`,
- // the editor will use the documentation for `RefactorExtract` instead of the documentation for `Refactor`.
- //
- //
- // - Any code actions of `kind` are returned by the provider.
- //
- // At most one documentation entry should be shown per provider.
- //
- // @since 3.18.0
- // @proposed
- Documentation []CodeActionKindDocumentation `json:"documentation,omitempty"`
- // The server provides support to resolve additional
- // information for a code action.
- //
- // @since 3.16.0
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- WorkDoneProgressOptions
-}
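// Illustrative sketch, not part of the original patch: a server advertising the
// code action kinds it may return plus lazy resolution of additional fields. It
// assumes the CodeActionOptions and CodeActionKind types defined in this
// package; "refactor.extract" follows the kind hierarchy described above, being
// more specific than "refactor".
func exampleCodeActionOptions() CodeActionOptions {
	return CodeActionOptions{
		CodeActionKinds: []CodeActionKind{"quickfix", "refactor.extract"},
		ResolveProvider: true,
	}
}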
-
-// The parameters of a {@link CodeActionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionParams
-type CodeActionParams struct {
- // The document in which the command was invoked.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The range for which the command was invoked.
- Range Range `json:"range"`
- // Context carrying additional information.
- Context CodeActionContext `json:"context"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link CodeActionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionRegistrationOptions
-type CodeActionRegistrationOptions struct {
- TextDocumentRegistrationOptions
- CodeActionOptions
-}
-
-// The reason why code actions were requested.
-//
-// @since 3.17.0
-type CodeActionTriggerKind uint32
-
-// Structure to capture a description for an error code.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeDescription
-type CodeDescription struct {
-	// A URI to open with more information about the diagnostic error.
- Href URI `json:"href"`
-}
-
-// A code lens represents a {@link Command command} that should be shown along with
-// source text, like the number of references, a way to run tests, etc.
-//
-// A code lens is _unresolved_ when no command is associated to it. For performance
-// reasons the creation of a code lens and resolving should be done in two stages.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLens
-type CodeLens struct {
- // The range in which this code lens is valid. Should only span a single line.
- Range Range `json:"range"`
- // The command this code lens represents.
- Command *Command `json:"command,omitempty"`
- // A data entry field that is preserved on a code lens item between
- // a {@link CodeLensRequest} and a {@link CodeLensResolveRequest}
- Data interface{} `json:"data,omitempty"`
-}
-
-// The client capabilities of a {@link CodeLensRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensClientCapabilities
-type CodeLensClientCapabilities struct {
- // Whether code lens supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Whether the client supports resolving additional code lens
- // properties via a separate `codeLens/resolve` request.
- //
- // @since 3.18.0
- ResolveSupport *ClientCodeLensResolveOptions `json:"resolveSupport,omitempty"`
-}
-
-// Code Lens provider options of a {@link CodeLensRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensOptions
-type CodeLensOptions struct {
- // Code lens has a resolve provider as well.
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link CodeLensRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensParams
-type CodeLensParams struct {
- // The document to request code lens for.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link CodeLensRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensRegistrationOptions
-type CodeLensRegistrationOptions struct {
- TextDocumentRegistrationOptions
- CodeLensOptions
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensWorkspaceClientCapabilities
-type CodeLensWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from the
- // server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // code lenses currently shown. It should be used with absolute care and is
-	// useful for situations where a server, for example, detects a project-wide
- // change that requires such a calculation.
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// Represents a color in RGBA space.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#color
-type Color struct {
- // The red component of this color in the range [0-1].
- Red float64 `json:"red"`
- // The green component of this color in the range [0-1].
- Green float64 `json:"green"`
- // The blue component of this color in the range [0-1].
- Blue float64 `json:"blue"`
- // The alpha component of this color in the range [0-1].
- Alpha float64 `json:"alpha"`
-}
-
-// Represents a color range from a document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#colorInformation
-type ColorInformation struct {
- // The range in the document where this color appears.
- Range Range `json:"range"`
- // The actual color value for this color range.
- Color Color `json:"color"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#colorPresentation
-type ColorPresentation struct {
- // The label of this color presentation. It will be shown on the color
- // picker header. By default this is also the text that is inserted when selecting
- // this color presentation.
- Label string `json:"label"`
- // An {@link TextEdit edit} which is applied to a document when selecting
- // this presentation for the color. When `falsy` the {@link ColorPresentation.label label}
- // is used.
- TextEdit *TextEdit `json:"textEdit,omitempty"`
- // An optional array of additional {@link TextEdit text edits} that are applied when
- // selecting this color presentation. Edits must not overlap with the main {@link ColorPresentation.textEdit edit} nor with themselves.
- AdditionalTextEdits []TextEdit `json:"additionalTextEdits,omitempty"`
-}
-
-// Parameters for a {@link ColorPresentationRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#colorPresentationParams
-type ColorPresentationParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The color to request presentations for.
- Color Color `json:"color"`
- // The range where the color would be inserted. Serves as a context.
- Range Range `json:"range"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Represents a reference to a command. Provides a title which
-// will be used to represent a command in the UI and, optionally,
-// an array of arguments which will be passed to the command handler
-// function when invoked.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#command
-type Command struct {
- // Title of the command, like `save`.
- Title string `json:"title"`
- // An optional tooltip.
- //
- // @since 3.18.0
- // @proposed
- Tooltip string `json:"tooltip,omitempty"`
- // The identifier of the actual command handler.
- Command string `json:"command"`
- // Arguments that the command handler should be
- // invoked with.
- Arguments []json.RawMessage `json:"arguments,omitempty"`
-}
-
-// Completion client capabilities
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionClientCapabilities
-type CompletionClientCapabilities struct {
- // Whether completion supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports the following `CompletionItem` specific
- // capabilities.
- CompletionItem ClientCompletionItemOptions `json:"completionItem,omitempty"`
- CompletionItemKind *ClientCompletionItemOptionsKind `json:"completionItemKind,omitempty"`
- // Defines how the client handles whitespace and indentation
- // when accepting a completion item that uses multi line
- // text in either `insertText` or `textEdit`.
- //
- // @since 3.17.0
- InsertTextMode InsertTextMode `json:"insertTextMode,omitempty"`
- // The client supports to send additional context information for a
- // `textDocument/completion` request.
- ContextSupport bool `json:"contextSupport,omitempty"`
- // The client supports the following `CompletionList` specific
- // capabilities.
- //
- // @since 3.17.0
- CompletionList *CompletionListCapabilities `json:"completionList,omitempty"`
-}
-
-// Contains additional information about the context in which a completion request is triggered.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionContext
-type CompletionContext struct {
- // How the completion was triggered.
- TriggerKind CompletionTriggerKind `json:"triggerKind"`
-	// The trigger character (a single character) that has triggered code complete.
- // Is undefined if `triggerKind !== CompletionTriggerKind.TriggerCharacter`
- TriggerCharacter string `json:"triggerCharacter,omitempty"`
-}
-
-// A completion item represents a text snippet that is
-// proposed to complete text that is being typed.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItem
-type CompletionItem struct {
- // The label of this completion item.
- //
- // The label property is also by default the text that
- // is inserted when selecting this completion.
- //
- // If label details are provided the label itself should
- // be an unqualified name of the completion item.
- Label string `json:"label"`
- // Additional details for the label
- //
- // @since 3.17.0
- LabelDetails *CompletionItemLabelDetails `json:"labelDetails,omitempty"`
-	// The kind of this completion item. Based on the kind
- // an icon is chosen by the editor.
- Kind CompletionItemKind `json:"kind,omitempty"`
- // Tags for this completion item.
- //
- // @since 3.15.0
- Tags []CompletionItemTag `json:"tags,omitempty"`
- // A human-readable string with additional information
- // about this item, like type or symbol information.
- Detail string `json:"detail,omitempty"`
- // A human-readable string that represents a doc-comment.
- Documentation *Or_CompletionItem_documentation `json:"documentation,omitempty"`
- // Indicates if this item is deprecated.
- // @deprecated Use `tags` instead.
- Deprecated bool `json:"deprecated,omitempty"`
- // Select this item when showing.
- //
- // *Note* that only one completion item can be selected and that the
- // tool / client decides which item that is. The rule is that the *first*
- // item of those that match best is selected.
- Preselect bool `json:"preselect,omitempty"`
- // A string that should be used when comparing this item
- // with other items. When `falsy` the {@link CompletionItem.label label}
- // is used.
- SortText string `json:"sortText,omitempty"`
- // A string that should be used when filtering a set of
- // completion items. When `falsy` the {@link CompletionItem.label label}
- // is used.
- FilterText string `json:"filterText,omitempty"`
- // A string that should be inserted into a document when selecting
- // this completion. When `falsy` the {@link CompletionItem.label label}
- // is used.
- //
- // The `insertText` is subject to interpretation by the client side.
-	// Some tools might not take the string literally. For example, in
-	// VS Code, if code complete is requested for `con` and a completion
-	// item with an `insertText` of `console` is provided, it will only
-	// insert `sole`. Therefore it is
- // recommended to use `textEdit` instead since it avoids additional client
- // side interpretation.
- InsertText string `json:"insertText,omitempty"`
- // The format of the insert text. The format applies to both the
- // `insertText` property and the `newText` property of a provided
- // `textEdit`. If omitted defaults to `InsertTextFormat.PlainText`.
- //
- // Please note that the insertTextFormat doesn't apply to
- // `additionalTextEdits`.
- InsertTextFormat *InsertTextFormat `json:"insertTextFormat,omitempty"`
- // How whitespace and indentation is handled during completion
-	// item insertion. If not provided, the client's default value depends on
- // the `textDocument.completion.insertTextMode` client capability.
- //
- // @since 3.16.0
- InsertTextMode *InsertTextMode `json:"insertTextMode,omitempty"`
- // An {@link TextEdit edit} which is applied to a document when selecting
- // this completion. When an edit is provided the value of
- // {@link CompletionItem.insertText insertText} is ignored.
- //
- // Most editors support two different operations when accepting a completion
- // item. One is to insert a completion text and the other is to replace an
- // existing text with a completion text. Since this can usually not be
- // predetermined by a server it can report both ranges. Clients need to
- // signal support for `InsertReplaceEdits` via the
- // `textDocument.completion.insertReplaceSupport` client capability
- // property.
- //
- // *Note 1:* The text edit's range as well as both ranges from an insert
- // replace edit must be a [single line] and they must contain the position
- // at which completion has been requested.
- // *Note 2:* If an `InsertReplaceEdit` is returned the edit's insert range
- // must be a prefix of the edit's replace range, that means it must be
- // contained and starting at the same position.
- //
- // @since 3.16.0 additional type `InsertReplaceEdit`
- TextEdit *Or_CompletionItem_textEdit `json:"textEdit,omitempty"`
- // The edit text used if the completion item is part of a CompletionList and
- // CompletionList defines an item default for the text edit range.
- //
- // Clients will only honor this property if they opt into completion list
- // item defaults using the capability `completionList.itemDefaults`.
- //
- // If not provided and a list's default range is provided the label
- // property is used as a text.
- //
- // @since 3.17.0
- TextEditText string `json:"textEditText,omitempty"`
- // An optional array of additional {@link TextEdit text edits} that are applied when
- // selecting this completion. Edits must not overlap (including the same insert position)
- // with the main {@link CompletionItem.textEdit edit} nor with themselves.
- //
- // Additional text edits should be used to change text unrelated to the current cursor position
- // (for example adding an import statement at the top of the file if the completion item will
- // insert an unqualified type).
- AdditionalTextEdits []TextEdit `json:"additionalTextEdits,omitempty"`
- // An optional set of characters that when pressed while this completion is active will accept it first and
- // then type that character. *Note* that all commit characters should have `length=1` and that superfluous
- // characters will be ignored.
- CommitCharacters []string `json:"commitCharacters,omitempty"`
- // An optional {@link Command command} that is executed *after* inserting this completion. *Note* that
- // additional modifications to the current document should be described with the
- // {@link CompletionItem.additionalTextEdits additionalTextEdits}-property.
- Command *Command `json:"command,omitempty"`
- // A data entry field that is preserved on a completion item between a
- // {@link CompletionRequest} and a {@link CompletionResolveRequest}.
- Data interface{} `json:"data,omitempty"`
-}
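// Illustrative sketch, not part of the original patch: a minimal CompletionItem
// that leans on the falsy-fallback rules documented above (the label doubles as
// sort and filter text when those fields are empty). It assumes the
// CompletionItem type defined in this package; the Kind value 3 is the plain
// uint32 the spec assigns to "Function".
func exampleCompletionItem() CompletionItem {
	return CompletionItem{
		Label:  "println",
		Kind:   3, // Function
		Detail: "func println(args ...any)",
		// InsertText is interpreted client-side; servers that can compute a
		// precise range should prefer the textEdit field instead.
		InsertText: "println()",
	}
}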
-
-// In many cases the items of an actual completion result share the same
-// value for properties like `commitCharacters` or the range of a text
-// edit. A completion list can therefore define item defaults which will
-// be used if a completion item itself doesn't specify the value.
-//
-// If a completion list specifies a default value and a completion item
-// also specifies a corresponding value the one from the item is used.
-//
-// Servers are only allowed to return default values if the client
-// signals support for this via the `completionList.itemDefaults`
-// capability.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItemDefaults
-type CompletionItemDefaults struct {
- // A default commit character set.
- //
- // @since 3.17.0
- CommitCharacters []string `json:"commitCharacters,omitempty"`
- // A default edit range.
- //
- // @since 3.17.0
- EditRange *Or_CompletionItemDefaults_editRange `json:"editRange,omitempty"`
- // A default insert text format.
- //
- // @since 3.17.0
- InsertTextFormat *InsertTextFormat `json:"insertTextFormat,omitempty"`
- // A default insert text mode.
- //
- // @since 3.17.0
- InsertTextMode *InsertTextMode `json:"insertTextMode,omitempty"`
- // A default data value.
- //
- // @since 3.17.0
- Data interface{} `json:"data,omitempty"`
-}
-
-// The kind of a completion entry.
-type CompletionItemKind uint32
-
-// Additional details for a completion item label.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItemLabelDetails
-type CompletionItemLabelDetails struct {
- // An optional string which is rendered less prominently directly after {@link CompletionItem.label label},
- // without any spacing. Should be used for function signatures and type annotations.
- Detail string `json:"detail,omitempty"`
- // An optional string which is rendered less prominently after {@link CompletionItem.detail}. Should be used
- // for fully qualified names and file paths.
- Description string `json:"description,omitempty"`
-}
-
-// Completion item tags are extra annotations that tweak the rendering of a completion
-// item.
-//
-// @since 3.15.0
-type CompletionItemTag uint32
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItemTagOptions
-type CompletionItemTagOptions struct {
- // The tags supported by the client.
- ValueSet []CompletionItemTag `json:"valueSet"`
-}
-
-// Represents a collection of {@link CompletionItem completion items} to be presented
-// in the editor.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionList
-type CompletionList struct {
-	// This list is not complete. Further typing results in recomputing this list.
- //
- // Recomputed lists have all their items replaced (not appended) in the
- // incomplete completion sessions.
- IsIncomplete bool `json:"isIncomplete"`
- // In many cases the items of an actual completion result share the same
- // value for properties like `commitCharacters` or the range of a text
- // edit. A completion list can therefore define item defaults which will
- // be used if a completion item itself doesn't specify the value.
- //
- // If a completion list specifies a default value and a completion item
- // also specifies a corresponding value the one from the item is used.
- //
- // Servers are only allowed to return default values if the client
- // signals support for this via the `completionList.itemDefaults`
- // capability.
- //
- // @since 3.17.0
- ItemDefaults *CompletionItemDefaults `json:"itemDefaults,omitempty"`
- // The completion items.
- Items []CompletionItem `json:"items"`
-}
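// Illustrative sketch, not part of the original patch: merging CompletionItemDefaults
// into individual items on the client side, following the rule above that a value
// set on the item wins over the list default. It assumes the CompletionList,
// CompletionItem and CompletionItemDefaults types defined in this package and only
// handles the plainly-typed defaults.
func resolveItemDefaults(list CompletionList) []CompletionItem {
	items := make([]CompletionItem, len(list.Items))
	copy(items, list.Items)
	defaults := list.ItemDefaults
	if defaults == nil {
		return items
	}
	for i := range items {
		if items[i].CommitCharacters == nil {
			items[i].CommitCharacters = defaults.CommitCharacters
		}
		if items[i].InsertTextFormat == nil {
			items[i].InsertTextFormat = defaults.InsertTextFormat
		}
		if items[i].InsertTextMode == nil {
			items[i].InsertTextMode = defaults.InsertTextMode
		}
		if items[i].Data == nil {
			items[i].Data = defaults.Data
		}
	}
	return items
}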
-
-// The client supports the following `CompletionList` specific
-// capabilities.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionListCapabilities
-type CompletionListCapabilities struct {
- // The client supports the following itemDefaults on
- // a completion list.
- //
- // The value lists the supported property names of the
- // `CompletionList.itemDefaults` object. If omitted
- // no properties are supported.
- //
- // @since 3.17.0
- ItemDefaults []string `json:"itemDefaults,omitempty"`
-}
-
-// Completion options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionOptions
-type CompletionOptions struct {
-	// Most tools trigger a completion request automatically without explicitly requesting
-	// it using a keyboard shortcut (e.g. Ctrl+Space). Typically they do so when the user
-	// starts to type an identifier. For example, if the user types `c` in a JavaScript file,
-	// code complete will automatically pop up and present `console`, among others, as a
-	// completion item. Characters that make up identifiers don't need to be listed here.
- //
-	// If code complete should automatically be triggered on characters that are not valid inside
- // an identifier (for example `.` in JavaScript) list them in `triggerCharacters`.
- TriggerCharacters []string `json:"triggerCharacters,omitempty"`
- // The list of all possible characters that commit a completion. This field can be used
- // if clients don't support individual commit characters per completion item. See
- // `ClientCapabilities.textDocument.completion.completionItem.commitCharactersSupport`
- //
- // If a server provides both `allCommitCharacters` and commit characters on an individual
- // completion item the ones on the completion item win.
- //
- // @since 3.2.0
- AllCommitCharacters []string `json:"allCommitCharacters,omitempty"`
- // The server provides support to resolve additional
- // information for a completion item.
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- // The server supports the following `CompletionItem` specific
- // capabilities.
- //
- // @since 3.17.0
- CompletionItem *ServerCompletionItemOptions `json:"completionItem,omitempty"`
- WorkDoneProgressOptions
-}
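// Illustrative sketch, not part of the original patch: completion options for a
// JavaScript-like server that wants completion triggered on `.` in addition to
// identifier characters, as explained above. It assumes the CompletionOptions
// type defined in this package.
func exampleCompletionOptions() CompletionOptions {
	return CompletionOptions{
		TriggerCharacters: []string{"."},
		// Per-item commit characters win over this server-wide list.
		AllCommitCharacters: []string{";", "(", ")"},
		ResolveProvider:     true,
	}
}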
-
-// Completion parameters
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionParams
-type CompletionParams struct {
-	// The completion context. This is only available if the client specifies
- // to send this using the client capability `textDocument.completion.contextSupport === true`
- Context CompletionContext `json:"context,omitempty"`
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link CompletionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionRegistrationOptions
-type CompletionRegistrationOptions struct {
- TextDocumentRegistrationOptions
- CompletionOptions
-}
-
-// How a completion was triggered
-type CompletionTriggerKind uint32
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#configurationItem
-type ConfigurationItem struct {
- // The scope to get the configuration section for.
- ScopeURI *URI `json:"scopeUri,omitempty"`
- // The configuration section asked for.
- Section string `json:"section,omitempty"`
-}
-
-// The parameters of a configuration request.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#configurationParams
-type ConfigurationParams struct {
- Items []ConfigurationItem `json:"items"`
-}
-
-// Create file operation.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#createFile
-type CreateFile struct {
- // A create
- Kind string `json:"kind"`
- // The resource to create.
- URI DocumentURI `json:"uri"`
- // Additional options
- Options *CreateFileOptions `json:"options,omitempty"`
- ResourceOperation
-}
-
-// Options to create a file.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#createFileOptions
-type CreateFileOptions struct {
- // Overwrite existing file. Overwrite wins over `ignoreIfExists`
- Overwrite bool `json:"overwrite,omitempty"`
- // Ignore if exists.
- IgnoreIfExists bool `json:"ignoreIfExists,omitempty"`
-}
-
-// The parameters sent in notifications/requests for user-initiated creation of
-// files.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#createFilesParams
-type CreateFilesParams struct {
- // An array of all files/folders created in this operation.
- Files []FileCreate `json:"files"`
-}
-
-// The declaration of a symbol representation as one or many {@link Location locations}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declaration
-type (
- Declaration = Or_Declaration // (alias)
- // @since 3.14.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationClientCapabilities
- DeclarationClientCapabilities struct {
- // Whether declaration supports dynamic registration. If this is set to `true`
- // the client supports the new `DeclarationRegistrationOptions` return value
- // for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports additional metadata in the form of declaration links.
- LinkSupport bool `json:"linkSupport,omitempty"`
- }
-)
-
-// Information about where a symbol is declared.
-//
-// Provides additional metadata over normal {@link Location location} declarations, including the range of
-// the declaring symbol.
-//
-// Servers should prefer returning `DeclarationLink` over `Declaration` if supported
-// by the client.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationLink
-type (
- DeclarationLink = LocationLink // (alias)
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationOptions
- DeclarationOptions struct {
- WorkDoneProgressOptions
- }
-)
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationParams
-type DeclarationParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationRegistrationOptions
-type DeclarationRegistrationOptions struct {
- DeclarationOptions
- TextDocumentRegistrationOptions
- StaticRegistrationOptions
-}
-
-// The definition of a symbol represented as one or many {@link Location locations}.
-// For most programming languages there is only one location at which a symbol is
-// defined.
-//
-// Servers should prefer returning `DefinitionLink` over `Definition` if supported
-// by the client.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definition
-type (
- Definition = Or_Definition // (alias)
- // Client Capabilities for a {@link DefinitionRequest}.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionClientCapabilities
- DefinitionClientCapabilities struct {
- // Whether definition supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports additional metadata in the form of definition links.
- //
- // @since 3.14.0
- LinkSupport bool `json:"linkSupport,omitempty"`
- }
-)
-
-// Information about where a symbol is defined.
-//
-// Provides additional metadata over normal {@link Location location} definitions, including the range of
-// the defining symbol
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionLink
-type (
- DefinitionLink = LocationLink // (alias)
- // Server Capabilities for a {@link DefinitionRequest}.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionOptions
- DefinitionOptions struct {
- WorkDoneProgressOptions
- }
-)
-
-// Parameters for a {@link DefinitionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionParams
-type DefinitionParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link DefinitionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionRegistrationOptions
-type DefinitionRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DefinitionOptions
-}
-
-// Delete file operation
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#deleteFile
-type DeleteFile struct {
- // A delete
- Kind string `json:"kind"`
- // The file to delete.
- URI DocumentURI `json:"uri"`
- // Delete options.
- Options *DeleteFileOptions `json:"options,omitempty"`
- ResourceOperation
-}
-
-// Delete file options
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#deleteFileOptions
-type DeleteFileOptions struct {
- // Delete the content recursively if a folder is denoted.
- Recursive bool `json:"recursive,omitempty"`
- // Ignore the operation if the file doesn't exist.
- IgnoreIfNotExists bool `json:"ignoreIfNotExists,omitempty"`
-}
-
-// The parameters sent in notifications/requests for user-initiated deletes of
-// files.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#deleteFilesParams
-type DeleteFilesParams struct {
- // An array of all files/folders deleted in this operation.
- Files []FileDelete `json:"files"`
-}
-
-// Represents a diagnostic, such as a compiler error or warning. Diagnostic objects
-// are only valid in the scope of a resource.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnostic
-type Diagnostic struct {
- // The range at which the message applies
- Range Range `json:"range"`
- // The diagnostic's severity. To avoid interpretation mismatches when a
- // server is used with different clients it is highly recommended that servers
- // always provide a severity value.
- Severity DiagnosticSeverity `json:"severity,omitempty"`
-	// The diagnostic's code, which usually appears in the user interface.
- Code interface{} `json:"code,omitempty"`
- // An optional property to describe the error code.
- // Requires the code field (above) to be present/not null.
- //
- // @since 3.16.0
- CodeDescription *CodeDescription `json:"codeDescription,omitempty"`
- // A human-readable string describing the source of this
- // diagnostic, e.g. 'typescript' or 'super lint'. It usually
- // appears in the user interface.
- Source string `json:"source,omitempty"`
- // The diagnostic's message. It usually appears in the user interface
- Message string `json:"message"`
- // Additional metadata about the diagnostic.
- //
- // @since 3.15.0
- Tags []DiagnosticTag `json:"tags,omitempty"`
- // An array of related diagnostic information, e.g. when symbol-names within
- // a scope collide all definitions can be marked via this property.
- RelatedInformation []DiagnosticRelatedInformation `json:"relatedInformation,omitempty"`
- // A data entry field that is preserved between a `textDocument/publishDiagnostics`
- // notification and `textDocument/codeAction` request.
- //
- // @since 3.16.0
- Data *json.RawMessage `json:"data,omitempty"`
-}
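// Illustrative sketch, not part of the original patch: building a Diagnostic with
// related information, assuming the Diagnostic, DiagnosticRelatedInformation,
// Location, Range and Position types of this package. Severity 1 is the plain
// uint32 the spec assigns to "Error"; the URI parameter is a placeholder.
func exampleDiagnostic(uri DocumentURI) Diagnostic {
	rng := Range{
		Start: Position{Line: 9, Character: 4},
		End:   Position{Line: 9, Character: 12},
	}
	return Diagnostic{
		Range:    rng,
		Severity: 1, // Error
		Source:   "example-lint",
		Message:  "symbol redeclared in this scope",
		RelatedInformation: []DiagnosticRelatedInformation{{
			Location: Location{URI: uri, Range: rng},
			Message:  "previous declaration",
		}},
	}
}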
-
-// Client capabilities specific to diagnostic pull requests.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticClientCapabilities
-type DiagnosticClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-	// Whether the client supports related documents for document diagnostic pulls.
- RelatedDocumentSupport bool `json:"relatedDocumentSupport,omitempty"`
- DiagnosticsCapabilities
-}
-
-// Diagnostic options.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticOptions
-type DiagnosticOptions struct {
- // An optional identifier under which the diagnostics are
- // managed by the client.
- Identifier string `json:"identifier,omitempty"`
- // Whether the language has inter file dependencies meaning that
- // editing code in one file can result in a different diagnostic
- // set in another file. Inter file dependencies are common for
- // most programming languages and typically uncommon for linters.
- InterFileDependencies bool `json:"interFileDependencies"`
- // The server provides support for workspace diagnostics as well.
- WorkspaceDiagnostics bool `json:"workspaceDiagnostics"`
- WorkDoneProgressOptions
-}
-
-// Diagnostic registration options.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticRegistrationOptions
-type DiagnosticRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DiagnosticOptions
- StaticRegistrationOptions
-}
-
-// Represents a related message and source code location for a diagnostic. This should be
-// used to point to code locations that cause or are related to a diagnostic, e.g. when duplicating
-// a symbol in a scope.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticRelatedInformation
-type DiagnosticRelatedInformation struct {
- // The location of this related diagnostic information.
- Location Location `json:"location"`
- // The message of this related diagnostic information.
- Message string `json:"message"`
-}
-
-// Cancellation data returned from a diagnostic request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticServerCancellationData
-type DiagnosticServerCancellationData struct {
- RetriggerRequest bool `json:"retriggerRequest"`
-}
-
-// The diagnostic's severity.
-type DiagnosticSeverity uint32
-
-// The diagnostic tags.
-//
-// @since 3.15.0
-type DiagnosticTag uint32
-
-// Workspace client capabilities specific to diagnostic pull requests.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticWorkspaceClientCapabilities
-type DiagnosticWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from
- // the server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // pulled diagnostics currently shown. It should be used with absolute care and
-	// is useful for situations where a server, for example, detects a project-wide
- // change that requires such a calculation.
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// General diagnostics capabilities for pull and push model.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticsCapabilities
-type DiagnosticsCapabilities struct {
-	// Whether the client accepts diagnostics with related information.
- RelatedInformation bool `json:"relatedInformation,omitempty"`
-	// Client supports the tag property to provide metadata about a diagnostic.
- // Clients supporting tags have to handle unknown tags gracefully.
- //
- // @since 3.15.0
- TagSupport *ClientDiagnosticsTagOptions `json:"tagSupport,omitempty"`
- // Client supports a codeDescription property
- //
- // @since 3.16.0
- CodeDescriptionSupport bool `json:"codeDescriptionSupport,omitempty"`
- // Whether code action supports the `data` property which is
- // preserved between a `textDocument/publishDiagnostics` and
- // `textDocument/codeAction` request.
- //
- // @since 3.16.0
- DataSupport bool `json:"dataSupport,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeConfigurationClientCapabilities
-type DidChangeConfigurationClientCapabilities struct {
- // Did change configuration notification supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// The parameters of a change configuration notification.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeConfigurationParams
-type DidChangeConfigurationParams struct {
- // The actual changed settings
- Settings interface{} `json:"settings"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeConfigurationRegistrationOptions
-type DidChangeConfigurationRegistrationOptions struct {
- Section *Or_DidChangeConfigurationRegistrationOptions_section `json:"section,omitempty"`
-}
-
-// The params sent in a change notebook document notification.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeNotebookDocumentParams
-type DidChangeNotebookDocumentParams struct {
- // The notebook document that did change. The version number points
- // to the version after all provided changes have been applied. If
- // only the text document content of a cell changes the notebook version
- // doesn't necessarily have to change.
- NotebookDocument VersionedNotebookDocumentIdentifier `json:"notebookDocument"`
- // The actual changes to the notebook document.
- //
- // The changes describe single state changes to the notebook document.
- // So if there are two changes c1 (at array index 0) and c2 (at array
- // index 1) for a notebook in state S then c1 moves the notebook from
- // S to S' and c2 from S' to S''. So c1 is computed on the state S and
- // c2 is computed on the state S'.
- //
- // To mirror the content of a notebook using change events use the following approach:
- //
- // - start with the same initial content
- // - apply the 'notebookDocument/didChange' notifications in the order you receive them.
- // - apply the `NotebookChangeEvent`s in a single notification in the order
- // you receive them.
- Change NotebookDocumentChangeEvent `json:"change"`
-}
-
-// The change text document notification's parameters.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeTextDocumentParams
-type DidChangeTextDocumentParams struct {
- // The document that did change. The version number points
- // to the version after all provided content changes have
- // been applied.
- TextDocument VersionedTextDocumentIdentifier `json:"textDocument"`
- // The actual content changes. The content changes describe single state changes
- // to the document. So if there are two content changes c1 (at array index 0) and
- // c2 (at array index 1) for a document in state S then c1 moves the document from
- // S to S' and c2 from S' to S''. So c1 is computed on the state S and c2 is computed
- // on the state S'.
- //
- // To mirror the content of a document using change events use the following approach:
- //
- // - start with the same initial content
- // - apply the 'textDocument/didChange' notifications in the order you receive them.
- // - apply the `TextDocumentContentChangeEvent`s in a single notification in the order
- // you receive them.
- ContentChanges []TextDocumentContentChangeEvent `json:"contentChanges"`
-}
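// Illustrative sketch, not part of the original patch: mirroring a document by
// applying content changes strictly in the order they arrive, as described above
// (each change is computed against the state produced by the previous one). The
// applyChange parameter is a hypothetical helper, since how a single
// TextDocumentContentChangeEvent is applied depends on whether it carries a range
// or replaces the whole document.
func mirrorDocument(
	content string,
	params DidChangeTextDocumentParams,
	applyChange func(string, TextDocumentContentChangeEvent) string,
) string {
	for _, change := range params.ContentChanges {
		content = applyChange(content, change)
	}
	return content
}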
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeWatchedFilesClientCapabilities
-type DidChangeWatchedFilesClientCapabilities struct {
- // Did change watched files notification supports dynamic registration. Please note
- // that the current protocol doesn't support static configuration for file changes
- // from the server side.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Whether the client has support for {@link RelativePattern relative pattern}
- // or not.
- //
- // @since 3.17.0
- RelativePatternSupport bool `json:"relativePatternSupport,omitempty"`
-}
-
-// The watched files change notification's parameters.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeWatchedFilesParams
-type DidChangeWatchedFilesParams struct {
- // The actual file events.
- Changes []FileEvent `json:"changes"`
-}
-
-// Describe options to be used when registered for text document change events.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeWatchedFilesRegistrationOptions
-type DidChangeWatchedFilesRegistrationOptions struct {
- // The watchers to register.
- Watchers []FileSystemWatcher `json:"watchers"`
-}
-
-// The parameters of a `workspace/didChangeWorkspaceFolders` notification.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeWorkspaceFoldersParams
-type DidChangeWorkspaceFoldersParams struct {
- // The actual workspace folder change event.
- Event WorkspaceFoldersChangeEvent `json:"event"`
-}
-
-// The params sent in a close notebook document notification.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didCloseNotebookDocumentParams
-type DidCloseNotebookDocumentParams struct {
- // The notebook document that got closed.
- NotebookDocument NotebookDocumentIdentifier `json:"notebookDocument"`
- // The text documents that represent the content
- // of a notebook cell that got closed.
- CellTextDocuments []TextDocumentIdentifier `json:"cellTextDocuments"`
-}
-
-// The parameters sent in a close text document notification
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didCloseTextDocumentParams
-type DidCloseTextDocumentParams struct {
- // The document that was closed.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
-}
-
-// The params sent in an open notebook document notification.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didOpenNotebookDocumentParams
-type DidOpenNotebookDocumentParams struct {
- // The notebook document that got opened.
- NotebookDocument NotebookDocument `json:"notebookDocument"`
- // The text documents that represent the content
- // of a notebook cell.
- CellTextDocuments []TextDocumentItem `json:"cellTextDocuments"`
-}
-
-// The parameters sent in an open text document notification
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didOpenTextDocumentParams
-type DidOpenTextDocumentParams struct {
- // The document that was opened.
- TextDocument TextDocumentItem `json:"textDocument"`
-}
-
-// The params sent in a save notebook document notification.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didSaveNotebookDocumentParams
-type DidSaveNotebookDocumentParams struct {
- // The notebook document that got saved.
- NotebookDocument NotebookDocumentIdentifier `json:"notebookDocument"`
-}
-
-// The parameters sent in a save text document notification
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didSaveTextDocumentParams
-type DidSaveTextDocumentParams struct {
- // The document that was saved.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
-	// Optionally the content when saved. Depends on the includeText value
- // when the save notification was requested.
- Text *string `json:"text,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentColorClientCapabilities
-type DocumentColorClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `DocumentColorRegistrationOptions` return value
- // for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentColorOptions
-type DocumentColorOptions struct {
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link DocumentColorRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentColorParams
-type DocumentColorParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentColorRegistrationOptions
-type DocumentColorRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentColorOptions
- StaticRegistrationOptions
-}
-
-// Parameters of the document diagnostic request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentDiagnosticParams
-type DocumentDiagnosticParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The additional identifier provided during registration.
- Identifier string `json:"identifier,omitempty"`
- // The result id of a previous response if provided.
- PreviousResultID string `json:"previousResultId,omitempty"`
- WorkDoneProgressParams
- PartialResultParams
-}
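// Illustrative sketch, not part of the original patch: a follow-up document
// diagnostic pull that hands the server the result id of the previous response so
// it can answer with an "unchanged" report. It assumes the DocumentDiagnosticParams
// and TextDocumentIdentifier types of this package; the arguments are placeholders.
func examplePullParams(uri DocumentURI, previousResultID string) DocumentDiagnosticParams {
	return DocumentDiagnosticParams{
		TextDocument:     TextDocumentIdentifier{URI: uri},
		PreviousResultID: previousResultID,
	}
}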
-
-// The result of a document diagnostic pull request. A report can
-// either be a full report containing all diagnostics for the
-// requested document or an unchanged report indicating that nothing
-// has changed in terms of diagnostics in comparison to the last
-// pull request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentDiagnosticReport
-type (
- DocumentDiagnosticReport = Or_DocumentDiagnosticReport // (alias)
- // The document diagnostic report kinds.
- //
- // @since 3.17.0
- DocumentDiagnosticReportKind string
-)
-
-// A partial result for a document diagnostic report.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentDiagnosticReportPartialResult
-type DocumentDiagnosticReportPartialResult struct {
- RelatedDocuments map[DocumentURI]interface{} `json:"relatedDocuments"`
-}
-
-// A document filter describes a top level text document or
-// a notebook cell document.
-//
-// @since 3.17.0 - proposed support for NotebookCellTextDocumentFilter.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFilter
-type (
- DocumentFilter = Or_DocumentFilter // (alias)
- // Client capabilities of a {@link DocumentFormattingRequest}.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFormattingClientCapabilities
- DocumentFormattingClientCapabilities struct {
- // Whether formatting supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- }
-)
-
-// Provider options for a {@link DocumentFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFormattingOptions
-type DocumentFormattingOptions struct {
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link DocumentFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFormattingParams
-type DocumentFormattingParams struct {
- // The document to format.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The format options.
- Options FormattingOptions `json:"options"`
- WorkDoneProgressParams
-}
-
-// Registration options for a {@link DocumentFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFormattingRegistrationOptions
-type DocumentFormattingRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentFormattingOptions
-}
-
-// A document highlight is a range inside a text document which deserves
-// special attention. Usually a document highlight is visualized by changing
-// the background color of its range.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlight
-type DocumentHighlight struct {
- // The range this highlight applies to.
- Range Range `json:"range"`
- // The highlight kind, default is {@link DocumentHighlightKind.Text text}.
- Kind DocumentHighlightKind `json:"kind,omitempty"`
-}
-
-// Client Capabilities for a {@link DocumentHighlightRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlightClientCapabilities
-type DocumentHighlightClientCapabilities struct {
- // Whether document highlight supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// A document highlight kind.
-type DocumentHighlightKind uint32
-
-// Provider options for a {@link DocumentHighlightRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlightOptions
-type DocumentHighlightOptions struct {
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link DocumentHighlightRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlightParams
-type DocumentHighlightParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link DocumentHighlightRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlightRegistrationOptions
-type DocumentHighlightRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentHighlightOptions
-}
-
-// A document link is a range in a text document that links to an internal or external resource, like another
-// text document or a web site.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLink
-type DocumentLink struct {
- // The range this link applies to.
- Range Range `json:"range"`
- // The uri this link points to. If missing a resolve request is sent later.
- Target *URI `json:"target,omitempty"`
- // The tooltip text when you hover over this link.
- //
-	// If a tooltip is provided, it will be displayed in a string that includes instructions on how to
- // trigger the link, such as `{0} (ctrl + click)`. The specific instructions vary depending on OS,
- // user settings, and localization.
- //
- // @since 3.15.0
- Tooltip string `json:"tooltip,omitempty"`
- // A data entry field that is preserved on a document link between a
- // DocumentLinkRequest and a DocumentLinkResolveRequest.
- Data interface{} `json:"data,omitempty"`
-}
-
-// The client capabilities of a {@link DocumentLinkRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLinkClientCapabilities
-type DocumentLinkClientCapabilities struct {
- // Whether document link supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Whether the client supports the `tooltip` property on `DocumentLink`.
- //
- // @since 3.15.0
- TooltipSupport bool `json:"tooltipSupport,omitempty"`
-}
-
-// Provider options for a {@link DocumentLinkRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLinkOptions
-type DocumentLinkOptions struct {
- // Document links have a resolve provider as well.
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link DocumentLinkRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLinkParams
-type DocumentLinkParams struct {
- // The document to provide document links for.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link DocumentLinkRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLinkRegistrationOptions
-type DocumentLinkRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentLinkOptions
-}
-
-// Client capabilities of a {@link DocumentOnTypeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentOnTypeFormattingClientCapabilities
-type DocumentOnTypeFormattingClientCapabilities struct {
- // Whether on type formatting supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// Provider options for a {@link DocumentOnTypeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentOnTypeFormattingOptions
-type DocumentOnTypeFormattingOptions struct {
- // A character on which formatting should be triggered, like `{`.
- FirstTriggerCharacter string `json:"firstTriggerCharacter"`
- // More trigger characters.
- MoreTriggerCharacter []string `json:"moreTriggerCharacter,omitempty"`
-}
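// Illustrative sketch, not part of the original patch: on-type formatting options
// that fire when the user types `}` and additionally when they type `;`, assuming
// the DocumentOnTypeFormattingOptions type defined in this package.
func exampleOnTypeFormattingOptions() DocumentOnTypeFormattingOptions {
	return DocumentOnTypeFormattingOptions{
		FirstTriggerCharacter: "}",
		MoreTriggerCharacter:  []string{";"},
	}
}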
-
-// The parameters of a {@link DocumentOnTypeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentOnTypeFormattingParams
-type DocumentOnTypeFormattingParams struct {
- // The document to format.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The position around which the on type formatting should happen.
- // This is not necessarily the exact position where the character denoted
- // by the property `ch` got typed.
- Position Position `json:"position"`
- // The character that has been typed that triggered the formatting
- // on type request. That is not necessarily the last character that
- // got inserted into the document since the client could auto insert
- // characters as well (e.g. like automatic brace completion).
- Ch string `json:"ch"`
- // The formatting options.
- Options FormattingOptions `json:"options"`
-}
-
-// Registration options for a {@link DocumentOnTypeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentOnTypeFormattingRegistrationOptions
-type DocumentOnTypeFormattingRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentOnTypeFormattingOptions
-}
-
-// Client capabilities of a {@link DocumentRangeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangeFormattingClientCapabilities
-type DocumentRangeFormattingClientCapabilities struct {
- // Whether range formatting supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Whether the client supports formatting multiple ranges at once.
- //
- // @since 3.18.0
- // @proposed
- RangesSupport bool `json:"rangesSupport,omitempty"`
-}
-
-// Provider options for a {@link DocumentRangeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangeFormattingOptions
-type DocumentRangeFormattingOptions struct {
- // Whether the server supports formatting multiple ranges at once.
- //
- // @since 3.18.0
- // @proposed
- RangesSupport bool `json:"rangesSupport,omitempty"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link DocumentRangeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangeFormattingParams
-type DocumentRangeFormattingParams struct {
- // The document to format.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The range to format
- Range Range `json:"range"`
- // The format options
- Options FormattingOptions `json:"options"`
- WorkDoneProgressParams
-}
-
-// Registration options for a {@link DocumentRangeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangeFormattingRegistrationOptions
-type DocumentRangeFormattingRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentRangeFormattingOptions
-}
-
-// The parameters of a {@link DocumentRangesFormattingRequest}.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangesFormattingParams
-type DocumentRangesFormattingParams struct {
- // The document to format.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The ranges to format
- Ranges []Range `json:"ranges"`
- // The format options
- Options FormattingOptions `json:"options"`
- WorkDoneProgressParams
-}
-
-// A document selector is the combination of one or many document filters.
-//
-// @sample `let sel:DocumentSelector = [{ language: 'typescript' }, { language: 'json', pattern: '**∕tsconfig.json' }]`;
-//
-// The use of a string as a document filter is deprecated @since 3.16.0.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSelector
-type (
- DocumentSelector = []DocumentFilter // (alias)
- // Represents programming constructs like variables, classes, interfaces etc.
- // that appear in a document. Document symbols can be hierarchical and they
- // have two ranges: one that encloses its definition and one that points to
- // its most interesting range, e.g. the range of an identifier.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbol
- DocumentSymbol struct {
- // The name of this symbol. Will be displayed in the user interface and therefore must not be
- // an empty string or a string only consisting of white spaces.
- Name string `json:"name"`
- // More detail for this symbol, e.g. the signature of a function.
- Detail string `json:"detail,omitempty"`
- // The kind of this symbol.
- Kind SymbolKind `json:"kind"`
- // Tags for this document symbol.
- //
- // @since 3.16.0
- Tags []SymbolTag `json:"tags,omitempty"`
- // Indicates if this symbol is deprecated.
- //
- // @deprecated Use tags instead
- Deprecated bool `json:"deprecated,omitempty"`
- // The range enclosing this symbol not including leading/trailing whitespace but everything else
- // like comments. This information is typically used to determine if the client's cursor is
- // inside the symbol to reveal the symbol in the UI.
- Range Range `json:"range"`
- // The range that should be selected and revealed when this symbol is being picked, e.g. the name of a function.
- // Must be contained by the `range`.
- SelectionRange Range `json:"selectionRange"`
- // Children of this symbol, e.g. properties of a class.
- Children []DocumentSymbol `json:"children,omitempty"`
- }
-)
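To make the two-range contract concrete, here is a small sketch of a hierarchical symbol; the Range/Position field names and the numeric SymbolKind values (5 = Class, 6 = Method in the LSP spec) are assumptions about types defined elsewhere in this package.

// A method symbol nested inside a class: Range spans the whole definition,
// SelectionRange covers just the identifier, and the child sits inside the
// parent's Range. Field names for Range/Position are assumed.
func exampleClassSymbol() DocumentSymbol {
    return DocumentSymbol{
        Name:           "Parser",
        Kind:           5, // Class
        Range:          Range{Start: Position{Line: 10, Character: 0}, End: Position{Line: 40, Character: 1}},
        SelectionRange: Range{Start: Position{Line: 10, Character: 6}, End: Position{Line: 10, Character: 12}},
        Children: []DocumentSymbol{{
            Name:           "parseFile",
            Kind:           6, // Method
            Range:          Range{Start: Position{Line: 12, Character: 1}, End: Position{Line: 20, Character: 2}},
            SelectionRange: Range{Start: Position{Line: 12, Character: 6}, End: Position{Line: 12, Character: 15}},
        }},
    }
}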
-
-// Client Capabilities for a {@link DocumentSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbolClientCapabilities
-type DocumentSymbolClientCapabilities struct {
- // Whether document symbol supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Specific capabilities for the `SymbolKind` in the
- // `textDocument/documentSymbol` request.
- SymbolKind *ClientSymbolKindOptions `json:"symbolKind,omitempty"`
- // The client supports hierarchical document symbols.
- HierarchicalDocumentSymbolSupport bool `json:"hierarchicalDocumentSymbolSupport,omitempty"`
- // The client supports tags on `SymbolInformation`. Tags are supported on
- // `DocumentSymbol` if `hierarchicalDocumentSymbolSupport` is set to true.
- // Clients supporting tags have to handle unknown tags gracefully.
- //
- // @since 3.16.0
- TagSupport *ClientSymbolTagOptions `json:"tagSupport,omitempty"`
- // The client supports an additional label presented in the UI when
- // registering a document symbol provider.
- //
- // @since 3.16.0
- LabelSupport bool `json:"labelSupport,omitempty"`
-}
-
-// Provider options for a {@link DocumentSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbolOptions
-type DocumentSymbolOptions struct {
- // A human-readable string that is shown when multiple outline trees
- // are shown for the same document.
- //
- // @since 3.16.0
- Label string `json:"label,omitempty"`
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link DocumentSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbolParams
-type DocumentSymbolParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link DocumentSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbolRegistrationOptions
-type DocumentSymbolRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentSymbolOptions
-}
-
-// Edit range variant that includes ranges for insert and replace operations.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#editRangeWithInsertReplace
-type EditRangeWithInsertReplace struct {
- Insert Range `json:"insert"`
- Replace Range `json:"replace"`
-}
-
-// Predefined error codes.
-type ErrorCodes int32
-
-// The client capabilities of a {@link ExecuteCommandRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executeCommandClientCapabilities
-type ExecuteCommandClientCapabilities struct {
- // Execute command supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// The server capabilities of a {@link ExecuteCommandRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executeCommandOptions
-type ExecuteCommandOptions struct {
- // The commands to be executed on the server
- Commands []string `json:"commands"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link ExecuteCommandRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executeCommandParams
-type ExecuteCommandParams struct {
- // The identifier of the actual command handler.
- Command string `json:"command"`
- // Arguments that the command should be invoked with.
- Arguments []json.RawMessage `json:"arguments,omitempty"`
- WorkDoneProgressParams
-}
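Because `Arguments` is a slice of raw JSON, callers encode each argument before building the params. A minimal sketch, assuming it sits alongside these types with `encoding/json` imported; the helper name is illustrative.

// buildExecuteCommandParams marshals each argument to json.RawMessage
// before placing it in ExecuteCommandParams.Arguments.
func buildExecuteCommandParams(command string, args ...any) (ExecuteCommandParams, error) {
    raw := make([]json.RawMessage, 0, len(args))
    for _, a := range args {
        b, err := json.Marshal(a)
        if err != nil {
            return ExecuteCommandParams{}, err
        }
        raw = append(raw, b)
    }
    return ExecuteCommandParams{Command: command, Arguments: raw}, nil
}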
-
-// Registration options for a {@link ExecuteCommandRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executeCommandRegistrationOptions
-type ExecuteCommandRegistrationOptions struct {
- ExecuteCommandOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executionSummary
-type ExecutionSummary struct {
- // A strictly monotonically increasing value
- // indicating the execution order of a cell
- // inside a notebook.
- ExecutionOrder uint32 `json:"executionOrder"`
- // Whether the execution was successful or
- // not if known by the client.
- Success bool `json:"success,omitempty"`
-}
-type FailureHandlingKind string
-
-// The file event type
-type FileChangeType uint32
-
-// Represents information on a file/folder create.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileCreate
-type FileCreate struct {
- // A file:// URI for the location of the file/folder being created.
- URI string `json:"uri"`
-}
-
-// Represents information on a file/folder delete.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileDelete
-type FileDelete struct {
- // A file:// URI for the location of the file/folder being deleted.
- URI string `json:"uri"`
-}
-
-// An event describing a file change.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileEvent
-type FileEvent struct {
- // The file's uri.
- URI DocumentURI `json:"uri"`
- // The change type.
- Type FileChangeType `json:"type"`
-}
-
-// Capabilities relating to events from file operations by the user in the client.
-//
-// These events do not come from the file system; they come from user operations
-// like renaming a file in the UI.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationClientCapabilities
-type FileOperationClientCapabilities struct {
- // Whether the client supports dynamic registration for file requests/notifications.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client has support for sending didCreateFiles notifications.
- DidCreate bool `json:"didCreate,omitempty"`
- // The client has support for sending willCreateFiles requests.
- WillCreate bool `json:"willCreate,omitempty"`
- // The client has support for sending didRenameFiles notifications.
- DidRename bool `json:"didRename,omitempty"`
- // The client has support for sending willRenameFiles requests.
- WillRename bool `json:"willRename,omitempty"`
- // The client has support for sending didDeleteFiles notifications.
- DidDelete bool `json:"didDelete,omitempty"`
- // The client has support for sending willDeleteFiles requests.
- WillDelete bool `json:"willDelete,omitempty"`
-}
-
-// A filter to describe in which file operation requests or notifications
-// the server is interested in receiving.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationFilter
-type FileOperationFilter struct {
- // A Uri scheme like `file` or `untitled`.
- Scheme string `json:"scheme,omitempty"`
- // The actual file operation pattern.
- Pattern FileOperationPattern `json:"pattern"`
-}
-
-// Options for notifications/requests for user operations on files.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationOptions
-type FileOperationOptions struct {
- // The server is interested in receiving didCreateFiles notifications.
- DidCreate *FileOperationRegistrationOptions `json:"didCreate,omitempty"`
- // The server is interested in receiving willCreateFiles requests.
- WillCreate *FileOperationRegistrationOptions `json:"willCreate,omitempty"`
- // The server is interested in receiving didRenameFiles notifications.
- DidRename *FileOperationRegistrationOptions `json:"didRename,omitempty"`
- // The server is interested in receiving willRenameFiles requests.
- WillRename *FileOperationRegistrationOptions `json:"willRename,omitempty"`
- // The server is interested in receiving didDeleteFiles file notifications.
- DidDelete *FileOperationRegistrationOptions `json:"didDelete,omitempty"`
- // The server is interested in receiving willDeleteFiles file requests.
- WillDelete *FileOperationRegistrationOptions `json:"willDelete,omitempty"`
-}
-
-// A pattern to describe in which file operation requests or notifications
-// the server is interested in receiving.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationPattern
-type FileOperationPattern struct {
- // The glob pattern to match. Glob patterns can have the following syntax:
- //
- // - `*` to match one or more characters in a path segment
- // - `?` to match on one character in a path segment
- // - `**` to match any number of path segments, including none
- // - `{}` to group sub patterns into an OR expression. (e.g. `**/*.{ts,js}` matches all TypeScript and JavaScript files)
- // - `[]` to declare a range of characters to match in a path segment (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …)
- // - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`)
- Glob string `json:"glob"`
- // Whether to match files or folders with this pattern.
- //
- // Matches both if undefined.
- Matches *FileOperationPatternKind `json:"matches,omitempty"`
- // Additional options used during matching.
- Options *FileOperationPatternOptions `json:"options,omitempty"`
-}
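As a rough illustration of the glob syntax listed above, a filter a server might register for Go files on disk could look like the following; the concrete values are illustrative only.

// Matches *.go files in any directory of a file:// workspace,
// case-insensitively; "**" and "*" are the glob forms described above.
var goFileFilter = FileOperationFilter{
    Scheme: "file",
    Pattern: FileOperationPattern{
        Glob:    "**/*.go",
        Options: &FileOperationPatternOptions{IgnoreCase: true},
    },
}

Filters like this are what FileOperationRegistrationOptions.Filters (below) carries.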
-
-// A pattern kind describing if a glob pattern matches a file, a folder, or
-// both.
-//
-// @since 3.16.0
-type FileOperationPatternKind string
-
-// Matching options for the file operation pattern.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationPatternOptions
-type FileOperationPatternOptions struct {
- // The pattern should be matched ignoring casing.
- IgnoreCase bool `json:"ignoreCase,omitempty"`
-}
-
-// The options to register for file operations.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationRegistrationOptions
-type FileOperationRegistrationOptions struct {
- // The actual filters.
- Filters []FileOperationFilter `json:"filters"`
-}
-
-// Represents information on a file/folder rename.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileRename
-type FileRename struct {
- // A file:// URI for the original location of the file/folder being renamed.
- OldURI string `json:"oldUri"`
- // A file:// URI for the new location of the file/folder being renamed.
- NewURI string `json:"newUri"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileSystemWatcher
-type FileSystemWatcher struct {
- // The glob pattern to watch. See {@link GlobPattern glob pattern} for more detail.
- //
- // @since 3.17.0 support for relative patterns.
- GlobPattern GlobPattern `json:"globPattern"`
- // The kind of events of interest. If omitted it defaults
- // to WatchKind.Create | WatchKind.Change | WatchKind.Delete
- // which is 7.
- Kind *WatchKind `json:"kind,omitempty"`
-}
-
-// Represents a folding range. To be valid, start and end line must be bigger than zero and smaller
-// than the number of lines in the document. Clients are free to ignore invalid ranges.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRange
-type FoldingRange struct {
- // The zero-based start line of the range to fold. The folded area starts after the line's last character.
- // To be valid, the value must be zero or larger and smaller than the number of lines in the document.
- StartLine uint32 `json:"startLine"`
- // The zero-based character offset from where the folded range starts. If not defined, defaults to the length of the start line.
- StartCharacter uint32 `json:"startCharacter,omitempty"`
- // The zero-based end line of the range to fold. The folded area ends with the line's last character.
- // To be valid, the end must be zero or larger and smaller than the number of lines in the document.
- EndLine uint32 `json:"endLine"`
- // The zero-based character offset before the folded range ends. If not defined, defaults to the length of the end line.
- EndCharacter uint32 `json:"endCharacter,omitempty"`
- // Describes the kind of the folding range such as 'comment' or 'region'. The kind
- // is used to categorize folding ranges and used by commands like 'Fold all comments'.
- // See {@link FoldingRangeKind} for an enumeration of standardized kinds.
- Kind string `json:"kind,omitempty"`
- // The text that the client should show when the specified range is
- // collapsed. If not defined or not supported by the client, a default
- // will be chosen by the client.
- //
- // @since 3.17.0
- CollapsedText string `json:"collapsedText,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeClientCapabilities
-type FoldingRangeClientCapabilities struct {
- // Whether implementation supports dynamic registration for folding range
- // providers. If this is set to `true` the client supports the new
- // `FoldingRangeRegistrationOptions` return value for the corresponding
- // server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The maximum number of folding ranges that the client prefers to receive
- // per document. The value serves as a hint; servers are free to follow the
- // limit.
- RangeLimit uint32 `json:"rangeLimit,omitempty"`
- // If set, the client signals that it only supports folding complete lines.
- // If set, the client will ignore the specified `startCharacter` and `endCharacter`
- // properties in a FoldingRange.
- LineFoldingOnly bool `json:"lineFoldingOnly,omitempty"`
- // Specific options for the folding range kind.
- //
- // @since 3.17.0
- FoldingRangeKind *ClientFoldingRangeKindOptions `json:"foldingRangeKind,omitempty"`
- // Specific options for the folding range.
- //
- // @since 3.17.0
- FoldingRange *ClientFoldingRangeOptions `json:"foldingRange,omitempty"`
-}
-
-// A set of predefined range kinds.
-type FoldingRangeKind string
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeOptions
-type FoldingRangeOptions struct {
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link FoldingRangeRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeParams
-type FoldingRangeParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeRegistrationOptions
-type FoldingRangeRegistrationOptions struct {
- TextDocumentRegistrationOptions
- FoldingRangeOptions
- StaticRegistrationOptions
-}
-
-// Client workspace capabilities specific to folding ranges
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeWorkspaceClientCapabilities
-type FoldingRangeWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from the
- // server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // folding ranges currently shown. It should be used with absolute care and is
- // useful for situations where a server, for example, detects a project-wide
- // change that requires such a calculation.
- //
- // @since 3.18.0
- // @proposed
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// Value-object describing what options formatting should use.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#formattingOptions
-type FormattingOptions struct {
- // Size of a tab in spaces.
- TabSize uint32 `json:"tabSize"`
- // Prefer spaces over tabs.
- InsertSpaces bool `json:"insertSpaces"`
- // Trim trailing whitespace on a line.
- //
- // @since 3.15.0
- TrimTrailingWhitespace bool `json:"trimTrailingWhitespace,omitempty"`
- // Insert a newline character at the end of the file if one does not exist.
- //
- // @since 3.15.0
- InsertFinalNewline bool `json:"insertFinalNewline,omitempty"`
- // Trim all newlines after the final newline at the end of the file.
- //
- // @since 3.15.0
- TrimFinalNewlines bool `json:"trimFinalNewlines,omitempty"`
-}
-
-// A diagnostic report with a full set of problems.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fullDocumentDiagnosticReport
-type FullDocumentDiagnosticReport struct {
- // A full document diagnostic report.
- Kind string `json:"kind"`
- // An optional result id. If provided it will
- // be sent on the next diagnostic request for the
- // same document.
- ResultID string `json:"resultId,omitempty"`
- // The actual items.
- Items []Diagnostic `json:"items"`
-}
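A brief sketch of filling in such a report; "full" is the spec's kind tag for a non-delta report, and the result id shown is purely illustrative.

// Wraps a set of diagnostics in a full (non-delta) report; the ResultID
// lets the client hand it back on the next pull request for this document.
func fullReport(items []Diagnostic) FullDocumentDiagnosticReport {
    return FullDocumentDiagnosticReport{
        Kind:     "full",
        ResultID: "r1", // illustrative
        Items:    items,
    }
}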
-
-// General client capabilities.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#generalClientCapabilities
-type GeneralClientCapabilities struct {
- // Client capability that signals how the client
- // handles stale requests (e.g. a request
- // for which the client will not process the response
- // anymore since the information is outdated).
- //
- // @since 3.17.0
- StaleRequestSupport *StaleRequestSupportOptions `json:"staleRequestSupport,omitempty"`
- // Client capabilities specific to regular expressions.
- //
- // @since 3.16.0
- RegularExpressions *RegularExpressionsClientCapabilities `json:"regularExpressions,omitempty"`
- // Client capabilities specific to the client's markdown parser.
- //
- // @since 3.16.0
- Markdown *MarkdownClientCapabilities `json:"markdown,omitempty"`
- // The position encodings supported by the client. Client and server
- // have to agree on the same position encoding to ensure that offsets
- // (e.g. character position in a line) are interpreted the same on both
- // sides.
- //
- // To keep the protocol backwards compatible the following applies: if
- // the value 'utf-16' is missing from the array of position encodings
- // servers can assume that the client supports UTF-16. UTF-16 is
- // therefore a mandatory encoding.
- //
- // If omitted it defaults to ['utf-16'].
- //
- // Implementation considerations: since the conversion from one encoding
- // into another requires the content of the file / line, the conversion
- // is best done where the file is read, which is usually on the server
- // side.
- //
- // @since 3.17.0
- PositionEncodings []PositionEncodingKind `json:"positionEncodings,omitempty"`
-}
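For the position-encoding negotiation described above, a client-side sketch might advertise UTF-8 while keeping the mandatory UTF-16 fallback; this assumes PositionEncodingKind is a string-based kind like the other kinds in this file.

// Order expresses preference; "utf-16" stays in the list because servers
// may assume it whenever the array is missing.
var generalCaps = GeneralClientCapabilities{
    PositionEncodings: []PositionEncodingKind{"utf-8", "utf-16"},
}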
-
-// The glob pattern. Either a string pattern or a relative pattern.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#globPattern
-type (
- GlobPattern = Or_GlobPattern // (alias)
- // The result of a hover request.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hover
- Hover struct {
- // The hover's content
- Contents MarkupContent `json:"contents"`
- // An optional range inside the text document that is used to
- // visualize the hover, e.g. by changing the background color.
- Range Range `json:"range,omitempty"`
- }
-)
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hoverClientCapabilities
-type HoverClientCapabilities struct {
- // Whether hover supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Client supports the following content formats for the content
- // property. The order describes the preferred format of the client.
- ContentFormat []MarkupKind `json:"contentFormat,omitempty"`
-}
-
-// Hover options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hoverOptions
-type HoverOptions struct {
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link HoverRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hoverParams
-type HoverParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// Registration options for a {@link HoverRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hoverRegistrationOptions
-type HoverRegistrationOptions struct {
- TextDocumentRegistrationOptions
- HoverOptions
-}
-
-// @since 3.6.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#implementationClientCapabilities
-type ImplementationClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `ImplementationRegistrationOptions` return value
- // for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports additional metadata in the form of definition links.
- //
- // @since 3.14.0
- LinkSupport bool `json:"linkSupport,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#implementationOptions
-type ImplementationOptions struct {
- WorkDoneProgressOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#implementationParams
-type ImplementationParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#implementationRegistrationOptions
-type ImplementationRegistrationOptions struct {
- TextDocumentRegistrationOptions
- ImplementationOptions
- StaticRegistrationOptions
-}
-
-// The data type of the ResponseError if the
-// initialize request fails.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializeError
-type InitializeError struct {
- // Indicates whether the client executes the following retry logic:
- // (1) show the message provided by the ResponseError to the user
- // (2) user selects retry or cancel
- // (3) if the user selected retry, the initialize method is sent again.
- Retry bool `json:"retry"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializeParams
-type InitializeParams struct {
- XInitializeParams
- WorkspaceFoldersInitializeParams
-}
-
-// The result returned from an initialize request.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializeResult
-type InitializeResult struct {
- // The capabilities the language server provides.
- Capabilities ServerCapabilities `json:"capabilities"`
- // Information about the server.
- //
- // @since 3.15.0
- ServerInfo *ServerInfo `json:"serverInfo,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializedParams
-type InitializedParams struct{}
-
-// Inlay hint information.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHint
-type InlayHint struct {
- // The position of this hint.
- //
- // If multiple hints have the same position, they will be shown in the order
- // they appear in the response.
- Position Position `json:"position"`
- // The label of this hint. A human readable string or an array of
- // InlayHintLabelPart label parts.
- //
- // *Note* that neither the string nor the label part can be empty.
- Label []InlayHintLabelPart `json:"label"`
- // The kind of this hint. Can be omitted in which case the client
- // should fall back to a reasonable default.
- Kind InlayHintKind `json:"kind,omitempty"`
- // Optional text edits that are performed when accepting this inlay hint.
- //
- // *Note* that edits are expected to change the document so that the inlay
- // hint (or its nearest variant) is now part of the document and the inlay
- // hint itself is now obsolete.
- TextEdits []TextEdit `json:"textEdits,omitempty"`
- // The tooltip text when you hover over this item.
- Tooltip *Or_InlayHint_tooltip `json:"tooltip,omitempty"`
- // Render padding before the hint.
- //
- // Note: Padding should use the editor's background color, not the
- // background color of the hint itself. That means padding can be used
- // to visually align/separate an inlay hint.
- PaddingLeft bool `json:"paddingLeft,omitempty"`
- // Render padding after the hint.
- //
- // Note: Padding should use the editor's background color, not the
- // background color of the hint itself. That means padding can be used
- // to visually align/separate an inlay hint.
- PaddingRight bool `json:"paddingRight,omitempty"`
- // A data entry field that is preserved on an inlay hint between
- // a `textDocument/inlayHint` and a `inlayHint/resolve` request.
- Data interface{} `json:"data,omitempty"`
-}
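A small sketch of a parameter-name hint built from these fields; the Position field names and the numeric kind value (2 = Parameter in the LSP 3.17 spec) are assumptions about definitions elsewhere in this package.

// A "name:" hint shown before an argument, padded on the right so it
// does not run into the argument text.
func parameterHint() InlayHint {
    return InlayHint{
        Position:     Position{Line: 9, Character: 14},
        Label:        []InlayHintLabelPart{{Value: "name:"}},
        Kind:         2, // Parameter
        PaddingRight: true,
    }
}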
-
-// Inlay hint client capabilities.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintClientCapabilities
-type InlayHintClientCapabilities struct {
- // Whether inlay hints support dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Indicates which properties a client can resolve lazily on an inlay
- // hint.
- ResolveSupport *ClientInlayHintResolveOptions `json:"resolveSupport,omitempty"`
-}
-
-// Inlay hint kinds.
-//
-// @since 3.17.0
-type InlayHintKind uint32
-
-// An inlay hint label part allows for interactive and composite labels
-// of inlay hints.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintLabelPart
-type InlayHintLabelPart struct {
- // The value of this label part.
- Value string `json:"value"`
- // The tooltip text when you hover over this label part. Depending on
- // the client capability `inlayHint.resolveSupport` clients might resolve
- // this property late using the resolve request.
- Tooltip *Or_InlayHintLabelPart_tooltip `json:"tooltip,omitempty"`
- // An optional source code location that represents this
- // label part.
- //
- // The editor will use this location for the hover and for code navigation
- // features: This part will become a clickable link that resolves to the
- // definition of the symbol at the given location (not necessarily the
- // location itself), it shows the hover that shows at the given location,
- // and it shows a context menu with further code navigation commands.
- //
- // Depending on the client capability `inlayHint.resolveSupport` clients
- // might resolve this property late using the resolve request.
- Location *Location `json:"location,omitempty"`
- // An optional command for this label part.
- //
- // Depending on the client capability `inlayHint.resolveSupport` clients
- // might resolve this property late using the resolve request.
- Command *Command `json:"command,omitempty"`
-}
-
-// Inlay hint options used during static registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintOptions
-type InlayHintOptions struct {
- // The server provides support to resolve additional
- // information for an inlay hint item.
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- WorkDoneProgressOptions
-}
-
-// A parameter literal used in inlay hint requests.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintParams
-type InlayHintParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The document range for which inlay hints should be computed.
- Range Range `json:"range"`
- WorkDoneProgressParams
-}
-
-// Inlay hint options used during static or dynamic registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintRegistrationOptions
-type InlayHintRegistrationOptions struct {
- InlayHintOptions
- TextDocumentRegistrationOptions
- StaticRegistrationOptions
-}
-
-// Client workspace capabilities specific to inlay hints.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintWorkspaceClientCapabilities
-type InlayHintWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from
- // the server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // inlay hints currently shown. It should be used with absolute care and
- // is useful for situations where a server, for example, detects a project-wide
- // change that requires such a calculation.
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// Client capabilities specific to inline completions.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionClientCapabilities
-type InlineCompletionClientCapabilities struct {
- // Whether implementation supports dynamic registration for inline completion providers.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// Provides information about the context in which an inline completion was requested.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionContext
-type InlineCompletionContext struct {
- // Describes how the inline completion was triggered.
- TriggerKind InlineCompletionTriggerKind `json:"triggerKind"`
- // Provides information about the currently selected item in the autocomplete widget if it is visible.
- SelectedCompletionInfo *SelectedCompletionInfo `json:"selectedCompletionInfo,omitempty"`
-}
-
-// An inline completion item represents a text snippet that is proposed inline to complete text that is being typed.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionItem
-type InlineCompletionItem struct {
- // The text to replace the range with. Must be set.
- InsertText Or_InlineCompletionItem_insertText `json:"insertText"`
- // A text that is used to decide if this inline completion should be shown. When `falsy` the {@link InlineCompletionItem.insertText} is used.
- FilterText string `json:"filterText,omitempty"`
- // The range to replace. Must begin and end on the same line.
- Range *Range `json:"range,omitempty"`
- // An optional {@link Command} that is executed *after* inserting this completion.
- Command *Command `json:"command,omitempty"`
-}
-
-// Represents a collection of {@link InlineCompletionItem inline completion items} to be presented in the editor.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionList
-type InlineCompletionList struct {
- // The inline completion items
- Items []InlineCompletionItem `json:"items"`
-}
-
-// Inline completion options used during static registration.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionOptions
-type InlineCompletionOptions struct {
- WorkDoneProgressOptions
-}
-
-// A parameter literal used in inline completion requests.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionParams
-type InlineCompletionParams struct {
- // Additional information about the context in which inline completions were
- // requested.
- Context InlineCompletionContext `json:"context"`
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// Inline completion options used during static or dynamic registration.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionRegistrationOptions
-type InlineCompletionRegistrationOptions struct {
- InlineCompletionOptions
- TextDocumentRegistrationOptions
- StaticRegistrationOptions
-}
-
-// Describes how an {@link InlineCompletionItemProvider inline completion provider} was triggered.
-//
-// @since 3.18.0
-// @proposed
-type InlineCompletionTriggerKind uint32
-
-// Inline value information can be provided by different means:
-//
-// - directly as a text value (class InlineValueText).
-// - as a name to use for a variable lookup (class InlineValueVariableLookup)
-// - as an evaluatable expression (class InlineValueEvaluatableExpression)
-//
-// The InlineValue type combines all inline value types into one type.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValue
-type (
- InlineValue = Or_InlineValue // (alias)
- // Client capabilities specific to inline values.
- //
- // @since 3.17.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueClientCapabilities
- InlineValueClientCapabilities struct {
- // Whether implementation supports dynamic registration for inline value providers.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- }
-)
-
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueContext
-type InlineValueContext struct {
- // The stack frame (as a DAP Id) where the execution has stopped.
- FrameID int32 `json:"frameId"`
- // The document range where execution has stopped.
- // Typically the end position of the range denotes the line where the inline values are shown.
- StoppedLocation Range `json:"stoppedLocation"`
-}
-
-// Provide an inline value through an expression evaluation.
-// If only a range is specified, the expression will be extracted from the underlying document.
-// An optional expression can be used to override the extracted expression.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueEvaluatableExpression
-type InlineValueEvaluatableExpression struct {
- // The document range for which the inline value applies.
- // The range is used to extract the evaluatable expression from the underlying document.
- Range Range `json:"range"`
- // If specified the expression overrides the extracted expression.
- Expression string `json:"expression,omitempty"`
-}
-
-// Inline value options used during static registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueOptions
-type InlineValueOptions struct {
- WorkDoneProgressOptions
-}
-
-// A parameter literal used in inline value requests.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueParams
-type InlineValueParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The document range for which inline values should be computed.
- Range Range `json:"range"`
- // Additional information about the context in which inline values were
- // requested.
- Context InlineValueContext `json:"context"`
- WorkDoneProgressParams
-}
-
-// Inline value options used during static or dynamic registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueRegistrationOptions
-type InlineValueRegistrationOptions struct {
- InlineValueOptions
- TextDocumentRegistrationOptions
- StaticRegistrationOptions
-}
-
-// Provide inline value as text.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueText
-type InlineValueText struct {
- // The document range for which the inline value applies.
- Range Range `json:"range"`
- // The text of the inline value.
- Text string `json:"text"`
-}
-
-// Provide inline value through a variable lookup.
-// If only a range is specified, the variable name will be extracted from the underlying document.
-// An optional variable name can be used to override the extracted name.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueVariableLookup
-type InlineValueVariableLookup struct {
- // The document range for which the inline value applies.
- // The range is used to extract the variable name from the underlying document.
- Range Range `json:"range"`
- // If specified the name of the variable to look up.
- VariableName string `json:"variableName,omitempty"`
- // How to perform the lookup.
- CaseSensitiveLookup bool `json:"caseSensitiveLookup"`
-}
-
-// Client workspace capabilities specific to inline values.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueWorkspaceClientCapabilities
-type InlineValueWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from the
- // server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // inline values currently shown. It should be used with absolute care and is
- // useful for situations where a server, for example, detects a project-wide
- // change that requires such a calculation.
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// A special text edit to provide an insert and a replace operation.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#insertReplaceEdit
-type InsertReplaceEdit struct {
- // The string to be inserted.
- NewText string `json:"newText"`
- // The range if the insert is requested
- Insert Range `json:"insert"`
- // The range if the replace is requested.
- Replace Range `json:"replace"`
-}
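To see why two ranges are carried, consider completing `Handl|erFunc` with the cursor after `Handl`: inserting keeps the trailing `erFunc`, replacing swallows it. A sketch under the assumption that Range and Position use the usual Start/End and Line/Character fields.

// Insert covers just the typed prefix "Handl"; Replace also spans the
// existing "erFunc" after the cursor on the same line.
func exampleInsertReplace() InsertReplaceEdit {
    return InsertReplaceEdit{
        NewText: "HandlerFunc",
        Insert:  Range{Start: Position{Line: 3, Character: 10}, End: Position{Line: 3, Character: 15}},
        Replace: Range{Start: Position{Line: 3, Character: 10}, End: Position{Line: 3, Character: 21}},
    }
}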
-
-// Defines whether the insert text in a completion item should be interpreted as
-// plain text or a snippet.
-type InsertTextFormat uint32
-
-// How whitespace and indentation is handled during completion
-// item insertion.
-//
-// @since 3.16.0
-type (
- InsertTextMode uint32
- LSPAny = interface{}
-)
-
-// LSP arrays.
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#lSPArray
-type (
- LSPArray = []interface{} // (alias)
- LSPErrorCodes int32
-)
-
-// LSP object definition.
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#lSPObject
-type (
- LSPObject = map[string]LSPAny // (alias)
- // Predefined Language kinds
- // @since 3.18.0
- // @proposed
- LanguageKind string
-)
-
-// Client capabilities for the linked editing range request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRangeClientCapabilities
-type LinkedEditingRangeClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRangeOptions
-type LinkedEditingRangeOptions struct {
- WorkDoneProgressOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRangeParams
-type LinkedEditingRangeParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRangeRegistrationOptions
-type LinkedEditingRangeRegistrationOptions struct {
- TextDocumentRegistrationOptions
- LinkedEditingRangeOptions
- StaticRegistrationOptions
-}
-
-// The result of a linked editing range request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRanges
-type LinkedEditingRanges struct {
- // A list of ranges that can be edited together. The ranges must have
- // identical length and contain identical text content. The ranges cannot overlap.
- Ranges []Range `json:"ranges"`
- // An optional word pattern (regular expression) that describes valid contents for
- // the given ranges. If no pattern is provided, the client configuration's word
- // pattern will be used.
- WordPattern string `json:"wordPattern,omitempty"`
-}
-
-// created for Literal (Lit_ClientSemanticTokensRequestOptions_range_Item1)
-type Lit_ClientSemanticTokensRequestOptions_range_Item1 struct{}
-
-// created for Literal (Lit_SemanticTokensOptions_range_Item1)
-type Lit_SemanticTokensOptions_range_Item1 struct{}
-
-// Represents a location inside a resource, such as a line
-// inside a text file.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#location
-type Location struct {
- URI DocumentURI `json:"uri"`
- Range Range `json:"range"`
-}
-
-// Represents the connection of two locations. Provides additional metadata over normal {@link Location locations},
-// including an origin range.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#locationLink
-type LocationLink struct {
- // Span of the origin of this link.
- //
- // Used as the underlined span for mouse interaction. Defaults to the word range at
- // the definition position.
- OriginSelectionRange *Range `json:"originSelectionRange,omitempty"`
- // The target resource identifier of this link.
- TargetURI DocumentURI `json:"targetUri"`
- // The full target range of this link. If the target for example is a symbol then target range is the
- // range enclosing this symbol not including leading/trailing whitespace but everything else
- // like comments. This information is typically used to highlight the range in the editor.
- TargetRange Range `json:"targetRange"`
- // The range that should be selected and revealed when this link is being followed, e.g. the name of a function.
- // Must be contained by the `targetRange`. See also `DocumentSymbol#range`
- TargetSelectionRange Range `json:"targetSelectionRange"`
-}
-
-// Location with only uri and does not include range.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#locationUriOnly
-type LocationUriOnly struct {
- URI DocumentURI `json:"uri"`
-}
-
-// The log message parameters.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#logMessageParams
-type LogMessageParams struct {
- // The message type. See {@link MessageType}
- Type MessageType `json:"type"`
- // The actual message.
- Message string `json:"message"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#logTraceParams
-type LogTraceParams struct {
- Message string `json:"message"`
- Verbose string `json:"verbose,omitempty"`
-}
-
-// Client capabilities specific to the used markdown parser.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#markdownClientCapabilities
-type MarkdownClientCapabilities struct {
- // The name of the parser.
- Parser string `json:"parser"`
- // The version of the parser.
- Version string `json:"version,omitempty"`
- // A list of HTML tags that the client allows / supports in
- // Markdown.
- //
- // @since 3.17.0
- AllowedTags []string `json:"allowedTags,omitempty"`
-}
-
-// MarkedString can be used to render human readable text. It is either a markdown string
-// or a code-block that provides a language and a code snippet. The language identifier
-// is semantically equal to the optional language identifier in fenced code blocks in GitHub
-// issues. See https://help.github.com/articles/creating-and-highlighting-code-blocks/#syntax-highlighting
-//
-// The pair of a language and a value is equivalent to markdown:
-// ```${language}
-// ${value}
-// ```
-//
-// Note that markdown strings will be sanitized - that means html will be escaped.
-// @deprecated use MarkupContent instead.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#markedString
-type (
- MarkedString = Or_MarkedString // (alias)
- // @since 3.18.0
- // @deprecated use MarkupContent instead.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#markedStringWithLanguage
- MarkedStringWithLanguage struct {
- Language string `json:"language"`
- Value string `json:"value"`
- }
-)
-
-// A `MarkupContent` literal represents a string value whose content is interpreted based on its
-// kind flag. Currently the protocol supports `plaintext` and `markdown` as markup kinds.
-//
-// If the kind is `markdown` then the value can contain fenced code blocks like in GitHub issues.
-// See https://help.github.com/articles/creating-and-highlighting-code-blocks/#syntax-highlighting
-//
-// Here is an example of how such a string can be constructed using JavaScript / TypeScript:
-// ```ts
-//
-// let markdown: MarkupContent = {
-// kind: MarkupKind.Markdown,
-// value: [
-// '# Header',
-// 'Some text',
-// '```typescript',
-// 'someCode();',
-// '```'
-// ].join('\n')
-// };
-//
-// ```
-//
-// *Please Note* that clients might sanitize the returned markdown. A client could decide to
-// remove HTML from the markdown to avoid script execution.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#markupContent
-type MarkupContent struct {
- // The type of the Markup
- Kind MarkupKind `json:"kind"`
- // The content itself
- Value string `json:"value"`
-}
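The TypeScript snippet above translates directly to Go; "markdown" is one of the two kind strings named in the comment.

// A markdown hover body with a fenced code block, mirroring the
// TypeScript example in the comment above.
func markdownExample() MarkupContent {
    return MarkupContent{
        Kind:  "markdown",
        Value: "# Header\nSome text\n```go\nsomeCode()\n```",
    }
}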
-
-// Describes the content type that a client supports in various
-// result literals like `Hover`, `ParameterInfo` or `CompletionItem`.
-//
-// Please note that `MarkupKinds` must not start with a `$`. These kinds
-// are reserved for internal usage.
-type MarkupKind string
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#messageActionItem
-type MessageActionItem struct {
- // A short title like 'Retry', 'Open Log' etc.
- Title string `json:"title"`
-}
-
-// The message type
-type MessageType uint32
-
-// Moniker definition to match LSIF 0.5 moniker definition.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#moniker
-type Moniker struct {
- // The scheme of the moniker. For example tsc or .Net
- Scheme string `json:"scheme"`
- // The identifier of the moniker. The value is opaque in LSIF; however,
- // schema owners are allowed to define the structure if they want.
- Identifier string `json:"identifier"`
- // The scope in which the moniker is unique
- Unique UniquenessLevel `json:"unique"`
- // The moniker kind if known.
- Kind *MonikerKind `json:"kind,omitempty"`
-}
-
-// Client capabilities specific to the moniker request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#monikerClientCapabilities
-type MonikerClientCapabilities struct {
- // Whether moniker supports dynamic registration. If this is set to `true`
- // the client supports the new `MonikerRegistrationOptions` return value
- // for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// The moniker kind.
-//
-// @since 3.16.0
-type MonikerKind string
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#monikerOptions
-type MonikerOptions struct {
- WorkDoneProgressOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#monikerParams
-type MonikerParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#monikerRegistrationOptions
-type MonikerRegistrationOptions struct {
- TextDocumentRegistrationOptions
- MonikerOptions
-}
-
-// A notebook cell.
-//
-// A cell's document URI must be unique across ALL notebook
-// cells and can therefore be used to uniquely identify a
-// notebook cell or the cell's text document.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookCell
-type NotebookCell struct {
- // The cell's kind
- Kind NotebookCellKind `json:"kind"`
- // The URI of the cell's text document
- // content.
- Document DocumentURI `json:"document"`
- // Additional metadata stored with the cell.
- //
- // Note: should always be an object literal (e.g. LSPObject)
- Metadata *LSPObject `json:"metadata,omitempty"`
- // Additional execution summary information
- // if supported by the client.
- ExecutionSummary *ExecutionSummary `json:"executionSummary,omitempty"`
-}
-
-// A change describing how to move a `NotebookCell`
-// array from state S to S'.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookCellArrayChange
-type NotebookCellArrayChange struct {
- // The start offset of the cell that changed.
- Start uint32 `json:"start"`
- // The deleted cells
- DeleteCount uint32 `json:"deleteCount"`
- // The new cells, if any
- Cells []NotebookCell `json:"cells,omitempty"`
-}
-
-// A notebook cell kind.
-//
-// @since 3.17.0
-type NotebookCellKind uint32
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookCellLanguage
-type NotebookCellLanguage struct {
- Language string `json:"language"`
-}
-
-// A notebook cell text document filter denotes a cell text
-// document by different properties.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookCellTextDocumentFilter
-type NotebookCellTextDocumentFilter struct {
- // A filter that matches against the notebook
- // containing the notebook cell. If a string
- // value is provided it matches against the
- // notebook type. '*' matches every notebook.
- Notebook Or_NotebookCellTextDocumentFilter_notebook `json:"notebook"`
- // A language id like `python`.
- //
- // Will be matched against the language id of the
- // notebook cell document. '*' matches every language.
- Language string `json:"language,omitempty"`
-}
-
-// A notebook document.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocument
-type NotebookDocument struct {
- // The notebook document's uri.
- URI URI `json:"uri"`
- // The type of the notebook.
- NotebookType string `json:"notebookType"`
- // The version number of this document (it will increase after each
- // change, including undo/redo).
- Version int32 `json:"version"`
- // Additional metadata stored with the notebook
- // document.
- //
- // Note: should always be an object literal (e.g. LSPObject)
- Metadata *LSPObject `json:"metadata,omitempty"`
- // The cells of a notebook.
- Cells []NotebookCell `json:"cells"`
-}
-
-// Structural changes to cells in a notebook document.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentCellChangeStructure
-type NotebookDocumentCellChangeStructure struct {
- // The change to the cell array.
- Array NotebookCellArrayChange `json:"array"`
- // Additional opened cell text documents.
- DidOpen []TextDocumentItem `json:"didOpen,omitempty"`
- // Additional closed cell text documents.
- DidClose []TextDocumentIdentifier `json:"didClose,omitempty"`
-}
-
-// Cell changes to a notebook document.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentCellChanges
-type NotebookDocumentCellChanges struct {
- // Changes to the cell structure to add or
- // remove cells.
- Structure *NotebookDocumentCellChangeStructure `json:"structure,omitempty"`
- // Changes to notebook cells properties like its
- // kind, execution summary or metadata.
- Data []NotebookCell `json:"data,omitempty"`
- // Changes to the text content of notebook cells.
- TextContent []NotebookDocumentCellContentChanges `json:"textContent,omitempty"`
-}
-
-// Content changes to a cell in a notebook document.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentCellContentChanges
-type NotebookDocumentCellContentChanges struct {
- Document VersionedTextDocumentIdentifier `json:"document"`
- Changes []TextDocumentContentChangeEvent `json:"changes"`
-}
-
-// A change event for a notebook document.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentChangeEvent
-type NotebookDocumentChangeEvent struct {
- // The changed meta data if any.
- //
- // Note: should always be an object literal (e.g. LSPObject)
- Metadata *LSPObject `json:"metadata,omitempty"`
- // Changes to cells
- Cells *NotebookDocumentCellChanges `json:"cells,omitempty"`
-}
-
-// Capabilities specific to the notebook document support.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentClientCapabilities
-type NotebookDocumentClientCapabilities struct {
- // Capabilities specific to notebook document synchronization
- //
- // @since 3.17.0
- Synchronization NotebookDocumentSyncClientCapabilities `json:"synchronization"`
-}
-
-// A notebook document filter denotes a notebook document by
-// different properties. The properties will be matched
-// against the notebook's URI (same as with documents).
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilter
-type (
- NotebookDocumentFilter = Or_NotebookDocumentFilter // (alias)
-	// A notebook document filter where `notebookType` is a required field.
- //
- // @since 3.18.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterNotebookType
- NotebookDocumentFilterNotebookType struct {
- // The type of the enclosing notebook.
- NotebookType string `json:"notebookType"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme,omitempty"`
- // A glob pattern.
- Pattern *GlobPattern `json:"pattern,omitempty"`
- }
-)
-
-// A notebook document filter where `pattern` is a required field.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterPattern
-type NotebookDocumentFilterPattern struct {
- // The type of the enclosing notebook.
- NotebookType string `json:"notebookType,omitempty"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme,omitempty"`
- // A glob pattern.
- Pattern GlobPattern `json:"pattern"`
-}
-
-// A notebook document filter where `scheme` is a required field.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterScheme
-type NotebookDocumentFilterScheme struct {
- // The type of the enclosing notebook.
- NotebookType string `json:"notebookType,omitempty"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme"`
- // A glob pattern.
- Pattern *GlobPattern `json:"pattern,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterWithCells
-type NotebookDocumentFilterWithCells struct {
-	// The notebook to be synced. If a string
- // value is provided it matches against the
- // notebook type. '*' matches every notebook.
- Notebook *Or_NotebookDocumentFilterWithCells_notebook `json:"notebook,omitempty"`
- // The cells of the matching notebook to be synced.
- Cells []NotebookCellLanguage `json:"cells"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterWithNotebook
-type NotebookDocumentFilterWithNotebook struct {
-	// The notebook to be synced. If a string
- // value is provided it matches against the
- // notebook type. '*' matches every notebook.
- Notebook Or_NotebookDocumentFilterWithNotebook_notebook `json:"notebook"`
- // The cells of the matching notebook to be synced.
- Cells []NotebookCellLanguage `json:"cells,omitempty"`
-}
-
-// A literal to identify a notebook document in the client.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentIdentifier
-type NotebookDocumentIdentifier struct {
- // The notebook document's uri.
- URI URI `json:"uri"`
-}
-
-// Notebook specific client capabilities.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentSyncClientCapabilities
-type NotebookDocumentSyncClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is
- // set to `true` the client supports the new
- // `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports sending execution summary data per cell.
- ExecutionSummarySupport bool `json:"executionSummarySupport,omitempty"`
-}
-
-// Options specific to a notebook plus its cells
-// to be synced to the server.
-//
-// If a selector provides a notebook document
-// filter but no cell selector all cells of a
-// matching notebook document will be synced.
-//
-// If a selector provides no notebook document
-// filter but only a cell selector all notebook
-// documents that contain at least one matching
-// cell will be synced.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentSyncOptions
-type NotebookDocumentSyncOptions struct {
- // The notebooks to be synced
- NotebookSelector []Or_NotebookDocumentSyncOptions_notebookSelector_Elem `json:"notebookSelector"`
-	// Whether save notifications should be forwarded to
- // the server. Will only be honored if mode === `notebook`.
- Save bool `json:"save,omitempty"`
-}
-
-// Registration options specific to a notebook.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentSyncRegistrationOptions
-type NotebookDocumentSyncRegistrationOptions struct {
- NotebookDocumentSyncOptions
- StaticRegistrationOptions
-}
-
-// A text document identifier to optionally denote a specific version of a text document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#optionalVersionedTextDocumentIdentifier
-type OptionalVersionedTextDocumentIdentifier struct {
- // The version number of this document. If a versioned text document identifier
- // is sent from the server to the client and the file is not open in the editor
- // (the server has not received an open notification before) the server can send
- // `null` to indicate that the version is unknown and the content on disk is the
- // truth (as specified with document content ownership).
- Version int32 `json:"version"`
- TextDocumentIdentifier
-}
-
-// created for Or [int32 string]
-type Or_CancelParams_id struct {
- Value interface{} `json:"value"`
-}
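
These generated `Or_*` wrappers each carry one of the listed alternatives in a single `Value interface{}` field. The (un)marshaling code is not part of this hunk; the following is only a sketch, under the assumption that the union is encoded on the wire as the bare value rather than as an object with a `value` key (assumes `import "encoding/json"`):

```go
// Sketch only: how a two-variant wrapper such as Or_CancelParams_id
// (int32 | string) could be round-tripped through JSON.
func (o Or_CancelParams_id) MarshalJSON() ([]byte, error) {
	// Emit the wrapped value itself, not {"value": ...}.
	return json.Marshal(o.Value)
}

func (o *Or_CancelParams_id) UnmarshalJSON(data []byte) error {
	var n int32
	if err := json.Unmarshal(data, &n); err == nil {
		o.Value = n // numeric request id
		return nil
	}
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	o.Value = s // string request id
	return nil
}
```

Trying the narrower variant first and falling back keeps the decode unambiguous for this pair; wrappers whose variants are structs would instead need to inspect the raw JSON before choosing a branch.
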
-
-// created for Or [ClientSemanticTokensRequestFullDelta bool]
-type Or_ClientSemanticTokensRequestOptions_full struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Lit_ClientSemanticTokensRequestOptions_range_Item1 bool]
-type Or_ClientSemanticTokensRequestOptions_range struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [EditRangeWithInsertReplace Range]
-type Or_CompletionItemDefaults_editRange struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkupContent string]
-type Or_CompletionItem_documentation struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InsertReplaceEdit TextEdit]
-type Or_CompletionItem_textEdit struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Location []Location]
-type Or_Declaration struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Location []Location]
-type Or_Definition struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [int32 string]
-type Or_Diagnostic_code struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [[]string string]
-type Or_DidChangeConfigurationRegistrationOptions_section struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [RelatedFullDocumentDiagnosticReport RelatedUnchangedDocumentDiagnosticReport]
-type Or_DocumentDiagnosticReport struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]
-type Or_DocumentDiagnosticReportPartialResult_relatedDocuments_Value struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookCellTextDocumentFilter TextDocumentFilter]
-type Or_DocumentFilter struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Pattern RelativePattern]
-type Or_GlobPattern struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkedString MarkupContent []MarkedString]
-type Or_Hover_contents struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkupContent string]
-type Or_InlayHintLabelPart_tooltip struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [[]InlayHintLabelPart string]
-type Or_InlayHint_label struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkupContent string]
-type Or_InlayHint_tooltip struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [StringValue string]
-type Or_InlineCompletionItem_insertText struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InlineValueEvaluatableExpression InlineValueText InlineValueVariableLookup]
-type Or_InlineValue struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [LSPArray LSPObject bool float64 int32 string uint32]
-type Or_LSPAny struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkedStringWithLanguage string]
-type Or_MarkedString struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentFilter string]
-type Or_NotebookCellTextDocumentFilter_notebook struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentFilterNotebookType NotebookDocumentFilterPattern NotebookDocumentFilterScheme]
-type Or_NotebookDocumentFilter struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentFilter string]
-type Or_NotebookDocumentFilterWithCells_notebook struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentFilter string]
-type Or_NotebookDocumentFilterWithNotebook_notebook struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentFilterWithCells NotebookDocumentFilterWithNotebook]
-type Or_NotebookDocumentSyncOptions_notebookSelector_Elem struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkupContent string]
-type Or_ParameterInformation_documentation struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Tuple_ParameterInformation_label_Item1 string]
-type Or_ParameterInformation_label struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [PrepareRenameDefaultBehavior PrepareRenamePlaceholder Range]
-type Or_PrepareRenameResult struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [int32 string]
-type Or_ProgressToken struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]
-type Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]
-type Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [URI WorkspaceFolder]
-type Or_RelativePattern_baseUri struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [CodeAction Command]
-type Or_Result_textDocument_codeAction_Item0_Elem struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [CompletionList []CompletionItem]
-type Or_Result_textDocument_completion struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Declaration []DeclarationLink]
-type Or_Result_textDocument_declaration struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Definition []DefinitionLink]
-type Or_Result_textDocument_definition struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [[]DocumentSymbol []SymbolInformation]
-type Or_Result_textDocument_documentSymbol struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Definition []DefinitionLink]
-type Or_Result_textDocument_implementation struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InlineCompletionList []InlineCompletionItem]
-type Or_Result_textDocument_inlineCompletion struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [SemanticTokens SemanticTokensDelta]
-type Or_Result_textDocument_semanticTokens_full_delta struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Definition []DefinitionLink]
-type Or_Result_textDocument_typeDefinition struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [[]SymbolInformation []WorkspaceSymbol]
-type Or_Result_workspace_symbol struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [SemanticTokensFullDelta bool]
-type Or_SemanticTokensOptions_full struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Lit_SemanticTokensOptions_range_Item1 bool]
-type Or_SemanticTokensOptions_range struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [CallHierarchyOptions CallHierarchyRegistrationOptions bool]
-type Or_ServerCapabilities_callHierarchyProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [CodeActionOptions bool]
-type Or_ServerCapabilities_codeActionProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DocumentColorOptions DocumentColorRegistrationOptions bool]
-type Or_ServerCapabilities_colorProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DeclarationOptions DeclarationRegistrationOptions bool]
-type Or_ServerCapabilities_declarationProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DefinitionOptions bool]
-type Or_ServerCapabilities_definitionProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DiagnosticOptions DiagnosticRegistrationOptions]
-type Or_ServerCapabilities_diagnosticProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DocumentFormattingOptions bool]
-type Or_ServerCapabilities_documentFormattingProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DocumentHighlightOptions bool]
-type Or_ServerCapabilities_documentHighlightProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DocumentRangeFormattingOptions bool]
-type Or_ServerCapabilities_documentRangeFormattingProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DocumentSymbolOptions bool]
-type Or_ServerCapabilities_documentSymbolProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [FoldingRangeOptions FoldingRangeRegistrationOptions bool]
-type Or_ServerCapabilities_foldingRangeProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [HoverOptions bool]
-type Or_ServerCapabilities_hoverProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [ImplementationOptions ImplementationRegistrationOptions bool]
-type Or_ServerCapabilities_implementationProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InlayHintOptions InlayHintRegistrationOptions bool]
-type Or_ServerCapabilities_inlayHintProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InlineCompletionOptions bool]
-type Or_ServerCapabilities_inlineCompletionProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InlineValueOptions InlineValueRegistrationOptions bool]
-type Or_ServerCapabilities_inlineValueProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [LinkedEditingRangeOptions LinkedEditingRangeRegistrationOptions bool]
-type Or_ServerCapabilities_linkedEditingRangeProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MonikerOptions MonikerRegistrationOptions bool]
-type Or_ServerCapabilities_monikerProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentSyncOptions NotebookDocumentSyncRegistrationOptions]
-type Or_ServerCapabilities_notebookDocumentSync struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [ReferenceOptions bool]
-type Or_ServerCapabilities_referencesProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [RenameOptions bool]
-type Or_ServerCapabilities_renameProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [SelectionRangeOptions SelectionRangeRegistrationOptions bool]
-type Or_ServerCapabilities_selectionRangeProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [SemanticTokensOptions SemanticTokensRegistrationOptions]
-type Or_ServerCapabilities_semanticTokensProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TextDocumentSyncKind TextDocumentSyncOptions]
-type Or_ServerCapabilities_textDocumentSync struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TypeDefinitionOptions TypeDefinitionRegistrationOptions bool]
-type Or_ServerCapabilities_typeDefinitionProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TypeHierarchyOptions TypeHierarchyRegistrationOptions bool]
-type Or_ServerCapabilities_typeHierarchyProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [WorkspaceSymbolOptions bool]
-type Or_ServerCapabilities_workspaceSymbolProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkupContent string]
-type Or_SignatureInformation_documentation struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TextDocumentContentChangePartial TextDocumentContentChangeWholeDocument]
-type Or_TextDocumentContentChangeEvent struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [AnnotatedTextEdit SnippetTextEdit TextEdit]
-type Or_TextDocumentEdit_edits_Elem struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TextDocumentFilterLanguage TextDocumentFilterPattern TextDocumentFilterScheme]
-type Or_TextDocumentFilter struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [SaveOptions bool]
-type Or_TextDocumentSyncOptions_save struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [WorkspaceFullDocumentDiagnosticReport WorkspaceUnchangedDocumentDiagnosticReport]
-type Or_WorkspaceDocumentDiagnosticReport struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [CreateFile DeleteFile RenameFile TextDocumentEdit]
-type Or_WorkspaceEdit_documentChanges_Elem struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [bool string]
-type Or_WorkspaceFoldersServerCapabilities_changeNotifications struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TextDocumentContentOptions TextDocumentContentRegistrationOptions]
-type Or_WorkspaceOptions_textDocumentContent struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Location LocationUriOnly]
-type Or_WorkspaceSymbol_location struct {
- Value interface{} `json:"value"`
-}
-
-// The parameters of a configuration request.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#configurationParams
-type ParamConfiguration struct {
- Items []ConfigurationItem `json:"items"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializeParams
-type ParamInitialize struct {
- XInitializeParams
- WorkspaceFoldersInitializeParams
-}
-
-// Represents a parameter of a callable-signature. A parameter can
-// have a label and a doc-comment.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#parameterInformation
-type ParameterInformation struct {
- // The label of this parameter information.
- //
- // Either a string or an inclusive start and exclusive end offsets within its containing
- // signature label. (see SignatureInformation.label). The offsets are based on a UTF-16
-	// string representation as `Position` and `Range` do.
- //
- // To avoid ambiguities a server should use the [start, end] offset value instead of using
-	// a substring. Whether a client supports this is controlled via `labelOffsetSupport` client
- // capability.
- //
- // *Note*: a label of type string should be a substring of its containing signature label.
- // Its intended use case is to highlight the parameter label part in the `SignatureInformation.label`.
- Label Or_ParameterInformation_label `json:"label"`
- // The human-readable doc-comment of this parameter. Will be shown
- // in the UI but can be omitted.
- Documentation *Or_ParameterInformation_documentation `json:"documentation,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#partialResultParams
-type PartialResultParams struct {
- // An optional token that a server can use to report partial results (e.g. streaming) to
- // the client.
- PartialResultToken *ProgressToken `json:"partialResultToken,omitempty"`
-}
-
-// The glob pattern to watch relative to the base path. Glob patterns can have the following syntax:
-//
-// - `*` to match one or more characters in a path segment
-// - `?` to match on one character in a path segment
-// - `**` to match any number of path segments, including none
-// - `{}` to group conditions (e.g. `**/*.{ts,js}` matches all TypeScript and JavaScript files)
-// - `[]` to declare a range of characters to match in a path segment (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …)
-// - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`)
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#pattern
-type (
- Pattern = string // (alias)
- // Position in a text document expressed as zero-based line and character
- // offset. Prior to 3.17 the offsets were always based on a UTF-16 string
-	// representation. So for a string of the form `a𐐀b` the character offset of the
- // character `a` is 0, the character offset of `𐐀` is 1 and the character
- // offset of b is 3 since `𐐀` is represented using two code units in UTF-16.
- // Since 3.17 clients and servers can agree on a different string encoding
-	// representation (e.g. UTF-8). The client announces its supported encoding
- // via the client capability [`general.positionEncodings`](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#clientCapabilities).
- // The value is an array of position encodings the client supports, with
- // decreasing preference (e.g. the encoding at index `0` is the most preferred
- // one). To stay backwards compatible the only mandatory encoding is UTF-16
- // represented via the string `utf-16`. The server can pick one of the
- // encodings offered by the client and signals that encoding back to the
- // client via the initialize result's property
- // [`capabilities.positionEncoding`](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#serverCapabilities). If the string value
- // `utf-16` is missing from the client's capability `general.positionEncodings`
- // servers can safely assume that the client supports UTF-16. If the server
- // omits the position encoding in its initialize result the encoding defaults
- // to the string value `utf-16`. Implementation considerations: since the
- // conversion from one encoding into another requires the content of the
- // file / line the conversion is best done where the file is read which is
- // usually on the server side.
- //
- // Positions are line end character agnostic. So you can not specify a position
- // that denotes `\r|\n` or `\n|` where `|` represents the character offset.
- //
- // @since 3.17.0 - support for negotiated position encoding.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#position
- Position struct {
- // Line position in a document (zero-based).
- //
- // If a line number is greater than the number of lines in a document, it defaults back to the number of lines in the document.
- // If a line number is negative, it defaults to 0.
- Line uint32 `json:"line"`
- // Character offset on a line in a document (zero-based).
- //
- // The meaning of this offset is determined by the negotiated
- // `PositionEncodingKind`.
- //
- // If the character value is greater than the line length it defaults back to the
- // line length.
- Character uint32 `json:"character"`
- }
-)
-
-// A set of predefined position encoding kinds.
-//
-// @since 3.17.0
-type PositionEncodingKind string
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#prepareRenameDefaultBehavior
-type PrepareRenameDefaultBehavior struct {
- DefaultBehavior bool `json:"defaultBehavior"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#prepareRenameParams
-type PrepareRenameParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#prepareRenamePlaceholder
-type PrepareRenamePlaceholder struct {
- Range Range `json:"range"`
- Placeholder string `json:"placeholder"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#prepareRenameResult
-type (
- PrepareRenameResult = Or_PrepareRenameResult // (alias)
- PrepareSupportDefaultBehavior uint32
-)
-
-// A previous result id in a workspace pull request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#previousResultId
-type PreviousResultID struct {
-	// The URI for which the client knows a
- // result id.
- URI DocumentURI `json:"uri"`
- // The value of the previous result id.
- Value string `json:"value"`
-}
-
-// A previous result id in a workspace pull request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#previousResultId
-type PreviousResultId struct {
-	// The URI for which the client knows a
- // result id.
- URI DocumentURI `json:"uri"`
- // The value of the previous result id.
- Value string `json:"value"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#progressParams
-type ProgressParams struct {
- // The progress token provided by the client or server.
- Token ProgressToken `json:"token"`
- // The progress data.
- Value interface{} `json:"value"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#progressToken
-type (
- ProgressToken = Or_ProgressToken // (alias)
- // The publish diagnostic client capabilities.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#publishDiagnosticsClientCapabilities
- PublishDiagnosticsClientCapabilities struct {
- // Whether the client interprets the version property of the
- // `textDocument/publishDiagnostics` notification's parameter.
- //
- // @since 3.15.0
- VersionSupport bool `json:"versionSupport,omitempty"`
- DiagnosticsCapabilities
- }
-)
-
-// The publish diagnostic notification's parameters.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#publishDiagnosticsParams
-type PublishDiagnosticsParams struct {
- // The URI for which diagnostic information is reported.
- URI DocumentURI `json:"uri"`
-	// Optionally, the version number of the document the diagnostics are published for.
- //
- // @since 3.15.0
- Version int32 `json:"version,omitempty"`
- // An array of diagnostic information items.
- Diagnostics []Diagnostic `json:"diagnostics"`
-}
-
-// A range in a text document expressed as (zero-based) start and end positions.
-//
-// If you want to specify a range that contains a line including the line ending
-// character(s) then use an end position denoting the start of the next line.
-// For example:
-// ```ts
-//
-// {
-//	start: { line: 5, character: 23 },
-//	end: { line: 6, character: 0 }
-// }
-//
-// ```
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#range
-type Range struct {
- // The range's start position.
- Start Position `json:"start"`
- // The range's end position.
- End Position `json:"end"`
-}
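
The TypeScript literal in the comment above translates directly to these Go types. As a small illustration, a range that covers all of line 5 including its line ending stops at character 0 of line 6 (the helper name below is made up for this sketch and is not part of the protocol package):

```go
// fullLine builds a Range spanning one whole line, line ending included.
func fullLine(line uint32) Range {
	return Range{
		Start: Position{Line: line, Character: 0},
		End:   Position{Line: line + 1, Character: 0}, // start of the next line
	}
}
```
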
-
-// Client Capabilities for a {@link ReferencesRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceClientCapabilities
-type ReferenceClientCapabilities struct {
- // Whether references supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// Value-object that contains additional information when
-// requesting references.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceContext
-type ReferenceContext struct {
- // Include the declaration of the current symbol.
- IncludeDeclaration bool `json:"includeDeclaration"`
-}
-
-// Reference options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceOptions
-type ReferenceOptions struct {
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link ReferencesRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceParams
-type ReferenceParams struct {
- Context ReferenceContext `json:"context"`
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link ReferencesRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceRegistrationOptions
-type ReferenceRegistrationOptions struct {
- TextDocumentRegistrationOptions
- ReferenceOptions
-}
-
-// General parameters to register for a notification or to register a provider.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#registration
-type Registration struct {
- // The id used to register the request. The id can be used to deregister
- // the request again.
- ID string `json:"id"`
- // The method / capability to register for.
- Method string `json:"method"`
- // Options necessary for the registration.
- RegisterOptions interface{} `json:"registerOptions,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#registrationParams
-type RegistrationParams struct {
- Registrations []Registration `json:"registrations"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#regularExpressionEngineKind
-type (
- RegularExpressionEngineKind = string // (alias)
- // Client capabilities specific to regular expressions.
- //
- // @since 3.16.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#regularExpressionsClientCapabilities
- RegularExpressionsClientCapabilities struct {
- // The engine's name.
- Engine RegularExpressionEngineKind `json:"engine"`
- // The engine's version.
- Version string `json:"version,omitempty"`
- }
-)
-
-// A full diagnostic report with a set of related documents.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#relatedFullDocumentDiagnosticReport
-type RelatedFullDocumentDiagnosticReport struct {
- // Diagnostics of related documents. This information is useful
- // in programming languages where code in a file A can generate
- // diagnostics in a file B which A depends on. An example of
-	// such a language is C/C++ where macro definitions in a file
-	// a.cpp result in errors in a header file b.hpp.
- //
- // @since 3.17.0
- RelatedDocuments map[DocumentURI]interface{} `json:"relatedDocuments,omitempty"`
- FullDocumentDiagnosticReport
-}
-
-// An unchanged diagnostic report with a set of related documents.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#relatedUnchangedDocumentDiagnosticReport
-type RelatedUnchangedDocumentDiagnosticReport struct {
- // Diagnostics of related documents. This information is useful
- // in programming languages where code in a file A can generate
- // diagnostics in a file B which A depends on. An example of
-	// such a language is C/C++ where macro definitions in a file
-	// a.cpp result in errors in a header file b.hpp.
- //
- // @since 3.17.0
- RelatedDocuments map[DocumentURI]interface{} `json:"relatedDocuments,omitempty"`
- UnchangedDocumentDiagnosticReport
-}
-
-// A relative pattern is a helper to construct glob patterns that are matched
-// relatively to a base URI. The common value for a `baseUri` is a workspace
-// folder root, but it can be another absolute URI as well.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#relativePattern
-type RelativePattern struct {
- // A workspace folder or a base URI to which this pattern will be matched
- // against relatively.
- BaseURI Or_RelativePattern_baseUri `json:"baseUri"`
-	// The actual glob pattern.
- Pattern Pattern `json:"pattern"`
-}
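
As a rough illustration of how `RelativePattern`, the `Pattern` alias, and the `Or_RelativePattern_baseUri` wrapper fit together, a pattern watching Go files under one workspace folder might look like the sketch below. The folder URI and the `WorkspaceFolder` literal are made up for this example; `WorkspaceFolder` is defined elsewhere in the package.

```go
// Sketch: watch all Go files below a single workspace folder.
var watchGo = RelativePattern{
	BaseURI: Or_RelativePattern_baseUri{Value: WorkspaceFolder{
		URI:  "file:///home/user/project", // assumed folder URI
		Name: "project",
	}},
	Pattern: "**/*.go", // Pattern is an alias for string (see above)
}
```
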
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameClientCapabilities
-type RenameClientCapabilities struct {
- // Whether rename supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Client supports testing for validity of rename operations
- // before execution.
- //
- // @since 3.12.0
- PrepareSupport bool `json:"prepareSupport,omitempty"`
- // Client supports the default behavior result.
- //
- // The value indicates the default behavior used by the
- // client.
- //
- // @since 3.16.0
- PrepareSupportDefaultBehavior *PrepareSupportDefaultBehavior `json:"prepareSupportDefaultBehavior,omitempty"`
- // Whether the client honors the change annotations in
- // text edits and resource operations returned via the
-	// rename request's workspace edit, for example by presenting
- // the workspace edit in the user interface and asking
- // for confirmation.
- //
- // @since 3.16.0
- HonorsChangeAnnotations bool `json:"honorsChangeAnnotations,omitempty"`
-}
-
-// Rename file operation
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameFile
-type RenameFile struct {
- // A rename
- Kind string `json:"kind"`
- // The old (existing) location.
- OldURI DocumentURI `json:"oldUri"`
- // The new location.
- NewURI DocumentURI `json:"newUri"`
- // Rename options.
- Options *RenameFileOptions `json:"options,omitempty"`
- ResourceOperation
-}
-
-// Rename file options
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameFileOptions
-type RenameFileOptions struct {
- // Overwrite target if existing. Overwrite wins over `ignoreIfExists`
- Overwrite bool `json:"overwrite,omitempty"`
- // Ignores if target exists.
- IgnoreIfExists bool `json:"ignoreIfExists,omitempty"`
-}
-
-// The parameters sent in notifications/requests for user-initiated renames of
-// files.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameFilesParams
-type RenameFilesParams struct {
- // An array of all files/folders renamed in this operation. When a folder is renamed, only
- // the folder will be included, and not its children.
- Files []FileRename `json:"files"`
-}
-
-// Provider options for a {@link RenameRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameOptions
-type RenameOptions struct {
- // Renames should be checked and tested before being executed.
- //
- // @since version 3.12.0
- PrepareProvider bool `json:"prepareProvider,omitempty"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link RenameRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameParams
-type RenameParams struct {
- // The document to rename.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The position at which this request was sent.
- Position Position `json:"position"`
- // The new name of the symbol. If the given name is not valid the
- // request must return a {@link ResponseError} with an
- // appropriate message set.
- NewName string `json:"newName"`
- WorkDoneProgressParams
-}
-
-// Registration options for a {@link RenameRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameRegistrationOptions
-type RenameRegistrationOptions struct {
- TextDocumentRegistrationOptions
- RenameOptions
-}
-
-// A generic resource operation.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#resourceOperation
-type ResourceOperation struct {
- // The resource operation kind.
- Kind string `json:"kind"`
- // An optional annotation identifier describing the operation.
- //
- // @since 3.16.0
- AnnotationID *ChangeAnnotationIdentifier `json:"annotationId,omitempty"`
-}
-type ResourceOperationKind string
-
-// Save options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#saveOptions
-type SaveOptions struct {
- // The client is supposed to include the content on save.
- IncludeText bool `json:"includeText,omitempty"`
-}
-
-// Describes the currently selected completion item.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectedCompletionInfo
-type SelectedCompletionInfo struct {
- // The range that will be replaced if this completion item is accepted.
- Range Range `json:"range"`
- // The text the range will be replaced with if this completion is accepted.
- Text string `json:"text"`
-}
-
-// A selection range represents a part of a selection hierarchy. A selection range
-// may have a parent selection range that contains it.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRange
-type SelectionRange struct {
- // The {@link Range range} of this selection range.
- Range Range `json:"range"`
- // The parent selection range containing this range. Therefore `parent.range` must contain `this.range`.
- Parent *SelectionRange `json:"parent,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRangeClientCapabilities
-type SelectionRangeClientCapabilities struct {
- // Whether implementation supports dynamic registration for selection range providers. If this is set to `true`
- // the client supports the new `SelectionRangeRegistrationOptions` return value for the corresponding server
- // capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRangeOptions
-type SelectionRangeOptions struct {
- WorkDoneProgressOptions
-}
-
-// A parameter literal used in selection range requests.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRangeParams
-type SelectionRangeParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The positions inside the text document.
- Positions []Position `json:"positions"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRangeRegistrationOptions
-type SelectionRangeRegistrationOptions struct {
- SelectionRangeOptions
- TextDocumentRegistrationOptions
- StaticRegistrationOptions
-}
-
-// A set of predefined token modifiers. This set is not fixed
-// and clients can specify additional token modifiers via the
-// corresponding client capabilities.
-//
-// @since 3.16.0
-type SemanticTokenModifiers string
-
-// A set of predefined token types. This set is not fixed
-// and clients can specify additional token types via the
-// corresponding client capabilities.
-//
-// @since 3.16.0
-type SemanticTokenTypes string
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokens
-type SemanticTokens struct {
- // An optional result id. If provided and clients support delta updating
- // the client will include the result id in the next semantic token request.
-	// A server can then, instead of computing all semantic tokens again, simply
- // send a delta.
- ResultID string `json:"resultId,omitempty"`
- // The actual tokens.
- Data []uint32 `json:"data"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensClientCapabilities
-type SemanticTokensClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Which requests the client supports and might send to the server
- // depending on the server's capability. Please note that clients might not
- // show semantic tokens or degrade some of the user experience if a range
- // or full request is advertised by the client but not provided by the
- // server. If for example the client capability `requests.full` and
-	// `requests.range` are both set to true but the server only provides a
- // range provider the client might not render a minimap correctly or might
- // even decide to not show any semantic tokens at all.
- Requests ClientSemanticTokensRequestOptions `json:"requests"`
- // The token types that the client supports.
- TokenTypes []string `json:"tokenTypes"`
- // The token modifiers that the client supports.
- TokenModifiers []string `json:"tokenModifiers"`
-	// The token formats the client supports.
- Formats []TokenFormat `json:"formats"`
- // Whether the client supports tokens that can overlap each other.
- OverlappingTokenSupport bool `json:"overlappingTokenSupport,omitempty"`
- // Whether the client supports tokens that can span multiple lines.
- MultilineTokenSupport bool `json:"multilineTokenSupport,omitempty"`
- // Whether the client allows the server to actively cancel a
- // semantic token request, e.g. supports returning
-	// LSPErrorCodes.ServerCancelled. If a server does, the client
- // needs to retrigger the request.
- //
- // @since 3.17.0
- ServerCancelSupport bool `json:"serverCancelSupport,omitempty"`
- // Whether the client uses semantic tokens to augment existing
- // syntax tokens. If set to `true` client side created syntax
- // tokens and semantic tokens are both used for colorization. If
- // set to `false` the client only uses the returned semantic tokens
- // for colorization.
- //
- // If the value is `undefined` then the client behavior is not
- // specified.
- //
- // @since 3.17.0
- AugmentsSyntaxTokens bool `json:"augmentsSyntaxTokens,omitempty"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensDelta
-type SemanticTokensDelta struct {
- ResultID string `json:"resultId,omitempty"`
- // The semantic token edits to transform a previous result into a new result.
- Edits []SemanticTokensEdit `json:"edits"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensDeltaParams
-type SemanticTokensDeltaParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The result id of a previous response. The result Id can either point to a full response
- // or a delta response depending on what was received last.
- PreviousResultID string `json:"previousResultId"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensDeltaPartialResult
-type SemanticTokensDeltaPartialResult struct {
- Edits []SemanticTokensEdit `json:"edits"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensEdit
-type SemanticTokensEdit struct {
- // The start offset of the edit.
- Start uint32 `json:"start"`
- // The count of elements to remove.
- DeleteCount uint32 `json:"deleteCount"`
- // The elements to insert.
- Data []uint32 `json:"data,omitempty"`
-}
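
Per the field comments, an edit removes `DeleteCount` elements starting at `Start` and inserts `Data` in their place. A minimal sketch of applying a delta to previously received token data, assuming the edits are sorted by `Start` and do not overlap:

```go
// applyTokenDelta splices each edit into the previous data slice.
func applyTokenDelta(prev []uint32, edits []SemanticTokensEdit) []uint32 {
	out := make([]uint32, 0, len(prev))
	var cursor uint32
	for _, e := range edits {
		out = append(out, prev[cursor:e.Start]...) // unchanged prefix
		out = append(out, e.Data...)               // inserted elements
		cursor = e.Start + e.DeleteCount           // skip removed elements
	}
	return append(out, prev[cursor:]...) // unchanged suffix
}
```
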
-
-// Semantic tokens options to support deltas for full documents
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensFullDelta
-type SemanticTokensFullDelta struct {
- // The server supports deltas for full documents.
- Delta bool `json:"delta,omitempty"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensLegend
-type SemanticTokensLegend struct {
- // The token types a server uses.
- TokenTypes []string `json:"tokenTypes"`
- // The token modifiers a server uses.
- TokenModifiers []string `json:"tokenModifiers"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensOptions
-type SemanticTokensOptions struct {
- // The legend used by the server
- Legend SemanticTokensLegend `json:"legend"`
- // Server supports providing semantic tokens for a specific range
- // of a document.
- Range *Or_SemanticTokensOptions_range `json:"range,omitempty"`
- // Server supports providing semantic tokens for a full document.
- Full *Or_SemanticTokensOptions_full `json:"full,omitempty"`
- WorkDoneProgressOptions
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensParams
-type SemanticTokensParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensPartialResult
-type SemanticTokensPartialResult struct {
- Data []uint32 `json:"data"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensRangeParams
-type SemanticTokensRangeParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The range the semantic tokens are requested for.
- Range Range `json:"range"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensRegistrationOptions
-type SemanticTokensRegistrationOptions struct {
- TextDocumentRegistrationOptions
- SemanticTokensOptions
- StaticRegistrationOptions
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensWorkspaceClientCapabilities
-type SemanticTokensWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from
- // the server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // semantic tokens currently shown. It should be used with absolute care
-	// and is useful for situations where a server, for example, detects a
-	// project-wide change that requires such a calculation.
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// Defines the capabilities provided by a language
-// server.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#serverCapabilities
-type ServerCapabilities struct {
- // The position encoding the server picked from the encodings offered
- // by the client via the client capability `general.positionEncodings`.
- //
- // If the client didn't provide any position encodings the only valid
- // value that a server can return is 'utf-16'.
- //
- // If omitted it defaults to 'utf-16'.
- //
- // @since 3.17.0
- PositionEncoding *PositionEncodingKind `json:"positionEncoding,omitempty"`
- // Defines how text documents are synced. Is either a detailed structure
- // defining each notification or for backwards compatibility the
- // TextDocumentSyncKind number.
- TextDocumentSync interface{} `json:"textDocumentSync,omitempty"`
- // Defines how notebook documents are synced.
- //
- // @since 3.17.0
- NotebookDocumentSync *Or_ServerCapabilities_notebookDocumentSync `json:"notebookDocumentSync,omitempty"`
- // The server provides completion support.
- CompletionProvider *CompletionOptions `json:"completionProvider,omitempty"`
- // The server provides hover support.
- HoverProvider *Or_ServerCapabilities_hoverProvider `json:"hoverProvider,omitempty"`
- // The server provides signature help support.
- SignatureHelpProvider *SignatureHelpOptions `json:"signatureHelpProvider,omitempty"`
- // The server provides Goto Declaration support.
- DeclarationProvider *Or_ServerCapabilities_declarationProvider `json:"declarationProvider,omitempty"`
- // The server provides goto definition support.
- DefinitionProvider *Or_ServerCapabilities_definitionProvider `json:"definitionProvider,omitempty"`
- // The server provides Goto Type Definition support.
- TypeDefinitionProvider *Or_ServerCapabilities_typeDefinitionProvider `json:"typeDefinitionProvider,omitempty"`
- // The server provides Goto Implementation support.
- ImplementationProvider *Or_ServerCapabilities_implementationProvider `json:"implementationProvider,omitempty"`
- // The server provides find references support.
- ReferencesProvider *Or_ServerCapabilities_referencesProvider `json:"referencesProvider,omitempty"`
- // The server provides document highlight support.
- DocumentHighlightProvider *Or_ServerCapabilities_documentHighlightProvider `json:"documentHighlightProvider,omitempty"`
- // The server provides document symbol support.
- DocumentSymbolProvider *Or_ServerCapabilities_documentSymbolProvider `json:"documentSymbolProvider,omitempty"`
- // The server provides code actions. CodeActionOptions may only be
- // specified if the client states that it supports
- // `codeActionLiteralSupport` in its initial `initialize` request.
- CodeActionProvider interface{} `json:"codeActionProvider,omitempty"`
- // The server provides code lens.
- CodeLensProvider *CodeLensOptions `json:"codeLensProvider,omitempty"`
- // The server provides document link support.
- DocumentLinkProvider *DocumentLinkOptions `json:"documentLinkProvider,omitempty"`
- // The server provides color provider support.
- ColorProvider *Or_ServerCapabilities_colorProvider `json:"colorProvider,omitempty"`
- // The server provides workspace symbol support.
- WorkspaceSymbolProvider *Or_ServerCapabilities_workspaceSymbolProvider `json:"workspaceSymbolProvider,omitempty"`
- // The server provides document formatting.
- DocumentFormattingProvider *Or_ServerCapabilities_documentFormattingProvider `json:"documentFormattingProvider,omitempty"`
- // The server provides document range formatting.
- DocumentRangeFormattingProvider *Or_ServerCapabilities_documentRangeFormattingProvider `json:"documentRangeFormattingProvider,omitempty"`
- // The server provides document formatting on typing.
- DocumentOnTypeFormattingProvider *DocumentOnTypeFormattingOptions `json:"documentOnTypeFormattingProvider,omitempty"`
- // The server provides rename support. RenameOptions may only be
- // specified if the client states that it supports
- // `prepareSupport` in its initial `initialize` request.
- RenameProvider interface{} `json:"renameProvider,omitempty"`
- // The server provides folding provider support.
- FoldingRangeProvider *Or_ServerCapabilities_foldingRangeProvider `json:"foldingRangeProvider,omitempty"`
- // The server provides selection range support.
- SelectionRangeProvider *Or_ServerCapabilities_selectionRangeProvider `json:"selectionRangeProvider,omitempty"`
- // The server provides execute command support.
- ExecuteCommandProvider *ExecuteCommandOptions `json:"executeCommandProvider,omitempty"`
- // The server provides call hierarchy support.
- //
- // @since 3.16.0
- CallHierarchyProvider *Or_ServerCapabilities_callHierarchyProvider `json:"callHierarchyProvider,omitempty"`
- // The server provides linked editing range support.
- //
- // @since 3.16.0
- LinkedEditingRangeProvider *Or_ServerCapabilities_linkedEditingRangeProvider `json:"linkedEditingRangeProvider,omitempty"`
- // The server provides semantic tokens support.
- //
- // @since 3.16.0
- SemanticTokensProvider interface{} `json:"semanticTokensProvider,omitempty"`
- // The server provides moniker support.
- //
- // @since 3.16.0
- MonikerProvider *Or_ServerCapabilities_monikerProvider `json:"monikerProvider,omitempty"`
- // The server provides type hierarchy support.
- //
- // @since 3.17.0
- TypeHierarchyProvider *Or_ServerCapabilities_typeHierarchyProvider `json:"typeHierarchyProvider,omitempty"`
- // The server provides inline values.
- //
- // @since 3.17.0
- InlineValueProvider *Or_ServerCapabilities_inlineValueProvider `json:"inlineValueProvider,omitempty"`
- // The server provides inlay hints.
- //
- // @since 3.17.0
- InlayHintProvider interface{} `json:"inlayHintProvider,omitempty"`
- // The server has support for pull model diagnostics.
- //
- // @since 3.17.0
- DiagnosticProvider *Or_ServerCapabilities_diagnosticProvider `json:"diagnosticProvider,omitempty"`
- // Inline completion options used during static registration.
- //
- // @since 3.18.0
- // @proposed
- InlineCompletionProvider *Or_ServerCapabilities_inlineCompletionProvider `json:"inlineCompletionProvider,omitempty"`
- // Workspace specific server capabilities.
- Workspace *WorkspaceOptions `json:"workspace,omitempty"`
- // Experimental server capabilities.
- Experimental interface{} `json:"experimental,omitempty"`
-}
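
Many of these capability fields are unions of a boolean and an options struct, so a client cannot simply compare them against `true`. The sketch below shows one way to read such a field once it has been decoded into its `Or_` wrapper; how `Value` gets populated depends on the wrapper's unmarshaling code, which is not part of this hunk.

```go
// supportsHover reports whether the server advertises hover support.
func supportsHover(caps ServerCapabilities) bool {
	if caps.HoverProvider == nil {
		return false
	}
	if b, ok := caps.HoverProvider.Value.(bool); ok {
		return b // plain true/false form
	}
	// Any HoverOptions value (even an empty one) means hover is provided.
	return caps.HoverProvider.Value != nil
}
```
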
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#serverCompletionItemOptions
-type ServerCompletionItemOptions struct {
- // The server has support for completion item label
- // details (see also `CompletionItemLabelDetails`) when
- // receiving a completion item in a resolve call.
- //
- // @since 3.17.0
- LabelDetailsSupport bool `json:"labelDetailsSupport,omitempty"`
-}
-
-// Information about the server
-//
-// @since 3.15.0
-// @since 3.18.0 ServerInfo type name added.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#serverInfo
-type ServerInfo struct {
- // The name of the server as defined by the server.
- Name string `json:"name"`
- // The server's version as defined by the server.
- Version string `json:"version,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#setTraceParams
-type SetTraceParams struct {
- Value TraceValue `json:"value"`
-}
-
-// Client capabilities for the showDocument request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showDocumentClientCapabilities
-type ShowDocumentClientCapabilities struct {
- // The client has support for the showDocument
- // request.
- Support bool `json:"support"`
-}
-
-// Params to show a resource in the UI.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showDocumentParams
-type ShowDocumentParams struct {
- // The uri to show.
- URI URI `json:"uri"`
- // Indicates to show the resource in an external program.
- // To show, for example, `https://code.visualstudio.com/`
-	// in the default web browser, set `external` to `true`.
- External bool `json:"external,omitempty"`
- // An optional property to indicate whether the editor
- // showing the document should take focus or not.
- // Clients might ignore this property if an external
- // program is started.
- TakeFocus bool `json:"takeFocus,omitempty"`
- // An optional selection range if the document is a text
- // document. Clients might ignore the property if an
- // external program is started or the file is not a text
- // file.
- Selection *Range `json:"selection,omitempty"`
-}
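// Illustrative sketch of how a server could populate ShowDocumentParams to ask
// the client to open a URL in an external browser. It assumes URI is a
// string-defined type, as it appears to be elsewhere in this package; the
// TakeFocus and Selection fields are left unset because clients may ignore
// them when an external program is started.
func exampleShowDocumentParams() ShowDocumentParams {
	return ShowDocumentParams{
		URI:      URI("https://code.visualstudio.com/"),
		External: true, // open outside the editor
	}
}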
-
-// The result of a showDocument request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showDocumentResult
-type ShowDocumentResult struct {
- // A boolean indicating if the show was successful.
- Success bool `json:"success"`
-}
-
-// The parameters of a notification message.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showMessageParams
-type ShowMessageParams struct {
- // The message type. See {@link MessageType}
- Type MessageType `json:"type"`
- // The actual message.
- Message string `json:"message"`
-}
-
-// Show message request client capabilities
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showMessageRequestClientCapabilities
-type ShowMessageRequestClientCapabilities struct {
- // Capabilities specific to the `MessageActionItem` type.
- MessageActionItem *ClientShowMessageActionItemOptions `json:"messageActionItem,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showMessageRequestParams
-type ShowMessageRequestParams struct {
- // The message type. See {@link MessageType}
- Type MessageType `json:"type"`
- // The actual message.
- Message string `json:"message"`
- // The message action items to present.
- Actions []MessageActionItem `json:"actions,omitempty"`
-}
-
-// Signature help represents the signature of something
-// callable. There can be multiple signatures but only one
-// active and only one active parameter.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelp
-type SignatureHelp struct {
- // One or more signatures.
- Signatures []SignatureInformation `json:"signatures"`
- // The active signature. If omitted or the value lies outside the
- // range of `signatures` the value defaults to zero or is ignored if
- // the `SignatureHelp` has no signatures.
- //
- // Whenever possible implementors should make an active decision about
- // the active signature and shouldn't rely on a default value.
- //
-	// In a future version of the protocol this property might become
- // mandatory to better express this.
- ActiveSignature uint32 `json:"activeSignature,omitempty"`
- // The active parameter of the active signature.
- //
- // If `null`, no parameter of the signature is active (for example a named
- // argument that does not match any declared parameters). This is only valid
- // if the client specifies the client capability
- // `textDocument.signatureHelp.noActiveParameterSupport === true`
- //
- // If omitted or the value lies outside the range of
- // `signatures[activeSignature].parameters` defaults to 0 if the active
- // signature has parameters.
- //
- // If the active signature has no parameters it is ignored.
- //
-	// In a future version of the protocol this property might become
- // mandatory (but still nullable) to better express the active parameter if
- // the active signature does have any.
- ActiveParameter uint32 `json:"activeParameter,omitempty"`
-}
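// Illustrative sketch of the defaulting rules described in the field comments
// above: an out-of-range activeSignature falls back to 0, and activeParameter
// falls back to 0 whenever the active signature has parameters. (A per-signature
// SignatureInformation.ActiveParameter, when provided, takes precedence; that
// case is omitted here for brevity.)
func activeSignatureAndParameter(sh SignatureHelp) (sig, param int) {
	if len(sh.Signatures) == 0 {
		return -1, -1 // nothing to show
	}
	sig = int(sh.ActiveSignature)
	if sig >= len(sh.Signatures) {
		sig = 0 // out of range: default to the first signature
	}
	params := sh.Signatures[sig].Parameters
	if len(params) == 0 {
		return sig, -1 // the active signature has no parameters
	}
	param = int(sh.ActiveParameter)
	if param >= len(params) {
		param = 0 // out of range: default to the first parameter
	}
	return sig, param
}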
-
-// Client Capabilities for a {@link SignatureHelpRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpClientCapabilities
-type SignatureHelpClientCapabilities struct {
- // Whether signature help supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports the following `SignatureInformation`
- // specific properties.
- SignatureInformation *ClientSignatureInformationOptions `json:"signatureInformation,omitempty"`
- // The client supports to send additional context information for a
- // `textDocument/signatureHelp` request. A client that opts into
- // contextSupport will also support the `retriggerCharacters` on
- // `SignatureHelpOptions`.
- //
- // @since 3.15.0
- ContextSupport bool `json:"contextSupport,omitempty"`
-}
-
-// Additional information about the context in which a signature help request was triggered.
-//
-// @since 3.15.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpContext
-type SignatureHelpContext struct {
- // Action that caused signature help to be triggered.
- TriggerKind SignatureHelpTriggerKind `json:"triggerKind"`
- // Character that caused signature help to be triggered.
- //
- // This is undefined when `triggerKind !== SignatureHelpTriggerKind.TriggerCharacter`
- TriggerCharacter string `json:"triggerCharacter,omitempty"`
- // `true` if signature help was already showing when it was triggered.
- //
-	// Retriggers occur when the signature help is already active and can be caused by actions such as
- // typing a trigger character, a cursor move, or document content changes.
- IsRetrigger bool `json:"isRetrigger"`
- // The currently active `SignatureHelp`.
- //
- // The `activeSignatureHelp` has its `SignatureHelp.activeSignature` field updated based on
- // the user navigating through available signatures.
- ActiveSignatureHelp *SignatureHelp `json:"activeSignatureHelp,omitempty"`
-}
-
-// Server Capabilities for a {@link SignatureHelpRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpOptions
-type SignatureHelpOptions struct {
- // List of characters that trigger signature help automatically.
- TriggerCharacters []string `json:"triggerCharacters,omitempty"`
- // List of characters that re-trigger signature help.
- //
- // These trigger characters are only active when signature help is already showing. All trigger characters
- // are also counted as re-trigger characters.
- //
- // @since 3.15.0
- RetriggerCharacters []string `json:"retriggerCharacters,omitempty"`
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link SignatureHelpRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpParams
-type SignatureHelpParams struct {
- // The signature help context. This is only available if the client specifies
- // to send this using the client capability `textDocument.signatureHelp.contextSupport === true`
- //
- // @since 3.15.0
- Context *SignatureHelpContext `json:"context,omitempty"`
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// Registration options for a {@link SignatureHelpRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpRegistrationOptions
-type SignatureHelpRegistrationOptions struct {
- TextDocumentRegistrationOptions
- SignatureHelpOptions
-}
-
-// How a signature help was triggered.
-//
-// @since 3.15.0
-type SignatureHelpTriggerKind uint32
-
-// Represents the signature of something callable. A signature
-// can have a label, like a function-name, a doc-comment, and
-// a set of parameters.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureInformation
-type SignatureInformation struct {
- // The label of this signature. Will be shown in
- // the UI.
- Label string `json:"label"`
- // The human-readable doc-comment of this signature. Will be shown
- // in the UI but can be omitted.
- Documentation *Or_SignatureInformation_documentation `json:"documentation,omitempty"`
- // The parameters of this signature.
- Parameters []ParameterInformation `json:"parameters,omitempty"`
- // The index of the active parameter.
- //
- // If `null`, no parameter of the signature is active (for example a named
- // argument that does not match any declared parameters). This is only valid
- // if the client specifies the client capability
- // `textDocument.signatureHelp.noActiveParameterSupport === true`
- //
- // If provided (or `null`), this is used in place of
- // `SignatureHelp.activeParameter`.
- //
- // @since 3.16.0
- ActiveParameter uint32 `json:"activeParameter,omitempty"`
-}
-
-// An interactive text edit.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#snippetTextEdit
-type SnippetTextEdit struct {
- // The range of the text document to be manipulated.
- Range Range `json:"range"`
- // The snippet to be inserted.
- Snippet StringValue `json:"snippet"`
- // The actual identifier of the snippet edit.
- AnnotationID *ChangeAnnotationIdentifier `json:"annotationId,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#staleRequestSupportOptions
-type StaleRequestSupportOptions struct {
- // The client will actively cancel the request.
- Cancel bool `json:"cancel"`
- // The list of requests for which the client
- // will retry the request if it receives a
- // response with error code `ContentModified`
- RetryOnContentModified []string `json:"retryOnContentModified"`
-}
-
-// Static registration options to be returned in the initialize
-// request.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#staticRegistrationOptions
-type StaticRegistrationOptions struct {
- // The id used to register the request. The id can be used to deregister
- // the request again. See also Registration#id.
- ID string `json:"id,omitempty"`
-}
-
-// A string value used as a snippet is a template that allows inserting text
-// and to control the editor cursor when insertion happens.
-//
-// A snippet can define tab stops and placeholders with `$1`, `$2`
-// and `${3:foo}`. `$0` defines the final tab stop, it defaults to
-// the end of the snippet. Variables are defined with `$name` and
-// `${name:default value}`.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#stringValue
-type StringValue struct {
- // The kind of string value.
- Kind string `json:"kind"`
- // The snippet string.
- Value string `json:"value"`
-}
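// Illustrative sketch of a StringValue using the tab-stop and placeholder
// syntax described above. The "snippet" kind follows the LSP snippet grammar;
// this file does not enumerate the allowed kinds, so treat it as an assumption.
func exampleSnippet() StringValue {
	return StringValue{
		Kind:  "snippet",
		Value: "if ${1:condition} {\n\t$0\n}", // ${1:...} placeholder, $0 final tab stop
	}
}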
-
-// Represents information about programming constructs like variables, classes,
-// interfaces etc.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#symbolInformation
-type SymbolInformation struct {
- // extends BaseSymbolInformation
- // Indicates if this symbol is deprecated.
- //
- // @deprecated Use tags instead
- Deprecated bool `json:"deprecated,omitempty"`
- // The location of this symbol. The location's range is used by a tool
- // to reveal the location in the editor. If the symbol is selected in the
- // tool the range's start information is used to position the cursor. So
- // the range usually spans more than the actual symbol's name and does
- // normally include things like visibility modifiers.
- //
- // The range doesn't have to denote a node range in the sense of an abstract
- // syntax tree. It can therefore not be used to re-construct a hierarchy of
- // the symbols.
- Location Location `json:"location"`
- // The name of this symbol.
- Name string `json:"name"`
- // The kind of this symbol.
- Kind SymbolKind `json:"kind"`
- // Tags for this symbol.
- //
- // @since 3.16.0
- Tags []SymbolTag `json:"tags,omitempty"`
- // The name of the symbol containing this symbol. This information is for
- // user interface purposes (e.g. to render a qualifier in the user interface
- // if necessary). It can't be used to re-infer a hierarchy for the document
- // symbols.
- ContainerName string `json:"containerName,omitempty"`
-}
-
-// A symbol kind.
-type SymbolKind uint32
-
-// Symbol tags are extra annotations that tweak the rendering of a symbol.
-//
-// @since 3.16
-type SymbolTag uint32
-
-// Describe options to be used when registered for text document change events.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentChangeRegistrationOptions
-type TextDocumentChangeRegistrationOptions struct {
- // How documents are synced to the server.
- SyncKind TextDocumentSyncKind `json:"syncKind"`
- TextDocumentRegistrationOptions
-}
-
-// Text document specific client capabilities.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentClientCapabilities
-type TextDocumentClientCapabilities struct {
- // Defines which synchronization capabilities the client supports.
- Synchronization *TextDocumentSyncClientCapabilities `json:"synchronization,omitempty"`
- // Capabilities specific to the `textDocument/completion` request.
- Completion CompletionClientCapabilities `json:"completion,omitempty"`
- // Capabilities specific to the `textDocument/hover` request.
- Hover *HoverClientCapabilities `json:"hover,omitempty"`
- // Capabilities specific to the `textDocument/signatureHelp` request.
- SignatureHelp *SignatureHelpClientCapabilities `json:"signatureHelp,omitempty"`
- // Capabilities specific to the `textDocument/declaration` request.
- //
- // @since 3.14.0
- Declaration *DeclarationClientCapabilities `json:"declaration,omitempty"`
- // Capabilities specific to the `textDocument/definition` request.
- Definition *DefinitionClientCapabilities `json:"definition,omitempty"`
- // Capabilities specific to the `textDocument/typeDefinition` request.
- //
- // @since 3.6.0
- TypeDefinition *TypeDefinitionClientCapabilities `json:"typeDefinition,omitempty"`
- // Capabilities specific to the `textDocument/implementation` request.
- //
- // @since 3.6.0
- Implementation *ImplementationClientCapabilities `json:"implementation,omitempty"`
- // Capabilities specific to the `textDocument/references` request.
- References *ReferenceClientCapabilities `json:"references,omitempty"`
- // Capabilities specific to the `textDocument/documentHighlight` request.
- DocumentHighlight *DocumentHighlightClientCapabilities `json:"documentHighlight,omitempty"`
- // Capabilities specific to the `textDocument/documentSymbol` request.
- DocumentSymbol DocumentSymbolClientCapabilities `json:"documentSymbol,omitempty"`
- // Capabilities specific to the `textDocument/codeAction` request.
- CodeAction CodeActionClientCapabilities `json:"codeAction,omitempty"`
- // Capabilities specific to the `textDocument/codeLens` request.
- CodeLens *CodeLensClientCapabilities `json:"codeLens,omitempty"`
- // Capabilities specific to the `textDocument/documentLink` request.
- DocumentLink *DocumentLinkClientCapabilities `json:"documentLink,omitempty"`
- // Capabilities specific to the `textDocument/documentColor` and the
- // `textDocument/colorPresentation` request.
- //
- // @since 3.6.0
- ColorProvider *DocumentColorClientCapabilities `json:"colorProvider,omitempty"`
- // Capabilities specific to the `textDocument/formatting` request.
- Formatting *DocumentFormattingClientCapabilities `json:"formatting,omitempty"`
- // Capabilities specific to the `textDocument/rangeFormatting` request.
- RangeFormatting *DocumentRangeFormattingClientCapabilities `json:"rangeFormatting,omitempty"`
- // Capabilities specific to the `textDocument/onTypeFormatting` request.
- OnTypeFormatting *DocumentOnTypeFormattingClientCapabilities `json:"onTypeFormatting,omitempty"`
- // Capabilities specific to the `textDocument/rename` request.
- Rename *RenameClientCapabilities `json:"rename,omitempty"`
- // Capabilities specific to the `textDocument/foldingRange` request.
- //
- // @since 3.10.0
- FoldingRange *FoldingRangeClientCapabilities `json:"foldingRange,omitempty"`
- // Capabilities specific to the `textDocument/selectionRange` request.
- //
- // @since 3.15.0
- SelectionRange *SelectionRangeClientCapabilities `json:"selectionRange,omitempty"`
- // Capabilities specific to the `textDocument/publishDiagnostics` notification.
- PublishDiagnostics PublishDiagnosticsClientCapabilities `json:"publishDiagnostics,omitempty"`
- // Capabilities specific to the various call hierarchy requests.
- //
- // @since 3.16.0
- CallHierarchy *CallHierarchyClientCapabilities `json:"callHierarchy,omitempty"`
-	// Capabilities specific to the various semantic token requests.
- //
- // @since 3.16.0
- SemanticTokens SemanticTokensClientCapabilities `json:"semanticTokens,omitempty"`
- // Capabilities specific to the `textDocument/linkedEditingRange` request.
- //
- // @since 3.16.0
- LinkedEditingRange *LinkedEditingRangeClientCapabilities `json:"linkedEditingRange,omitempty"`
- // Client capabilities specific to the `textDocument/moniker` request.
- //
- // @since 3.16.0
- Moniker *MonikerClientCapabilities `json:"moniker,omitempty"`
- // Capabilities specific to the various type hierarchy requests.
- //
- // @since 3.17.0
- TypeHierarchy *TypeHierarchyClientCapabilities `json:"typeHierarchy,omitempty"`
- // Capabilities specific to the `textDocument/inlineValue` request.
- //
- // @since 3.17.0
- InlineValue *InlineValueClientCapabilities `json:"inlineValue,omitempty"`
- // Capabilities specific to the `textDocument/inlayHint` request.
- //
- // @since 3.17.0
- InlayHint *InlayHintClientCapabilities `json:"inlayHint,omitempty"`
- // Capabilities specific to the diagnostic pull model.
- //
- // @since 3.17.0
- Diagnostic *DiagnosticClientCapabilities `json:"diagnostic,omitempty"`
- // Client capabilities specific to inline completions.
- //
- // @since 3.18.0
- // @proposed
- InlineCompletion *InlineCompletionClientCapabilities `json:"inlineCompletion,omitempty"`
-}
-
-// An event describing a change to a text document. If only text is provided,
-// it is considered to be the full content of the document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentChangeEvent
-type (
- TextDocumentContentChangeEvent = Or_TextDocumentContentChangeEvent // (alias)
- // @since 3.18.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentChangePartial
- TextDocumentContentChangePartial struct {
- // The range of the document that changed.
- Range *Range `json:"range,omitempty"`
- // The optional length of the range that got replaced.
- //
- // @deprecated use range instead.
- RangeLength uint32 `json:"rangeLength,omitempty"`
- // The new text for the provided range.
- Text string `json:"text"`
- }
-)
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentChangeWholeDocument
-type TextDocumentContentChangeWholeDocument struct {
- // The new text of the whole document.
- Text string `json:"text"`
-}
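// Illustrative sketch of the two change shapes a client can send in
// textDocument/didChange: an incremental edit with a range, or a whole-document
// replacement. Range and Position are assumed to be the definitions used
// elsewhere in this package (Start/End with Line/Character fields).
func exampleContentChanges() (TextDocumentContentChangePartial, TextDocumentContentChangeWholeDocument) {
	partial := TextDocumentContentChangePartial{
		// Replace line 3, columns 0-5, with "hello".
		Range: &Range{
			Start: Position{Line: 3, Character: 0},
			End:   Position{Line: 3, Character: 5},
		},
		Text: "hello",
	}
	whole := TextDocumentContentChangeWholeDocument{Text: "entire new file content\n"}
	return partial, whole
}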
-
-// Client capabilities for a text document content provider.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentClientCapabilities
-type TextDocumentContentClientCapabilities struct {
- // Text document content provider supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// Text document content provider options.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentOptions
-type TextDocumentContentOptions struct {
- // The scheme for which the server provides content.
- Scheme string `json:"scheme"`
-}
-
-// Parameters for the `workspace/textDocumentContent` request.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentParams
-type TextDocumentContentParams struct {
- // The uri of the text document.
- URI DocumentURI `json:"uri"`
-}
-
-// Parameters for the `workspace/textDocumentContent/refresh` request.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentRefreshParams
-type TextDocumentContentRefreshParams struct {
- // The uri of the text document to refresh.
- URI DocumentURI `json:"uri"`
-}
-
-// Text document content provider registration options.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentRegistrationOptions
-type TextDocumentContentRegistrationOptions struct {
- TextDocumentContentOptions
- StaticRegistrationOptions
-}
-
-// Describes textual changes on a text document. A TextDocumentEdit describes all changes
-// on a document version Si; after they are applied, the document moves to version Si+1.
-// So the creator of a TextDocumentEdit doesn't need to sort the array of edits or do any
-// kind of ordering. However, the edits must be non-overlapping.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentEdit
-type TextDocumentEdit struct {
- // The text document to change.
- TextDocument OptionalVersionedTextDocumentIdentifier `json:"textDocument"`
- // The edits to be applied.
- //
- // @since 3.16.0 - support for AnnotatedTextEdit. This is guarded using a
- // client capability.
- //
- // @since 3.18.0 - support for SnippetTextEdit. This is guarded using a
- // client capability.
- Edits []Or_TextDocumentEdit_edits_Elem `json:"edits"`
-}
-
-// A document filter denotes a document by different properties like
-// the {@link TextDocument.languageId language}, the {@link Uri.scheme scheme} of
-// its resource, or a glob-pattern that is applied to the {@link TextDocument.fileName path}.
-//
-// Glob patterns can have the following syntax:
-//
-// - `*` to match one or more characters in a path segment
-// - `?` to match on one character in a path segment
-// - `**` to match any number of path segments, including none
-// - `{}` to group sub patterns into an OR expression. (e.g. `**/*.{ts,js}` matches all TypeScript and JavaScript files)
-// - `[]` to declare a range of characters to match in a path segment (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …)
-// - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`)
-//
-// @sample A language filter that applies to typescript files on disk: `{ language: 'typescript', scheme: 'file' }`
-// @sample A language filter that applies to all package.json paths: `{ language: 'json', pattern: '**package.json' }`
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentFilter
-type (
- TextDocumentFilter = Or_TextDocumentFilter // (alias)
-	// A document filter where `language` is a required field.
- //
- // @since 3.18.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentFilterLanguage
- TextDocumentFilterLanguage struct {
- // A language id, like `typescript`.
- Language string `json:"language"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme,omitempty"`
- // A glob pattern, like **/*.{ts,js}. See TextDocumentFilter for examples.
- //
- // @since 3.18.0 - support for relative patterns.
- Pattern *GlobPattern `json:"pattern,omitempty"`
- }
-)
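// Illustrative sketch of document filters matching the glob-pattern syntax
// described above: one selecting TypeScript files on disk, one selecting any
// document served over the `untitled` scheme. The Pattern field is left unset
// because GlobPattern is a union type defined elsewhere in this package.
func exampleDocumentFilters() (TextDocumentFilterLanguage, TextDocumentFilterScheme) {
	byLanguage := TextDocumentFilterLanguage{Language: "typescript", Scheme: "file"}
	byScheme := TextDocumentFilterScheme{Scheme: "untitled"}
	return byLanguage, byScheme
}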
-
-// A document filter where `pattern` is a required field.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentFilterPattern
-type TextDocumentFilterPattern struct {
- // A language id, like `typescript`.
- Language string `json:"language,omitempty"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme,omitempty"`
- // A glob pattern, like **/*.{ts,js}. See TextDocumentFilter for examples.
- //
- // @since 3.18.0 - support for relative patterns.
- Pattern GlobPattern `json:"pattern"`
-}
-
-// A document filter where `scheme` is a required field.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentFilterScheme
-type TextDocumentFilterScheme struct {
- // A language id, like `typescript`.
- Language string `json:"language,omitempty"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme"`
- // A glob pattern, like **/*.{ts,js}. See TextDocumentFilter for examples.
- //
- // @since 3.18.0 - support for relative patterns.
- Pattern *GlobPattern `json:"pattern,omitempty"`
-}
-
-// A literal to identify a text document in the client.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentIdentifier
-type TextDocumentIdentifier struct {
- // The text document's uri.
- URI DocumentURI `json:"uri"`
-}
-
-// An item to transfer a text document from the client to the
-// server.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentItem
-type TextDocumentItem struct {
- // The text document's uri.
- URI DocumentURI `json:"uri"`
- // The text document's language identifier.
- LanguageID LanguageKind `json:"languageId"`
- // The version number of this document (it will increase after each
- // change, including undo/redo).
- Version int32 `json:"version"`
- // The content of the opened text document.
- Text string `json:"text"`
-}
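// Illustrative sketch of the item a client would send in textDocument/didOpen.
// It assumes DocumentURI and LanguageKind are string-defined types, as they
// appear to be elsewhere in this file; the path and language id are examples.
func exampleDidOpenItem(contents string) TextDocumentItem {
	return TextDocumentItem{
		URI:        DocumentURI("file:///tmp/main.go"),
		LanguageID: LanguageKind("go"),
		Version:    1, // first version; bumped on every change, including undo/redo
		Text:       contents,
	}
}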
-
-// A parameter literal used in requests to pass a text document and a position inside that
-// document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentPositionParams
-type TextDocumentPositionParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The position inside the text document.
- Position Position `json:"position"`
-}
-
-// General text document registration options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentRegistrationOptions
-type TextDocumentRegistrationOptions struct {
- // A document selector to identify the scope of the registration. If set to null
- // the document selector provided on the client side will be used.
- DocumentSelector DocumentSelector `json:"documentSelector"`
-}
-
-// Represents reasons why a text document is saved.
-type TextDocumentSaveReason uint32
-
-// Save registration options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentSaveRegistrationOptions
-type TextDocumentSaveRegistrationOptions struct {
- TextDocumentRegistrationOptions
- SaveOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentSyncClientCapabilities
-type TextDocumentSyncClientCapabilities struct {
- // Whether text document synchronization supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports sending will save notifications.
- WillSave bool `json:"willSave,omitempty"`
- // The client supports sending a will save request and
- // waits for a response providing text edits which will
- // be applied to the document before it is saved.
- WillSaveWaitUntil bool `json:"willSaveWaitUntil,omitempty"`
- // The client supports did save notifications.
- DidSave bool `json:"didSave,omitempty"`
-}
-
-// Defines how the host (editor) should sync
-// document changes to the language server.
-type TextDocumentSyncKind uint32
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentSyncOptions
-type TextDocumentSyncOptions struct {
-	// Open and close notifications are sent to the server. If omitted, open and close
-	// notifications should not be sent.
- OpenClose bool `json:"openClose,omitempty"`
- // Change notifications are sent to the server. See TextDocumentSyncKind.None, TextDocumentSyncKind.Full
- // and TextDocumentSyncKind.Incremental. If omitted it defaults to TextDocumentSyncKind.None.
- Change TextDocumentSyncKind `json:"change,omitempty"`
- // If present will save notifications are sent to the server. If omitted the notification should not be
- // sent.
- WillSave bool `json:"willSave,omitempty"`
- // If present will save wait until requests are sent to the server. If omitted the request should not be
- // sent.
- WillSaveWaitUntil bool `json:"willSaveWaitUntil,omitempty"`
- // If present save notifications are sent to the server. If omitted the notification should not be
- // sent.
- Save *SaveOptions `json:"save,omitempty"`
-}
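// Illustrative sketch of sync options a server might advertise: open/close
// notifications plus incremental change events. The numeric value 2 corresponds
// to TextDocumentSyncKind.Incremental in the LSP spec; this package may define
// a named constant for it elsewhere. SaveOptions is assumed to be the struct
// referenced by the Save field above.
func exampleSyncOptions() TextDocumentSyncOptions {
	return TextDocumentSyncOptions{
		OpenClose: true,
		Change:    TextDocumentSyncKind(2), // incremental, range-based updates
		Save:      &SaveOptions{},          // plain save notifications, no text included
	}
}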
-
-// A text edit applicable to a text document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textEdit
-type TextEdit struct {
- // The range of the text document to be manipulated. To insert
- // text into a document create a range where start === end.
- Range Range `json:"range"`
- // The string to be inserted. For delete operations use an
- // empty string.
- NewText string `json:"newText"`
-}
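// Illustrative sketch of the two common TextEdit shapes described above: an
// insertion uses an empty range (start == end) and a deletion uses an empty
// NewText. Range and Position are assumed to be the definitions used elsewhere
// in this package.
func exampleTextEdits() []TextEdit {
	insertAtTop := TextEdit{
		Range:   Range{Start: Position{Line: 0, Character: 0}, End: Position{Line: 0, Character: 0}},
		NewText: "// header comment\n",
	}
	deleteFirstLine := TextEdit{
		Range:   Range{Start: Position{Line: 0, Character: 0}, End: Position{Line: 1, Character: 0}},
		NewText: "", // an empty string deletes the covered range
	}
	return []TextEdit{insertAtTop, deleteFirstLine}
}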
-type (
- TokenFormat string
- TraceValue string
-)
-
-// created for Tuple
-type Tuple_ParameterInformation_label_Item1 struct {
- Fld0 uint32 `json:"fld0"`
- Fld1 uint32 `json:"fld1"`
-}
-
-// Since 3.6.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeDefinitionClientCapabilities
-type TypeDefinitionClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `TypeDefinitionRegistrationOptions` return value
- // for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports additional metadata in the form of definition links.
- //
- // Since 3.14.0
- LinkSupport bool `json:"linkSupport,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeDefinitionOptions
-type TypeDefinitionOptions struct {
- WorkDoneProgressOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeDefinitionParams
-type TypeDefinitionParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeDefinitionRegistrationOptions
-type TypeDefinitionRegistrationOptions struct {
- TextDocumentRegistrationOptions
- TypeDefinitionOptions
- StaticRegistrationOptions
-}
-
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyClientCapabilities
-type TypeHierarchyClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyItem
-type TypeHierarchyItem struct {
- // The name of this item.
- Name string `json:"name"`
- // The kind of this item.
- Kind SymbolKind `json:"kind"`
- // Tags for this item.
- Tags []SymbolTag `json:"tags,omitempty"`
- // More detail for this item, e.g. the signature of a function.
- Detail string `json:"detail,omitempty"`
- // The resource identifier of this item.
- URI DocumentURI `json:"uri"`
- // The range enclosing this symbol not including leading/trailing whitespace
- // but everything else, e.g. comments and code.
- Range Range `json:"range"`
- // The range that should be selected and revealed when this symbol is being
- // picked, e.g. the name of a function. Must be contained by the
- // {@link TypeHierarchyItem.range `range`}.
- SelectionRange Range `json:"selectionRange"`
- // A data entry field that is preserved between a type hierarchy prepare and
- // supertypes or subtypes requests. It could also be used to identify the
- // type hierarchy in the server, helping improve the performance on
- // resolving supertypes and subtypes.
- Data interface{} `json:"data,omitempty"`
-}
-
-// Type hierarchy options used during static registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyOptions
-type TypeHierarchyOptions struct {
- WorkDoneProgressOptions
-}
-
-// The parameter of a `textDocument/prepareTypeHierarchy` request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyPrepareParams
-type TypeHierarchyPrepareParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// Type hierarchy options used during static or dynamic registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyRegistrationOptions
-type TypeHierarchyRegistrationOptions struct {
- TextDocumentRegistrationOptions
- TypeHierarchyOptions
- StaticRegistrationOptions
-}
-
-// The parameter of a `typeHierarchy/subtypes` request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchySubtypesParams
-type TypeHierarchySubtypesParams struct {
- Item TypeHierarchyItem `json:"item"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// The parameter of a `typeHierarchy/supertypes` request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchySupertypesParams
-type TypeHierarchySupertypesParams struct {
- Item TypeHierarchyItem `json:"item"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// A diagnostic report indicating that the last returned
-// report is still accurate.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#unchangedDocumentDiagnosticReport
-type UnchangedDocumentDiagnosticReport struct {
- // A document diagnostic report indicating
- // no changes to the last result. A server can
- // only return `unchanged` if result ids are
- // provided.
- Kind string `json:"kind"`
- // A result id which will be sent on the next
- // diagnostic request for the same document.
- ResultID string `json:"resultId"`
-}
-
-// Moniker uniqueness level to define scope of the moniker.
-//
-// @since 3.16.0
-type UniquenessLevel string
-
-// General parameters to unregister a request or notification.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#unregistration
-type Unregistration struct {
- // The id used to unregister the request or notification. Usually an id
- // provided during the register request.
- ID string `json:"id"`
- // The method to unregister for.
- Method string `json:"method"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#unregistrationParams
-type UnregistrationParams struct {
- Unregisterations []Unregistration `json:"unregisterations"`
-}
-
-// A versioned notebook document identifier.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#versionedNotebookDocumentIdentifier
-type VersionedNotebookDocumentIdentifier struct {
- // The version number of this notebook document.
- Version int32 `json:"version"`
- // The notebook document's uri.
- URI URI `json:"uri"`
-}
-
-// A text document identifier to denote a specific version of a text document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#versionedTextDocumentIdentifier
-type VersionedTextDocumentIdentifier struct {
- // The version number of this document.
- Version int32 `json:"version"`
- TextDocumentIdentifier
-}
-type (
-	WatchKind = uint32
-	// The parameters sent in a will save text document notification.
-	//
-	// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#willSaveTextDocumentParams
- WillSaveTextDocumentParams struct {
- // The document that will be saved.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The 'TextDocumentSaveReason'.
- Reason TextDocumentSaveReason `json:"reason"`
- }
-)
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#windowClientCapabilities
-type WindowClientCapabilities struct {
- // It indicates whether the client supports server initiated
- // progress using the `window/workDoneProgress/create` request.
- //
-	// The capability also controls whether the client supports handling
-	// of progress notifications. If set, servers are allowed to report a
- // `workDoneProgress` property in the request specific server
- // capabilities.
- //
- // @since 3.15.0
- WorkDoneProgress bool `json:"workDoneProgress,omitempty"`
- // Capabilities specific to the showMessage request.
- //
- // @since 3.16.0
- ShowMessage *ShowMessageRequestClientCapabilities `json:"showMessage,omitempty"`
- // Capabilities specific to the showDocument request.
- //
- // @since 3.16.0
- ShowDocument *ShowDocumentClientCapabilities `json:"showDocument,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressBegin
-type WorkDoneProgressBegin struct {
- Kind string `json:"kind"`
- // Mandatory title of the progress operation. Used to briefly inform about
- // the kind of operation being performed.
- //
- // Examples: "Indexing" or "Linking dependencies".
- Title string `json:"title"`
- // Controls if a cancel button should show to allow the user to cancel the
- // long running operation. Clients that don't support cancellation are allowed
- // to ignore the setting.
- Cancellable bool `json:"cancellable,omitempty"`
- // Optional, more detailed associated progress message. Contains
- // complementary information to the `title`.
- //
- // Examples: "3/25 files", "project/src/module2", "node_modules/some_dep".
- // If unset, the previous progress message (if any) is still valid.
- Message string `json:"message,omitempty"`
- // Optional progress percentage to display (value 100 is considered 100%).
- // If not provided infinite progress is assumed and clients are allowed
-	// to ignore the `percentage` value in subsequent report notifications.
- //
- // The value should be steadily rising. Clients are free to ignore values
- // that are not following this rule. The value range is [0, 100].
- Percentage uint32 `json:"percentage,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressCancelParams
-type WorkDoneProgressCancelParams struct {
- // The token to be used to report progress.
- Token ProgressToken `json:"token"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressCreateParams
-type WorkDoneProgressCreateParams struct {
- // The token to be used to report progress.
- Token ProgressToken `json:"token"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressEnd
-type WorkDoneProgressEnd struct {
- Kind string `json:"kind"`
-	// Optional, a final message indicating, for example, the outcome
- // of the operation.
- Message string `json:"message,omitempty"`
-}
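// Illustrative sketch of the begin -> report -> end payload sequence a server
// sends under a single $/progress token, following the percentage rules in the
// field comments above (steadily rising, 100 meaning done). The "begin",
// "report", and "end" kind strings follow the LSP spec.
func exampleProgressSequence() []any {
	return []any{
		WorkDoneProgressBegin{Kind: "begin", Title: "Indexing", Cancellable: true, Percentage: 0},
		WorkDoneProgressReport{Kind: "report", Message: "3/25 files", Percentage: 12},
		WorkDoneProgressReport{Kind: "report", Message: "20/25 files", Percentage: 80},
		WorkDoneProgressEnd{Kind: "end", Message: "Indexing finished"},
	}
}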
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressOptions
-type WorkDoneProgressOptions struct {
- WorkDoneProgress bool `json:"workDoneProgress,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressParams
-type WorkDoneProgressParams struct {
- // An optional token that a server can use to report work done progress.
- WorkDoneToken ProgressToken `json:"workDoneToken,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressReport
-type WorkDoneProgressReport struct {
- Kind string `json:"kind"`
- // Controls enablement state of a cancel button.
- //
- // Clients that don't support cancellation or don't support controlling the button's
- // enablement state are allowed to ignore the property.
- Cancellable bool `json:"cancellable,omitempty"`
- // Optional, more detailed associated progress message. Contains
- // complementary information to the `title`.
- //
- // Examples: "3/25 files", "project/src/module2", "node_modules/some_dep".
- // If unset, the previous progress message (if any) is still valid.
- Message string `json:"message,omitempty"`
- // Optional progress percentage to display (value 100 is considered 100%).
- // If not provided infinite progress is assumed and clients are allowed
-	// to ignore the `percentage` value in subsequent report notifications.
- //
- // The value should be steadily rising. Clients are free to ignore values
- // that are not following this rule. The value range is [0, 100]
- Percentage uint32 `json:"percentage,omitempty"`
-}
-
-// Workspace specific client capabilities.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceClientCapabilities
-type WorkspaceClientCapabilities struct {
- // The client supports applying batch edits
- // to the workspace by supporting the request
- // 'workspace/applyEdit'
- ApplyEdit bool `json:"applyEdit,omitempty"`
- // Capabilities specific to `WorkspaceEdit`s.
- WorkspaceEdit *WorkspaceEditClientCapabilities `json:"workspaceEdit,omitempty"`
- // Capabilities specific to the `workspace/didChangeConfiguration` notification.
- DidChangeConfiguration DidChangeConfigurationClientCapabilities `json:"didChangeConfiguration,omitempty"`
- // Capabilities specific to the `workspace/didChangeWatchedFiles` notification.
- DidChangeWatchedFiles DidChangeWatchedFilesClientCapabilities `json:"didChangeWatchedFiles,omitempty"`
- // Capabilities specific to the `workspace/symbol` request.
- Symbol *WorkspaceSymbolClientCapabilities `json:"symbol,omitempty"`
- // Capabilities specific to the `workspace/executeCommand` request.
- ExecuteCommand *ExecuteCommandClientCapabilities `json:"executeCommand,omitempty"`
- // The client has support for workspace folders.
- //
- // @since 3.6.0
- WorkspaceFolders bool `json:"workspaceFolders,omitempty"`
- // The client supports `workspace/configuration` requests.
- //
- // @since 3.6.0
- Configuration bool `json:"configuration,omitempty"`
- // Capabilities specific to the semantic token requests scoped to the
- // workspace.
- //
- // @since 3.16.0.
- SemanticTokens *SemanticTokensWorkspaceClientCapabilities `json:"semanticTokens,omitempty"`
- // Capabilities specific to the code lens requests scoped to the
- // workspace.
- //
- // @since 3.16.0.
- CodeLens *CodeLensWorkspaceClientCapabilities `json:"codeLens,omitempty"`
- // The client has support for file notifications/requests for user operations on files.
- //
- // Since 3.16.0
- FileOperations *FileOperationClientCapabilities `json:"fileOperations,omitempty"`
- // Capabilities specific to the inline values requests scoped to the
- // workspace.
- //
- // @since 3.17.0.
- InlineValue *InlineValueWorkspaceClientCapabilities `json:"inlineValue,omitempty"`
- // Capabilities specific to the inlay hint requests scoped to the
- // workspace.
- //
- // @since 3.17.0.
- InlayHint *InlayHintWorkspaceClientCapabilities `json:"inlayHint,omitempty"`
- // Capabilities specific to the diagnostic requests scoped to the
- // workspace.
- //
- // @since 3.17.0.
- Diagnostics *DiagnosticWorkspaceClientCapabilities `json:"diagnostics,omitempty"`
- // Capabilities specific to the folding range requests scoped to the workspace.
- //
- // @since 3.18.0
- // @proposed
- FoldingRange *FoldingRangeWorkspaceClientCapabilities `json:"foldingRange,omitempty"`
- // Capabilities specific to the `workspace/textDocumentContent` request.
- //
- // @since 3.18.0
- // @proposed
- TextDocumentContent *TextDocumentContentClientCapabilities `json:"textDocumentContent,omitempty"`
-}
-
-// Parameters of the workspace diagnostic request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceDiagnosticParams
-type WorkspaceDiagnosticParams struct {
- // The additional identifier provided during registration.
- Identifier string `json:"identifier,omitempty"`
- // The currently known diagnostic reports with their
- // previous result ids.
- PreviousResultIds []PreviousResultId `json:"previousResultIds"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// A workspace diagnostic report.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceDiagnosticReport
-type WorkspaceDiagnosticReport struct {
- Items []WorkspaceDocumentDiagnosticReport `json:"items"`
-}
-
-// A partial result for a workspace diagnostic report.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceDiagnosticReportPartialResult
-type WorkspaceDiagnosticReportPartialResult struct {
- Items []WorkspaceDocumentDiagnosticReport `json:"items"`
-}
-
-// A workspace diagnostic document report.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceDocumentDiagnosticReport
-type (
- WorkspaceDocumentDiagnosticReport = Or_WorkspaceDocumentDiagnosticReport // (alias)
- // A workspace edit represents changes to many resources managed in the workspace. The edit
- // should either provide `changes` or `documentChanges`. If documentChanges are present
- // they are preferred over `changes` if the client can handle versioned document edits.
- //
- // Since version 3.13.0 a workspace edit can contain resource operations as well. If resource
- // operations are present clients need to execute the operations in the order in which they
- // are provided. So a workspace edit for example can consist of the following two changes:
-	// (1) a create file a.txt and (2) a text document edit which inserts text into file a.txt.
- //
- // An invalid sequence (e.g. (1) delete file a.txt and (2) insert text into file a.txt) will
- // cause failure of the operation. How the client recovers from the failure is described by
- // the client capability: `workspace.workspaceEdit.failureHandling`
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceEdit
- WorkspaceEdit struct {
- // Holds changes to existing resources.
- Changes map[DocumentURI][]TextEdit `json:"changes,omitempty"`
- // Depending on the client capability `workspace.workspaceEdit.resourceOperations` document changes
- // are either an array of `TextDocumentEdit`s to express changes to n different text documents
- // where each text document edit addresses a specific version of a text document. Or it can contain
- // above `TextDocumentEdit`s mixed with create, rename and delete file / folder operations.
- //
- // Whether a client supports versioned document edits is expressed via
- // `workspace.workspaceEdit.documentChanges` client capability.
- //
- // If a client neither supports `documentChanges` nor `workspace.workspaceEdit.resourceOperations` then
- // only plain `TextEdit`s using the `changes` property are supported.
- DocumentChanges []DocumentChange `json:"documentChanges,omitempty"`
- // A map of change annotations that can be referenced in `AnnotatedTextEdit`s or create, rename and
- // delete file / folder operations.
- //
- // Whether clients honor this property depends on the client capability `workspace.changeAnnotationSupport`.
- //
- // @since 3.16.0
- ChangeAnnotations map[ChangeAnnotationIdentifier]ChangeAnnotation `json:"changeAnnotations,omitempty"`
- }
-)
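// Illustrative sketch of the simplest WorkspaceEdit shape described above: the
// `changes` map of plain TextEdits, suitable for clients that support neither
// `documentChanges` nor resource operations.
func exampleWorkspaceEdit(uri DocumentURI, edits []TextEdit) WorkspaceEdit {
	return WorkspaceEdit{
		Changes: map[DocumentURI][]TextEdit{uri: edits},
	}
}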
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceEditClientCapabilities
-type WorkspaceEditClientCapabilities struct {
- // The client supports versioned document changes in `WorkspaceEdit`s
- DocumentChanges bool `json:"documentChanges,omitempty"`
- // The resource operations the client supports. Clients should at least
- // support 'create', 'rename' and 'delete' files and folders.
- //
- // @since 3.13.0
- ResourceOperations []ResourceOperationKind `json:"resourceOperations,omitempty"`
- // The failure handling strategy of a client if applying the workspace edit
- // fails.
- //
- // @since 3.13.0
- FailureHandling *FailureHandlingKind `json:"failureHandling,omitempty"`
- // Whether the client normalizes line endings to the client specific
- // setting.
- // If set to `true` the client will normalize line ending characters
- // in a workspace edit to the client-specified new line
- // character.
- //
- // @since 3.16.0
- NormalizesLineEndings bool `json:"normalizesLineEndings,omitempty"`
- // Whether the client in general supports change annotations on text edits,
- // create file, rename file and delete file changes.
- //
- // @since 3.16.0
- ChangeAnnotationSupport *ChangeAnnotationsSupportOptions `json:"changeAnnotationSupport,omitempty"`
- // Whether the client supports `WorkspaceEditMetadata` in `WorkspaceEdit`s.
- //
- // @since 3.18.0
- // @proposed
- MetadataSupport bool `json:"metadataSupport,omitempty"`
- // Whether the client supports snippets as text edits.
- //
- // @since 3.18.0
- // @proposed
- SnippetEditSupport bool `json:"snippetEditSupport,omitempty"`
-}
-
-// Additional data about a workspace edit.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceEditMetadata
-type WorkspaceEditMetadata struct {
- // Signal to the editor that this edit is a refactoring.
- IsRefactoring bool `json:"isRefactoring,omitempty"`
-}
-
-// A workspace folder inside a client.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFolder
-type WorkspaceFolder struct {
- // The associated URI for this workspace folder.
- URI URI `json:"uri"`
- // The name of the workspace folder. Used to refer to this
- // workspace folder in the user interface.
- Name string `json:"name"`
-}
-
-// The workspace folder change event.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFoldersChangeEvent
-type WorkspaceFoldersChangeEvent struct {
- // The array of added workspace folders
- Added []WorkspaceFolder `json:"added"`
- // The array of the removed workspace folders
- Removed []WorkspaceFolder `json:"removed"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFoldersInitializeParams
-type WorkspaceFoldersInitializeParams struct {
- // The workspace folders configured in the client when the server starts.
- //
- // This property is only available if the client supports workspace folders.
- // It can be `null` if the client supports workspace folders but none are
- // configured.
- //
- // @since 3.6.0
- WorkspaceFolders []WorkspaceFolder `json:"workspaceFolders,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFoldersServerCapabilities
-type WorkspaceFoldersServerCapabilities struct {
- // The server has support for workspace folders
- Supported bool `json:"supported,omitempty"`
- // Whether the server wants to receive workspace folder
- // change notifications.
- //
- // If a string is provided the string is treated as an ID
- // under which the notification is registered on the client
- // side. The ID can be used to unregister for these events
- // using the `client/unregisterCapability` request.
- ChangeNotifications *Or_WorkspaceFoldersServerCapabilities_changeNotifications `json:"changeNotifications,omitempty"`
-}
-
-// A full document diagnostic report for a workspace diagnostic result.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFullDocumentDiagnosticReport
-type WorkspaceFullDocumentDiagnosticReport struct {
- // The URI for which diagnostic information is reported.
- URI DocumentURI `json:"uri"`
- // The version number for which the diagnostics are reported.
- // If the document is not marked as open `null` can be provided.
- Version int32 `json:"version"`
- FullDocumentDiagnosticReport
-}
-
-// Defines workspace specific capabilities of the server.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceOptions
-type WorkspaceOptions struct {
-	// The server supports workspace folders.
- //
- // @since 3.6.0
- WorkspaceFolders *WorkspaceFoldersServerCapabilities `json:"workspaceFolders,omitempty"`
- // The server is interested in notifications/requests for operations on files.
- //
- // @since 3.16.0
- FileOperations *FileOperationOptions `json:"fileOperations,omitempty"`
- // The server supports the `workspace/textDocumentContent` request.
- //
- // @since 3.18.0
- // @proposed
- TextDocumentContent *Or_WorkspaceOptions_textDocumentContent `json:"textDocumentContent,omitempty"`
-}
-
-// A special workspace symbol that supports locations without a range.
-//
-// See also SymbolInformation.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbol
-type WorkspaceSymbol struct {
- // The location of the symbol. Whether a server is allowed to
- // return a location without a range depends on the client
- // capability `workspace.symbol.resolveSupport`.
- //
- // See SymbolInformation#location for more details.
- Location Or_WorkspaceSymbol_location `json:"location"`
- // A data entry field that is preserved on a workspace symbol between a
- // workspace symbol request and a workspace symbol resolve request.
- Data interface{} `json:"data,omitempty"`
- BaseSymbolInformation
-}
-
-// Client capabilities for a {@link WorkspaceSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbolClientCapabilities
-type WorkspaceSymbolClientCapabilities struct {
- // Symbol request supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Specific capabilities for the `SymbolKind` in the `workspace/symbol` request.
- SymbolKind *ClientSymbolKindOptions `json:"symbolKind,omitempty"`
- // The client supports tags on `SymbolInformation`.
- // Clients supporting tags have to handle unknown tags gracefully.
- //
- // @since 3.16.0
- TagSupport *ClientSymbolTagOptions `json:"tagSupport,omitempty"`
-	// The client supports partial workspace symbols. The client will send the
- // request `workspaceSymbol/resolve` to the server to resolve additional
- // properties.
- //
- // @since 3.17.0
- ResolveSupport *ClientSymbolResolveOptions `json:"resolveSupport,omitempty"`
-}
-
-// Server capabilities for a {@link WorkspaceSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbolOptions
-type WorkspaceSymbolOptions struct {
- // The server provides support to resolve additional
- // information for a workspace symbol.
- //
- // @since 3.17.0
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link WorkspaceSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbolParams
-type WorkspaceSymbolParams struct {
- // A query string to filter symbols by. Clients may send an empty
- // string here to request all symbols.
- //
- // The `query`-parameter should be interpreted in a *relaxed way* as editors
- // will apply their own highlighting and scoring on the results. A good rule
- // of thumb is to match case-insensitive and to simply check that the
- // characters of *query* appear in their order in a candidate symbol.
- // Servers shouldn't use prefix, substring, or similar strict matching.
- Query string `json:"query"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link WorkspaceSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbolRegistrationOptions
-type WorkspaceSymbolRegistrationOptions struct {
- WorkspaceSymbolOptions
-}
-
-// An unchanged document diagnostic report for a workspace diagnostic result.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceUnchangedDocumentDiagnosticReport
-type WorkspaceUnchangedDocumentDiagnosticReport struct {
- // The URI for which diagnostic information is reported.
- URI DocumentURI `json:"uri"`
- // The version number for which the diagnostics are reported.
- // If the document is not marked as open `null` can be provided.
- Version int32 `json:"version"`
- UnchangedDocumentDiagnosticReport
-}
-
-// The initialize parameters
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#_InitializeParams
-type XInitializeParams struct {
- // The process Id of the parent process that started
- // the server.
- //
- // Is `null` if the process has not been started by another process.
- // If the parent process is not alive then the server should exit.
- ProcessID int32 `json:"processId"`
- // Information about the client
- //
- // @since 3.15.0
- ClientInfo *ClientInfo `json:"clientInfo,omitempty"`
- // The locale the client is currently showing the user interface
-	// in. It is not necessarily the locale of the operating
- // system.
- //
- // Uses IETF language tags as the value's syntax
- // (See https://en.wikipedia.org/wiki/IETF_language_tag)
- //
- // @since 3.16.0
- Locale string `json:"locale,omitempty"`
- // The rootPath of the workspace. Is null
- // if no folder is open.
- //
- // @deprecated in favour of rootUri.
- RootPath string `json:"rootPath,omitempty"`
- // The rootUri of the workspace. Is null if no
- // folder is open. If both `rootPath` and `rootUri` are set
- // `rootUri` wins.
- //
- // @deprecated in favour of workspaceFolders.
- RootURI DocumentURI `json:"rootUri"`
- // The capabilities provided by the client (editor or tool)
- Capabilities ClientCapabilities `json:"capabilities"`
- // User provided initialization options.
- InitializationOptions interface{} `json:"initializationOptions,omitempty"`
- // The initial trace setting. If omitted trace is disabled ('off').
- Trace *TraceValue `json:"trace,omitempty"`
- WorkDoneProgressParams
-}
-
-// The initialize parameters
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#_InitializeParams
-type _InitializeParams struct {
- // The process Id of the parent process that started
- // the server.
- //
- // Is `null` if the process has not been started by another process.
- // If the parent process is not alive then the server should exit.
- ProcessID int32 `json:"processId"`
- // Information about the client
- //
- // @since 3.15.0
- ClientInfo *ClientInfo `json:"clientInfo,omitempty"`
- // The locale the client is currently showing the user interface
-	// in. It is not necessarily the locale of the operating
- // system.
- //
- // Uses IETF language tags as the value's syntax
- // (See https://en.wikipedia.org/wiki/IETF_language_tag)
- //
- // @since 3.16.0
- Locale string `json:"locale,omitempty"`
- // The rootPath of the workspace. Is null
- // if no folder is open.
- //
- // @deprecated in favour of rootUri.
- RootPath string `json:"rootPath,omitempty"`
- // The rootUri of the workspace. Is null if no
- // folder is open. If both `rootPath` and `rootUri` are set
- // `rootUri` wins.
- //
- // @deprecated in favour of workspaceFolders.
- RootURI DocumentURI `json:"rootUri"`
- // The capabilities provided by the client (editor or tool)
- Capabilities ClientCapabilities `json:"capabilities"`
- // User provided initialization options.
- InitializationOptions interface{} `json:"initializationOptions,omitempty"`
- // The initial trace setting. If omitted trace is disabled ('off').
- Trace *TraceValue `json:"trace,omitempty"`
- WorkDoneProgressParams
-}
-
-const (
- // A set of predefined code action kinds
- // Empty kind.
- Empty CodeActionKind = ""
- // Base kind for quickfix actions: 'quickfix'
- QuickFix CodeActionKind = "quickfix"
- // Base kind for refactoring actions: 'refactor'
- Refactor CodeActionKind = "refactor"
- // Base kind for refactoring extraction actions: 'refactor.extract'
- //
- // Example extract actions:
- //
- //
- // - Extract method
- // - Extract function
- // - Extract variable
- // - Extract interface from class
- // - ...
- RefactorExtract CodeActionKind = "refactor.extract"
- // Base kind for refactoring inline actions: 'refactor.inline'
- //
- // Example inline actions:
- //
- //
- // - Inline function
- // - Inline variable
- // - Inline constant
- // - ...
- RefactorInline CodeActionKind = "refactor.inline"
- // Base kind for refactoring move actions: `refactor.move`
- //
- // Example move actions:
- //
- //
- // - Move a function to a new file
- // - Move a property between classes
- // - Move method to base class
- // - ...
- //
- // @since 3.18.0
- // @proposed
- RefactorMove CodeActionKind = "refactor.move"
- // Base kind for refactoring rewrite actions: 'refactor.rewrite'
- //
- // Example rewrite actions:
- //
- //
- // - Convert JavaScript function to class
- // - Add or remove parameter
- // - Encapsulate field
- // - Make method static
- // - Move method to base class
- // - ...
- RefactorRewrite CodeActionKind = "refactor.rewrite"
- // Base kind for source actions: `source`
- //
- // Source code actions apply to the entire file.
- Source CodeActionKind = "source"
- // Base kind for an organize imports source action: `source.organizeImports`
- SourceOrganizeImports CodeActionKind = "source.organizeImports"
- // Base kind for auto-fix source actions: `source.fixAll`.
- //
-	// Fix all actions automatically fix errors that have a clear fix and do not require user input.
- // They should not suppress errors or perform unsafe fixes such as generating new types or classes.
- //
- // @since 3.15.0
- SourceFixAll CodeActionKind = "source.fixAll"
- // Base kind for all code actions applying to the entire notebook's scope. CodeActionKinds using
- // this should always begin with `notebook.`
- //
- // @since 3.18.0
- Notebook CodeActionKind = "notebook"
- // The reason why code actions were requested.
- //
- // @since 3.17.0
- // Code actions were explicitly requested by the user or by an extension.
- CodeActionInvoked CodeActionTriggerKind = 1
- // Code actions were requested automatically.
- //
- // This typically happens when current selection in a file changes, but can
- // also be triggered when file content changes.
- CodeActionAutomatic CodeActionTriggerKind = 2
- // The kind of a completion entry.
- TextCompletion CompletionItemKind = 1
- MethodCompletion CompletionItemKind = 2
- FunctionCompletion CompletionItemKind = 3
- ConstructorCompletion CompletionItemKind = 4
- FieldCompletion CompletionItemKind = 5
- VariableCompletion CompletionItemKind = 6
- ClassCompletion CompletionItemKind = 7
- InterfaceCompletion CompletionItemKind = 8
- ModuleCompletion CompletionItemKind = 9
- PropertyCompletion CompletionItemKind = 10
- UnitCompletion CompletionItemKind = 11
- ValueCompletion CompletionItemKind = 12
- EnumCompletion CompletionItemKind = 13
- KeywordCompletion CompletionItemKind = 14
- SnippetCompletion CompletionItemKind = 15
- ColorCompletion CompletionItemKind = 16
- FileCompletion CompletionItemKind = 17
- ReferenceCompletion CompletionItemKind = 18
- FolderCompletion CompletionItemKind = 19
- EnumMemberCompletion CompletionItemKind = 20
- ConstantCompletion CompletionItemKind = 21
- StructCompletion CompletionItemKind = 22
- EventCompletion CompletionItemKind = 23
- OperatorCompletion CompletionItemKind = 24
- TypeParameterCompletion CompletionItemKind = 25
- // Completion item tags are extra annotations that tweak the rendering of a completion
- // item.
- //
- // @since 3.15.0
- // Render a completion as obsolete, usually using a strike-out.
- ComplDeprecated CompletionItemTag = 1
- // How a completion was triggered
- // Completion was triggered by typing an identifier (24x7 code
-	// complete), manual invocation (e.g. Ctrl+Space) or via API.
- Invoked CompletionTriggerKind = 1
- // Completion was triggered by a trigger character specified by
- // the `triggerCharacters` properties of the `CompletionRegistrationOptions`.
- TriggerCharacter CompletionTriggerKind = 2
-	// Completion was re-triggered as the current completion list is incomplete
- TriggerForIncompleteCompletions CompletionTriggerKind = 3
- // The diagnostic's severity.
- // Reports an error.
- SeverityError DiagnosticSeverity = 1
- // Reports a warning.
- SeverityWarning DiagnosticSeverity = 2
-	// Reports an informational message.
- SeverityInformation DiagnosticSeverity = 3
- // Reports a hint.
- SeverityHint DiagnosticSeverity = 4
- // The diagnostic tags.
- //
- // @since 3.15.0
- // Unused or unnecessary code.
- //
- // Clients are allowed to render diagnostics with this tag faded out instead of having
- // an error squiggle.
- Unnecessary DiagnosticTag = 1
- // Deprecated or obsolete code.
- //
-	// Clients are allowed to render diagnostics with this tag struck through.
- Deprecated DiagnosticTag = 2
- // The document diagnostic report kinds.
- //
- // @since 3.17.0
- // A diagnostic report with a full
- // set of problems.
- DiagnosticFull DocumentDiagnosticReportKind = "full"
- // A report indicating that the last
- // returned report is still accurate.
- DiagnosticUnchanged DocumentDiagnosticReportKind = "unchanged"
- // A document highlight kind.
- // A textual occurrence.
- Text DocumentHighlightKind = 1
- // Read-access of a symbol, like reading a variable.
- Read DocumentHighlightKind = 2
- // Write-access of a symbol, like writing to a variable.
- Write DocumentHighlightKind = 3
- // Predefined error codes.
- ParseError ErrorCodes = -32700
- InvalidRequest ErrorCodes = -32600
- MethodNotFound ErrorCodes = -32601
- InvalidParams ErrorCodes = -32602
- InternalError ErrorCodes = -32603
- // Error code indicating that a server received a notification or
- // request before the server has received the `initialize` request.
- ServerNotInitialized ErrorCodes = -32002
- UnknownErrorCode ErrorCodes = -32001
- // Applying the workspace change is simply aborted if one of the changes provided
- // fails. All operations executed before the failing operation stay executed.
- Abort FailureHandlingKind = "abort"
-	// All operations are executed transactionally. That means they either all
- // succeed or no changes at all are applied to the workspace.
- Transactional FailureHandlingKind = "transactional"
-	// If the workspace edit contains only textual file changes, they are executed transactionally.
- // If resource changes (create, rename or delete file) are part of the change the failure
- // handling strategy is abort.
- TextOnlyTransactional FailureHandlingKind = "textOnlyTransactional"
- // The client tries to undo the operations already executed. But there is no
-	// guarantee that this will succeed.
- Undo FailureHandlingKind = "undo"
- // The file event type
- // The file got created.
- Created FileChangeType = 1
- // The file got changed.
- Changed FileChangeType = 2
- // The file got deleted.
- Deleted FileChangeType = 3
-	// A pattern kind describing if a glob pattern matches a file, a folder or
- // both.
- //
- // @since 3.16.0
- // The pattern matches a file only.
- FilePattern FileOperationPatternKind = "file"
- // The pattern matches a folder only.
- FolderPattern FileOperationPatternKind = "folder"
- // A set of predefined range kinds.
- // Folding range for a comment
- Comment FoldingRangeKind = "comment"
- // Folding range for an import or include
- Imports FoldingRangeKind = "imports"
- // Folding range for a region (e.g. `#region`)
- Region FoldingRangeKind = "region"
- // Inlay hint kinds.
- //
- // @since 3.17.0
-	// An inlay hint that is for a type annotation.
- Type InlayHintKind = 1
- // An inlay hint that is for a parameter.
- Parameter InlayHintKind = 2
- // Describes how an {@link InlineCompletionItemProvider inline completion provider} was triggered.
- //
- // @since 3.18.0
- // @proposed
- // Completion was triggered explicitly by a user gesture.
- InlineInvoked InlineCompletionTriggerKind = 1
- // Completion was triggered automatically while editing.
- InlineAutomatic InlineCompletionTriggerKind = 2
- // Defines whether the insert text in a completion item should be interpreted as
- // plain text or a snippet.
- // The primary text to be inserted is treated as a plain string.
- PlainTextTextFormat InsertTextFormat = 1
- // The primary text to be inserted is treated as a snippet.
- //
- // A snippet can define tab stops and placeholders with `$1`, `$2`
- // and `${3:foo}`. `$0` defines the final tab stop, it defaults to
- // the end of the snippet. Placeholders with equal identifiers are linked,
- // that is typing in one will update others too.
- //
- // See also: https://microsoft.github.io/language-server-protocol/specifications/specification-current/#snippet_syntax
- SnippetTextFormat InsertTextFormat = 2
- // How whitespace and indentation is handled during completion
- // item insertion.
- //
- // @since 3.16.0
-	// The insertion or replace string is taken as is. If the
- // value is multi line the lines below the cursor will be
- // inserted using the indentation defined in the string value.
- // The client will not apply any kind of adjustments to the
- // string.
- AsIs InsertTextMode = 1
- // The editor adjusts leading whitespace of new lines so that
- // they match the indentation up to the cursor of the line for
- // which the item is accepted.
- //
-	// Consider a line like this: <2tabs><3tabs>foo. Accepting a
-	// multi-line completion item that is indented using 2 tabs means all
-	// following inserted lines will be indented using 2 tabs as well.
- AdjustIndentation InsertTextMode = 2
-	// A request failed but it was syntactically correct, e.g. the
- // method name was known and the parameters were valid. The error
- // message should contain human readable information about why
- // the request failed.
- //
- // @since 3.17.0
- RequestFailed LSPErrorCodes = -32803
- // The server cancelled the request. This error code should
- // only be used for requests that explicitly support being
- // server cancellable.
- //
- // @since 3.17.0
- ServerCancelled LSPErrorCodes = -32802
- // The server detected that the content of a document got
- // modified outside normal conditions. A server should
- // NOT send this error code if it detects a content change
-	// in its unprocessed messages. The result, even if computed
-	// on an older state, might still be useful for the client.
- //
- // If a client decides that a result is not of any use anymore
- // the client should cancel the request.
- ContentModified LSPErrorCodes = -32801
- // The client has canceled a request and a server has detected
- // the cancel.
- RequestCancelled LSPErrorCodes = -32800
- // Predefined Language kinds
- // @since 3.18.0
- // @proposed
- LangABAP LanguageKind = "abap"
- LangWindowsBat LanguageKind = "bat"
- LangBibTeX LanguageKind = "bibtex"
- LangClojure LanguageKind = "clojure"
- LangCoffeescript LanguageKind = "coffeescript"
- LangC LanguageKind = "c"
- LangCPP LanguageKind = "cpp"
- LangCSharp LanguageKind = "csharp"
- LangCSS LanguageKind = "css"
- // @since 3.18.0
- // @proposed
- LangD LanguageKind = "d"
- // @since 3.18.0
- // @proposed
- LangDelphi LanguageKind = "pascal"
- LangDiff LanguageKind = "diff"
- LangDart LanguageKind = "dart"
- LangDockerfile LanguageKind = "dockerfile"
- LangElixir LanguageKind = "elixir"
- LangErlang LanguageKind = "erlang"
- LangFSharp LanguageKind = "fsharp"
- LangGitCommit LanguageKind = "git-commit"
- LangGitRebase LanguageKind = "rebase"
- LangGo LanguageKind = "go"
- LangGroovy LanguageKind = "groovy"
- LangHandlebars LanguageKind = "handlebars"
- LangHaskell LanguageKind = "haskell"
- LangHTML LanguageKind = "html"
- LangIni LanguageKind = "ini"
- LangJava LanguageKind = "java"
- LangJavaScript LanguageKind = "javascript"
- LangJavaScriptReact LanguageKind = "javascriptreact"
- LangJSON LanguageKind = "json"
- LangLaTeX LanguageKind = "latex"
- LangLess LanguageKind = "less"
- LangLua LanguageKind = "lua"
- LangMakefile LanguageKind = "makefile"
- LangMarkdown LanguageKind = "markdown"
- LangObjectiveC LanguageKind = "objective-c"
- LangObjectiveCPP LanguageKind = "objective-cpp"
- // @since 3.18.0
- // @proposed
- LangPascal LanguageKind = "pascal"
- LangPerl LanguageKind = "perl"
- LangPerl6 LanguageKind = "perl6"
- LangPHP LanguageKind = "php"
- LangPowershell LanguageKind = "powershell"
- LangPug LanguageKind = "jade"
- LangPython LanguageKind = "python"
- LangR LanguageKind = "r"
- LangRazor LanguageKind = "razor"
- LangRuby LanguageKind = "ruby"
- LangRust LanguageKind = "rust"
- LangSCSS LanguageKind = "scss"
- LangSASS LanguageKind = "sass"
- LangScala LanguageKind = "scala"
- LangShaderLab LanguageKind = "shaderlab"
- LangShellScript LanguageKind = "shellscript"
- LangSQL LanguageKind = "sql"
- LangSwift LanguageKind = "swift"
- LangTypeScript LanguageKind = "typescript"
- LangTypeScriptReact LanguageKind = "typescriptreact"
- LangTeX LanguageKind = "tex"
- LangVisualBasic LanguageKind = "vb"
- LangXML LanguageKind = "xml"
- LangXSL LanguageKind = "xsl"
- LangYAML LanguageKind = "yaml"
- // Describes the content type that a client supports in various
- // result literals like `Hover`, `ParameterInfo` or `CompletionItem`.
- //
-	// Please note that `MarkupKinds` must not start with a `$`. These kinds
- // are reserved for internal usage.
- // Plain text is supported as a content format
- PlainText MarkupKind = "plaintext"
- // Markdown is supported as a content format
- Markdown MarkupKind = "markdown"
- // The message type
- // An error message.
- Error MessageType = 1
- // A warning message.
- Warning MessageType = 2
- // An information message.
- Info MessageType = 3
- // A log message.
- Log MessageType = 4
- // A debug message.
- //
- // @since 3.18.0
- // @proposed
- Debug MessageType = 5
- // The moniker kind.
- //
- // @since 3.16.0
-	// The moniker represents a symbol that is imported into a project
- Import MonikerKind = "import"
- // The moniker represents a symbol that is exported from a project
- Export MonikerKind = "export"
- // The moniker represents a symbol that is local to a project (e.g. a local
- // variable of a function, a class not visible outside the project, ...)
- Local MonikerKind = "local"
- // A notebook cell kind.
- //
- // @since 3.17.0
- // A markup-cell is formatted source that is used for display.
- Markup NotebookCellKind = 1
- // A code-cell is source code.
- Code NotebookCellKind = 2
- // A set of predefined position encoding kinds.
- //
- // @since 3.17.0
- // Character offsets count UTF-8 code units (e.g. bytes).
- UTF8 PositionEncodingKind = "utf-8"
- // Character offsets count UTF-16 code units.
- //
- // This is the default and must always be supported
- // by servers
- UTF16 PositionEncodingKind = "utf-16"
- // Character offsets count UTF-32 code units.
- //
- // Implementation note: these are the same as Unicode codepoints,
- // so this `PositionEncodingKind` may also be used for an
- // encoding-agnostic representation of character offsets.
- UTF32 PositionEncodingKind = "utf-32"
- // The client's default behavior is to select the identifier
-	// according to the language's syntax rule.
- Identifier PrepareSupportDefaultBehavior = 1
- // Supports creating new files and folders.
- Create ResourceOperationKind = "create"
- // Supports renaming existing files and folders.
- Rename ResourceOperationKind = "rename"
- // Supports deleting existing files and folders.
- Delete ResourceOperationKind = "delete"
-	// A set of predefined token modifiers. This set is not fixed,
-	// and clients can specify additional token modifiers via the
- // corresponding client capabilities.
- //
- // @since 3.16.0
- ModDeclaration SemanticTokenModifiers = "declaration"
- ModDefinition SemanticTokenModifiers = "definition"
- ModReadonly SemanticTokenModifiers = "readonly"
- ModStatic SemanticTokenModifiers = "static"
- ModDeprecated SemanticTokenModifiers = "deprecated"
- ModAbstract SemanticTokenModifiers = "abstract"
- ModAsync SemanticTokenModifiers = "async"
- ModModification SemanticTokenModifiers = "modification"
- ModDocumentation SemanticTokenModifiers = "documentation"
- ModDefaultLibrary SemanticTokenModifiers = "defaultLibrary"
-	// A set of predefined token types. This set is not fixed,
-	// and clients can specify additional token types via the
- // corresponding client capabilities.
- //
- // @since 3.16.0
- NamespaceType SemanticTokenTypes = "namespace"
- // Represents a generic type. Acts as a fallback for types which can't be mapped to
- // a specific type like class or enum.
- TypeType SemanticTokenTypes = "type"
- ClassType SemanticTokenTypes = "class"
- EnumType SemanticTokenTypes = "enum"
- InterfaceType SemanticTokenTypes = "interface"
- StructType SemanticTokenTypes = "struct"
- TypeParameterType SemanticTokenTypes = "typeParameter"
- ParameterType SemanticTokenTypes = "parameter"
- VariableType SemanticTokenTypes = "variable"
- PropertyType SemanticTokenTypes = "property"
- EnumMemberType SemanticTokenTypes = "enumMember"
- EventType SemanticTokenTypes = "event"
- FunctionType SemanticTokenTypes = "function"
- MethodType SemanticTokenTypes = "method"
- MacroType SemanticTokenTypes = "macro"
- KeywordType SemanticTokenTypes = "keyword"
- ModifierType SemanticTokenTypes = "modifier"
- CommentType SemanticTokenTypes = "comment"
- StringType SemanticTokenTypes = "string"
- NumberType SemanticTokenTypes = "number"
- RegexpType SemanticTokenTypes = "regexp"
- OperatorType SemanticTokenTypes = "operator"
- // @since 3.17.0
- DecoratorType SemanticTokenTypes = "decorator"
- // @since 3.18.0
- LabelType SemanticTokenTypes = "label"
- // How a signature help was triggered.
- //
- // @since 3.15.0
- // Signature help was invoked manually by the user or by a command.
- SigInvoked SignatureHelpTriggerKind = 1
- // Signature help was triggered by a trigger character.
- SigTriggerCharacter SignatureHelpTriggerKind = 2
- // Signature help was triggered by the cursor moving or by the document content changing.
- SigContentChange SignatureHelpTriggerKind = 3
- // A symbol kind.
- File SymbolKind = 1
- Module SymbolKind = 2
- Namespace SymbolKind = 3
- Package SymbolKind = 4
- Class SymbolKind = 5
- Method SymbolKind = 6
- Property SymbolKind = 7
- Field SymbolKind = 8
- Constructor SymbolKind = 9
- Enum SymbolKind = 10
- Interface SymbolKind = 11
- Function SymbolKind = 12
- Variable SymbolKind = 13
- Constant SymbolKind = 14
- String SymbolKind = 15
- Number SymbolKind = 16
- Boolean SymbolKind = 17
- Array SymbolKind = 18
- Object SymbolKind = 19
- Key SymbolKind = 20
- Null SymbolKind = 21
- EnumMember SymbolKind = 22
- Struct SymbolKind = 23
- Event SymbolKind = 24
- Operator SymbolKind = 25
- TypeParameter SymbolKind = 26
- // Symbol tags are extra annotations that tweak the rendering of a symbol.
- //
- // @since 3.16
- // Render a symbol as obsolete, usually using a strike-out.
- DeprecatedSymbol SymbolTag = 1
- // Represents reasons why a text document is saved.
- // Manually triggered, e.g. by the user pressing save, by starting debugging,
- // or by an API call.
- Manual TextDocumentSaveReason = 1
- // Automatic after a delay.
- AfterDelay TextDocumentSaveReason = 2
- // When the editor lost focus.
- FocusOut TextDocumentSaveReason = 3
- // Defines how the host (editor) should sync
- // document changes to the language server.
- // Documents should not be synced at all.
- None TextDocumentSyncKind = 0
- // Documents are synced by always sending the full content
- // of the document.
- Full TextDocumentSyncKind = 1
- // Documents are synced by sending the full content on open.
- // After that only incremental updates to the document are
-	// sent.
- Incremental TextDocumentSyncKind = 2
- Relative TokenFormat = "relative"
- // Turn tracing off.
- Off TraceValue = "off"
- // Trace messages only.
- Messages TraceValue = "messages"
- // Verbose message tracing.
- Verbose TraceValue = "verbose"
- // Moniker uniqueness level to define scope of the moniker.
- //
- // @since 3.16.0
- // The moniker is only unique inside a document
- Document UniquenessLevel = "document"
- // The moniker is unique inside a project for which a dump got created
- Project UniquenessLevel = "project"
- // The moniker is unique inside the group to which a project belongs
- Group UniquenessLevel = "group"
- // The moniker is unique inside the moniker scheme.
- Scheme UniquenessLevel = "scheme"
- // The moniker is globally unique
- Global UniquenessLevel = "global"
- // Interested in create events.
- WatchCreate WatchKind = 1
- // Interested in change events
- WatchChange WatchKind = 2
- // Interested in delete events
- WatchDelete WatchKind = 4
-)
diff --git a/internal/lsp/protocol/uri.go b/internal/lsp/protocol/uri.go
deleted file mode 100644
index ccc45f23e46b3ea41ac28c525eca6c39c201695e..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol/uri.go
+++ /dev/null
@@ -1,229 +0,0 @@
-// Copyright 2023 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package protocol
-
-// This file declares URI, DocumentUri, and its methods.
-//
-// For the LSP definition of these types, see
-// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#uri
-
-import (
- "fmt"
- "net/url"
- "path/filepath"
- "strings"
- "unicode"
-)
-
-// A DocumentURI is the URI of a client editor document.
-//
-// According to the LSP specification:
-//
-// Care should be taken to handle encoding in URIs. For
-// example, some clients (such as VS Code) may encode colons
-// in drive letters while others do not. The URIs below are
-// both valid, but clients and servers should be consistent
-// with the form they use themselves to ensure the other party
-// doesn’t interpret them as distinct URIs. Clients and
-// servers should not assume that each other are encoding the
-// same way (for example a client encoding colons in drive
-// letters cannot assume server responses will have encoded
-// colons). The same applies to casing of drive letters - one
-// party should not assume the other party will return paths
-// with drive letters cased the same as it.
-//
-// file:///c:/project/readme.md
-// file:///C%3A/project/readme.md
-//
-// This is done during JSON unmarshalling;
-// see [DocumentURI.UnmarshalText] for details.
-type DocumentURI string
-
-// A URI is an arbitrary URL (e.g. https), not necessarily a file.
-type URI = string
-
-// UnmarshalText implements decoding of DocumentUri values.
-//
-// In particular, it implements a systematic correction of various odd
-// features of the definition of DocumentUri in the LSP spec that
-// appear to be workarounds for bugs in VS Code. For example, it may
-// URI-encode the URI itself, so that colon becomes %3A, and it may
-// send file://foo.go URIs that have two slashes (not three) and no
-// hostname.
-//
-// We use UnmarshalText, not UnmarshalJSON, because it is called even
-// for non-addressable values such as keys and values of map[K]V,
-// where there is no pointer of type *K or *V on which to call
-// UnmarshalJSON. (See Go issue #28189 for more detail.)
-//
-// Non-empty DocumentUris are valid "file"-scheme URIs.
-// The empty DocumentUri is valid.
-func (uri *DocumentURI) UnmarshalText(data []byte) (err error) {
- *uri, err = ParseDocumentURI(string(data))
- return
-}
-
-// Path returns the file path for the given URI.
-//
-// DocumentUri("").Path() returns the empty string.
-//
-// Path returns an error if called on a URI that is not a valid filename.
-func (uri DocumentURI) Path() (string, error) {
- filename, err := filename(uri)
- if err != nil {
- // e.g. ParseRequestURI failed.
- //
- // This can only affect DocumentUris created by
- // direct string manipulation; all DocumentUris
- // received from the client pass through
- // ParseRequestURI, which ensures validity.
- return "", fmt.Errorf("invalid URI %q: %w", uri, err)
- }
- return filepath.FromSlash(filename), nil
-}
-
-// Dir returns the URI for the directory containing the receiver.
-func (uri DocumentURI) Dir() (DocumentURI, error) {
- // XXX: Legacy comment:
- // This function could be more efficiently implemented by avoiding any call
- // to Path(), but at least consolidates URI manipulation.
-
- path, err := uri.DirPath()
- if err != nil {
- return "", fmt.Errorf("invalid URI %q: %w", uri, err)
- }
-
- return URIFromPath(path), nil
-}
-
-// DirPath returns the file path to the directory containing this URI, which
-// must be a file URI.
-func (uri DocumentURI) DirPath() (string, error) {
- path, err := uri.Path()
- if err != nil {
- return "", err
- }
- return filepath.Dir(path), nil
-}
-
-func filename(uri DocumentURI) (string, error) {
- if uri == "" {
- return "", nil
- }
-
- // This conservative check for the common case
- // of a simple non-empty absolute POSIX filename
- // avoids the allocation of a net.URL.
- if strings.HasPrefix(string(uri), "file:///") {
- rest := string(uri)[len("file://"):] // leave one slash
- for i := range len(rest) {
- b := rest[i]
- // Reject these cases:
- if b < ' ' || b == 0x7f || // control character
- b == '%' || b == '+' || // URI escape
- b == ':' || // Windows drive letter
- b == '@' || b == '&' || b == '?' { // authority or query
- goto slow
- }
- }
- return rest, nil
- }
-slow:
-
- u, err := url.ParseRequestURI(string(uri))
- if err != nil {
- return "", err
- }
- if u.Scheme != fileScheme {
- return "", fmt.Errorf("only file URIs are supported, got %q from %q", u.Scheme, uri)
- }
- // If the URI is a Windows URI, we trim the leading "/" and uppercase
- // the drive letter, which will never be case sensitive.
- if isWindowsDriveURIPath(u.Path) {
- u.Path = strings.ToUpper(string(u.Path[1])) + u.Path[2:]
- }
-
- return u.Path, nil
-}
-
-// ParseDocumentURI interprets a string as a DocumentUri, applying VS
-// Code workarounds; see [DocumentURI.UnmarshalText] for details.
-func ParseDocumentURI(s string) (DocumentURI, error) {
- if s == "" {
- return "", nil
- }
-
- if !strings.HasPrefix(s, "file://") {
- return "", fmt.Errorf("DocumentUri scheme is not 'file': %s", s)
- }
-
- // VS Code sends URLs with only two slashes,
- // which are invalid. golang/go#39789.
- if !strings.HasPrefix(s, "file:///") {
- s = "file:///" + s[len("file://"):]
- }
-
- // Even though the input is a URI, it may not be in canonical form. VS Code
- // in particular over-escapes :, @, etc. Unescape and re-encode to canonicalize.
- path, err := url.PathUnescape(s[len("file://"):])
- if err != nil {
- return "", err
- }
-
- // File URIs from Windows may have lowercase drive letters.
- // Since drive letters are guaranteed to be case insensitive,
- // we change them to uppercase to remain consistent.
- // For example, file:///c:/x/y/z becomes file:///C:/x/y/z.
- if isWindowsDriveURIPath(path) {
- path = path[:1] + strings.ToUpper(string(path[1])) + path[2:]
- }
- u := url.URL{Scheme: fileScheme, Path: path}
- return DocumentURI(u.String()), nil
-}
-
-// URIFromPath returns DocumentUri for the supplied file path.
-// Given "", it returns "".
-func URIFromPath(path string) DocumentURI {
- if path == "" {
- return ""
- }
- if !isWindowsDrivePath(path) {
- if abs, err := filepath.Abs(path); err == nil {
- path = abs
- }
- }
- // Check the file path again, in case it became absolute.
- if isWindowsDrivePath(path) {
- path = "/" + strings.ToUpper(string(path[0])) + path[1:]
- }
- path = filepath.ToSlash(path)
- u := url.URL{
- Scheme: fileScheme,
- Path: path,
- }
- return DocumentURI(u.String())
-}
-
-const fileScheme = "file"
-
-// isWindowsDrivePath returns true if the file path is of the form used by
-// Windows. We check if the path begins with a drive letter, followed by a ":".
-// For example: C:/x/y/z.
-func isWindowsDrivePath(path string) bool {
- if len(path) < 3 {
- return false
- }
- return unicode.IsLetter(rune(path[0])) && path[1] == ':'
-}
-
-// isWindowsDriveURIPath returns true if the file URI is of the format used by
-// Windows URIs. The url.Parse package does not specially handle Windows paths
-// (see golang/go#6027), so we check if the URI path has a drive prefix (e.g. "/C:").
-func isWindowsDriveURIPath(uri string) bool {
- if len(uri) < 4 {
- return false
- }
- return uri[0] == '/' && unicode.IsLetter(rune(uri[1])) && uri[2] == ':'
-}
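
The helpers removed above exist mainly to implement one normalization rule: accept the slightly malformed `file://` URIs VS Code emits and canonicalize them. The following is a minimal, self-contained sketch of that rule for reference; it is not the replacement implementation in `x/powernap`, and `normalizeFileURI` is an illustrative name.

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
	"unicode"
)

// normalizeFileURI is an illustrative stand-in for the deleted
// ParseDocumentURI: require a file scheme, tolerate the two-slash form VS
// Code sends, unescape over-encoded characters such as %3A, and uppercase a
// Windows drive letter so equivalent URIs compare equal.
func normalizeFileURI(s string) (string, error) {
	if !strings.HasPrefix(s, "file://") {
		return "", fmt.Errorf("not a file URI: %s", s)
	}
	if !strings.HasPrefix(s, "file:///") {
		s = "file:///" + s[len("file://"):] // VS Code may send only two slashes
	}
	path, err := url.PathUnescape(s[len("file://"):])
	if err != nil {
		return "", err
	}
	// "/c:/..." -> "/C:/...": drive letters are case-insensitive on Windows.
	if len(path) >= 4 && path[0] == '/' && unicode.IsLetter(rune(path[1])) && path[2] == ':' {
		path = path[:1] + strings.ToUpper(string(path[1])) + path[2:]
	}
	u := url.URL{Scheme: "file", Path: path}
	return u.String(), nil
}

func main() {
	out, _ := normalizeFileURI("file:///c%3A/project/readme.md")
	fmt.Println(out) // file:///C:/project/readme.md
}
```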
diff --git a/internal/lsp/rootmarkers_test.go b/internal/lsp/rootmarkers_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..7b3a3c0905799865808b9b1ae0dff992e00ed34c
--- /dev/null
+++ b/internal/lsp/rootmarkers_test.go
@@ -0,0 +1,37 @@
+package lsp
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestHasRootMarkers(t *testing.T) {
+ t.Parallel()
+
+ // Create a temporary directory for testing
+ tmpDir := t.TempDir()
+
+ // Test with empty root markers (should return true)
+ require.True(t, HasRootMarkers(tmpDir, []string{}))
+
+ // Test with non-existent markers
+ require.False(t, HasRootMarkers(tmpDir, []string{"go.mod", "package.json"}))
+
+ // Create a go.mod file
+ goModPath := filepath.Join(tmpDir, "go.mod")
+ err := os.WriteFile(goModPath, []byte("module test"), 0o644)
+ require.NoError(t, err)
+
+ // Test with existing marker
+ require.True(t, HasRootMarkers(tmpDir, []string{"go.mod", "package.json"}))
+
+ // Test with only non-existent markers
+ require.False(t, HasRootMarkers(tmpDir, []string{"package.json", "Cargo.toml"}))
+
+ // Test with glob patterns
+ require.True(t, HasRootMarkers(tmpDir, []string{"*.mod"}))
+ require.False(t, HasRootMarkers(tmpDir, []string{"*.json"}))
+}
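
The implementation of `HasRootMarkers` is not part of this hunk, but the test pins down its contract: an empty marker list always matches, and each marker, including glob patterns, is checked against the directory contents. A hypothetical sketch of that contract (not the actual crush implementation, which may use a different glob library) could look like this:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasRootMarkers is a hypothetical stand-in written only to mirror the
// semantics the test above checks; the real lsp.HasRootMarkers may differ.
func hasRootMarkers(dir string, markers []string) bool {
	if len(markers) == 0 {
		return true // no markers configured: treat any directory as a root
	}
	for _, marker := range markers {
		// filepath.Glob handles both literal names ("go.mod") and patterns ("*.mod").
		if matches, err := filepath.Glob(filepath.Join(dir, marker)); err == nil && len(matches) > 0 {
			return true
		}
	}
	return false
}

func main() {
	dir, _ := os.MkdirTemp("", "rootmarkers")
	defer os.RemoveAll(dir)
	_ = os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module test"), 0o644)

	fmt.Println(hasRootMarkers(dir, nil))                // true
	fmt.Println(hasRootMarkers(dir, []string{"go.mod"})) // true
	fmt.Println(hasRootMarkers(dir, []string{"*.json"})) // false
}
```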
diff --git a/internal/lsp/transport.go b/internal/lsp/transport.go
deleted file mode 100644
index 483281d25c51a6bfb71ca3314419b570f9a6bf0d..0000000000000000000000000000000000000000
--- a/internal/lsp/transport.go
+++ /dev/null
@@ -1,284 +0,0 @@
-package lsp
-
-import (
- "bufio"
- "context"
- "encoding/json"
- "fmt"
- "io"
- "log/slog"
- "strings"
-
- "github.com/charmbracelet/crush/internal/config"
-)
-
-// WriteMessage writes an LSP message to the given writer
-func WriteMessage(w io.Writer, msg *Message) error {
- data, err := json.Marshal(msg)
- if err != nil {
- return fmt.Errorf("failed to marshal message: %w", err)
- }
- cfg := config.Get()
-
- if cfg.Options.DebugLSP {
- slog.Debug("Sending message to server", "method", msg.Method, "id", msg.ID)
- }
-
- _, err = fmt.Fprintf(w, "Content-Length: %d\r\n\r\n", len(data))
- if err != nil {
- return fmt.Errorf("failed to write header: %w", err)
- }
-
- _, err = w.Write(data)
- if err != nil {
- return fmt.Errorf("failed to write message: %w", err)
- }
-
- return nil
-}
-
-// ReadMessage reads a single LSP message from the given reader
-func ReadMessage(r *bufio.Reader) (*Message, error) {
- cfg := config.Get()
- // Read headers
- var contentLength int
- for {
- line, err := r.ReadString('\n')
- if err != nil {
- return nil, fmt.Errorf("failed to read header: %w", err)
- }
- line = strings.TrimSpace(line)
-
- if cfg.Options.DebugLSP {
- slog.Debug("Received header", "line", line)
- }
-
- if line == "" {
- break // End of headers
- }
-
- if strings.HasPrefix(line, "Content-Length: ") {
- _, err := fmt.Sscanf(line, "Content-Length: %d", &contentLength)
- if err != nil {
- return nil, fmt.Errorf("invalid Content-Length: %w", err)
- }
- }
- }
-
- if cfg.Options.DebugLSP {
- slog.Debug("Content-Length", "length", contentLength)
- }
-
- // Read content
- content := make([]byte, contentLength)
- _, err := io.ReadFull(r, content)
- if err != nil {
- return nil, fmt.Errorf("failed to read content: %w", err)
- }
-
- if cfg.Options.DebugLSP {
- slog.Debug("Received content", "content", string(content))
- }
-
- // Parse message
- var msg Message
- if err := json.Unmarshal(content, &msg); err != nil {
- return nil, fmt.Errorf("failed to unmarshal message: %w", err)
- }
-
- return &msg, nil
-}
-
-// handleMessages reads and dispatches messages in a loop
-func (c *Client) handleMessages() {
- cfg := config.Get()
- for {
- msg, err := ReadMessage(c.stdout)
- if err != nil {
- if cfg.Options.DebugLSP {
- slog.Error("Error reading message", "error", err)
- }
- return
- }
-
- // Handle server->client request (has both Method and ID)
- if msg.Method != "" && msg.ID != 0 {
- if cfg.Options.DebugLSP {
- slog.Debug("Received request from server", "method", msg.Method, "id", msg.ID)
- }
-
- response := &Message{
- JSONRPC: "2.0",
- ID: msg.ID,
- }
-
- // Look up handler for this method
- c.serverHandlersMu.RLock()
- handler, ok := c.serverRequestHandlers[msg.Method]
- c.serverHandlersMu.RUnlock()
-
- if ok {
- result, err := handler(msg.Params)
- if err != nil {
- response.Error = &ResponseError{
- Code: -32603,
- Message: err.Error(),
- }
- } else {
- rawJSON, err := json.Marshal(result)
- if err != nil {
- response.Error = &ResponseError{
- Code: -32603,
- Message: fmt.Sprintf("failed to marshal response: %v", err),
- }
- } else {
- response.Result = rawJSON
- }
- }
- } else {
- response.Error = &ResponseError{
- Code: -32601,
- Message: fmt.Sprintf("method not found: %s", msg.Method),
- }
- }
-
- // Send response back to server
- if err := WriteMessage(c.stdin, response); err != nil {
- slog.Error("Error sending response to server", "error", err)
- }
-
- continue
- }
-
- // Handle notification (has Method but no ID)
- if msg.Method != "" && msg.ID == 0 {
- c.notificationMu.RLock()
- handler, ok := c.notificationHandlers[msg.Method]
- c.notificationMu.RUnlock()
-
- if ok {
- if cfg.Options.DebugLSP {
- slog.Debug("Handling notification", "method", msg.Method)
- }
- go handler(msg.Params)
- } else if cfg.Options.DebugLSP {
- slog.Debug("No handler for notification", "method", msg.Method)
- }
- continue
- }
-
- // Handle response to our request (has ID but no Method)
- if msg.ID != 0 && msg.Method == "" {
- c.handlersMu.RLock()
- ch, ok := c.handlers[msg.ID]
- c.handlersMu.RUnlock()
-
- if ok {
- if cfg.Options.DebugLSP {
- slog.Debug("Received response for request", "id", msg.ID)
- }
- ch <- msg
- close(ch)
- } else if cfg.Options.DebugLSP {
- slog.Debug("No handler for response", "id", msg.ID)
- }
- }
- }
-}
-
-// Call makes a request and waits for the response
-func (c *Client) Call(ctx context.Context, method string, params any, result any) error {
- if !c.IsMethodSupported(method) {
- return fmt.Errorf("method not supported by server: %s", method)
- }
- id := c.nextID.Add(1)
-
- cfg := config.Get()
- if cfg.Options.DebugLSP {
- slog.Debug("Making call", "method", method, "id", id)
- }
-
- msg, err := NewRequest(id, method, params)
- if err != nil {
- return fmt.Errorf("failed to create request: %w", err)
- }
-
- // Create response channel
- ch := make(chan *Message, 1)
- c.handlersMu.Lock()
- c.handlers[id] = ch
- c.handlersMu.Unlock()
-
- defer func() {
- c.handlersMu.Lock()
- delete(c.handlers, id)
- c.handlersMu.Unlock()
- }()
-
- // Send request
- if err := WriteMessage(c.stdin, msg); err != nil {
- return fmt.Errorf("failed to send request: %w", err)
- }
-
- if cfg.Options.DebugLSP {
- slog.Debug("Request sent", "method", method, "id", id)
- }
-
- // Wait for response
- select {
- case <-ctx.Done():
- return ctx.Err()
- case resp := <-ch:
- if cfg.Options.DebugLSP {
- slog.Debug("Received response", "id", id)
- }
-
- if resp.Error != nil {
- return fmt.Errorf("request failed: %s (code: %d)", resp.Error.Message, resp.Error.Code)
- }
-
- if result != nil {
- // If result is a json.RawMessage, just copy the raw bytes
- if rawMsg, ok := result.(*json.RawMessage); ok {
- *rawMsg = resp.Result
- return nil
- }
- // Otherwise unmarshal into the provided type
- if err := json.Unmarshal(resp.Result, result); err != nil {
- return fmt.Errorf("failed to unmarshal result: %w", err)
- }
- }
-
- return nil
- }
-}
-
-// Notify sends a notification (a request without an ID that doesn't expect a response)
-func (c *Client) Notify(ctx context.Context, method string, params any) error {
- cfg := config.Get()
- if !c.IsMethodSupported(method) {
- if cfg.Options.DebugLSP {
- slog.Debug("Skipping notification: method not supported by server", "method", method)
- }
- return nil
- }
- if cfg.Options.DebugLSP {
- slog.Debug("Sending notification", "method", method)
- }
-
- msg, err := NewNotification(method, params)
- if err != nil {
- return fmt.Errorf("failed to create notification: %w", err)
- }
-
- if err := WriteMessage(c.stdin, msg); err != nil {
- return fmt.Errorf("failed to send notification: %w", err)
- }
-
- return nil
-}
-
-type (
- NotificationHandler func(params json.RawMessage)
- ServerRequestHandler func(params json.RawMessage) (any, error)
-)
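
The deleted transport implemented the LSP base-protocol framing: a `Content-Length` header, a blank line, then the JSON-RPC payload. The sketch below mirrors what `WriteMessage` and `ReadMessage` did with that framing; the payload is illustrative and does not use the `Message` type above.

```go
package main

import (
	"bufio"
	"bytes"
	"fmt"
	"io"
	"strings"
)

func main() {
	payload := []byte(`{"jsonrpc":"2.0","id":1,"method":"initialize"}`)

	// Writing mirrors WriteMessage: header, blank line, raw JSON body.
	var buf bytes.Buffer
	fmt.Fprintf(&buf, "Content-Length: %d\r\n\r\n", len(payload))
	buf.Write(payload)

	// Reading mirrors ReadMessage: scan headers until the blank line,
	// then read exactly Content-Length bytes of content.
	r := bufio.NewReader(&buf)
	var contentLength int
	for {
		line, err := r.ReadString('\n')
		if err != nil {
			panic(err)
		}
		line = strings.TrimSpace(line)
		if line == "" {
			break
		}
		if strings.HasPrefix(line, "Content-Length: ") {
			fmt.Sscanf(line, "Content-Length: %d", &contentLength)
		}
	}
	content := make([]byte, contentLength)
	if _, err := io.ReadFull(r, content); err != nil {
		panic(err)
	}
	fmt.Println(string(content)) // the original JSON payload
}
```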
diff --git a/internal/lsp/util/edit.go b/internal/lsp/util/edit.go
index 12d8e428a7214338bd7ef66c6d71dd512484b243..8b500ac67489e5fbcd0981a012dcf7a0c871f67e 100644
--- a/internal/lsp/util/edit.go
+++ b/internal/lsp/util/edit.go
@@ -7,7 +7,7 @@ import (
"sort"
"strings"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
func applyTextEdits(uri protocol.DocumentURI, edits []protocol.TextEdit) error {
diff --git a/internal/lsp/watcher/global_watcher.go b/internal/lsp/watcher/global_watcher.go
index c9aa4b3a26e42fe9a9e2c86834147828534c70fc..dcd1ba5d2e7dc8f1329d737896c7a28034268bda 100644
--- a/internal/lsp/watcher/global_watcher.go
+++ b/internal/lsp/watcher/global_watcher.go
@@ -15,7 +15,7 @@ import (
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/fsext"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
"github.com/raphamorim/notify"
)
diff --git a/internal/lsp/watcher/watcher.go b/internal/lsp/watcher/watcher.go
index 18b790349a10f0827f45f8ccb9fb6968980a9d4e..b3c90788db87411d56616501f18f04371deca04a 100644
--- a/internal/lsp/watcher/watcher.go
+++ b/internal/lsp/watcher/watcher.go
@@ -12,9 +12,8 @@ import (
"github.com/bmatcuk/doublestar/v4"
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/csync"
-
"github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
// Client manages LSP file watching for a specific client
diff --git a/internal/tui/components/chat/header/header.go b/internal/tui/components/chat/header/header.go
index edcdc6960123056fc61df7a4332b106d1f417ab0..5e5a68b5290187cea95b7cf8c0aada6cb46b4415 100644
--- a/internal/tui/components/chat/header/header.go
+++ b/internal/tui/components/chat/header/header.go
@@ -8,13 +8,13 @@ import (
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
"github.com/charmbracelet/crush/internal/pubsub"
"github.com/charmbracelet/crush/internal/session"
"github.com/charmbracelet/crush/internal/tui/styles"
"github.com/charmbracelet/crush/internal/tui/util"
"github.com/charmbracelet/lipgloss/v2"
"github.com/charmbracelet/x/ansi"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
type Header interface {
diff --git a/internal/tui/components/lsp/lsp.go b/internal/tui/components/lsp/lsp.go
index f2546c945e436ca196064dda5b50d35583d5b2ab..53daeb0a65c43a1e4ae80ff6567c7daa32a800b8 100644
--- a/internal/tui/components/lsp/lsp.go
+++ b/internal/tui/components/lsp/lsp.go
@@ -4,14 +4,13 @@ import (
"fmt"
"strings"
- "github.com/charmbracelet/lipgloss/v2"
-
"github.com/charmbracelet/crush/internal/app"
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
"github.com/charmbracelet/crush/internal/tui/components/core"
"github.com/charmbracelet/crush/internal/tui/styles"
+ "github.com/charmbracelet/lipgloss/v2"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
// RenderOptions contains options for rendering LSP lists.
@@ -77,6 +76,9 @@ func RenderLSPList(lspClients map[string]*lsp.Client, opts RenderOptions) []stri
} else {
description = t.S().Subtle.Render("error")
}
+ case lsp.StateDisabled:
+ icon = t.ItemOfflineIcon.Foreground(t.FgMuted)
+ description = t.S().Base.Foreground(t.FgMuted).Render("no root markers found")
}
}
diff --git a/schema.json b/schema.json
index 9dee9055050c8e29fb689e9700b33aa8e9842cd2..adb6cc82ca375a45cb8f867da7fa75090d760d5e 100644
--- a/schema.json
+++ b/schema.json
@@ -44,7 +44,7 @@
},
"LSPConfig": {
"properties": {
- "enabled": {
+ "disabled": {
"type": "boolean",
"description": "Whether this LSP server is disabled",
"default": false
@@ -70,9 +70,6 @@
"type": "object",
"description": "Environment variables to set to the LSP server command"
},
- "options": {
- "description": "LSP server-specific configuration options"
- },
"filetypes": {
"items": {
"type": "string",
@@ -87,6 +84,26 @@
},
"type": "array",
"description": "File types this LSP server handles"
+ },
+ "root_markers": {
+ "items": {
+ "type": "string",
+ "examples": [
+ "go.mod",
+ "package.json",
+ "Cargo.toml"
+ ]
+ },
+ "type": "array",
+ "description": "Files or directories that indicate the project root"
+ },
+ "init_options": {
+ "type": "object",
+ "description": "Initialization options passed to the LSP server during initialize request"
+ },
+ "options": {
+ "type": "object",
+ "description": "LSP server-specific settings passed during initialization"
}
},
"additionalProperties": false,
From 6c380d4414ee83b49d04aa299ef0616b45408f26 Mon Sep 17 00:00:00 2001
From: Amolith
Date: Wed, 17 Sep 2025 11:29:27 -0600
Subject: [PATCH 098/236] feat: add attribution settings to config and bash
tool (#1025)
* feat: add attribution settings to config and bash tool
* fix(readme): move ephemeral data block back to cfg section
Closes: #445
---
README.md | 33 +++++++++++++++++
internal/config/config.go | 22 +++++++-----
internal/llm/agent/agent.go | 2 +-
internal/llm/tools/bash.go | 72 ++++++++++++++++++++++++++++---------
schema.json | 20 +++++++++++
5 files changed, 123 insertions(+), 26 deletions(-)
diff --git a/README.md b/README.md
index d2a908f83a8ec6f56f3c6223127765049b09754a..7d618ddd178e97cfca70c0a0fd736d8f8fc959a5 100644
--- a/README.md
+++ b/README.md
@@ -173,6 +173,39 @@ $HOME/.local/share/crush/crush.json
%LOCALAPPDATA%\crush\crush.json
```
+### Attribution Settings
+
+By default, Crush adds attribution information to git commits and pull requests it creates. You can customize this behavior with the `attribution` option:
+
+```json
+{
+ "$schema": "https://charm.land/crush.json",
+ "options": {
+ "attribution": {
+ "co_authored_by": true,
+ "generated_with": true
+ }
+ }
+}
+```
+
+- `co_authored_by`: When true (default), adds `Co-Authored-By: Crush ` to commit messages
+- `generated_with`: When true (default), adds `💘 Generated with Crush` line to commit messages and PR descriptions
+
+To disable all attribution, set both options to false:
+
+```json
+{
+ "$schema": "https://charm.land/crush.json",
+ "options": {
+ "attribution": {
+ "co_authored_by": false,
+ "generated_with": false
+ }
+ }
+}
+```
+
### LSPs
Crush can use LSPs for additional context to help inform its decisions, just
diff --git a/internal/config/config.go b/internal/config/config.go
index 05f6f8a10209ca4c5ddb084c04eb873c043f3c2c..02074dc212330e71848b90a01201c29a6525744d 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -138,15 +138,21 @@ type Permissions struct {
SkipRequests bool `json:"-"` // Automatically accept all permissions (YOLO mode)
}
+type Attribution struct {
+ CoAuthoredBy bool `json:"co_authored_by,omitempty" jsonschema:"description=Add Co-Authored-By trailer to commit messages,default=true"`
+ GeneratedWith bool `json:"generated_with,omitempty" jsonschema:"description=Add Generated with Crush line to commit messages and issues and PRs,default=true"`
+}
+
type Options struct {
- ContextPaths []string `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
- TUI *TUIOptions `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
- Debug bool `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
- DebugLSP bool `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
- DisableAutoSummarize bool `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
- DataDirectory string `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
- DisabledTools []string `json:"disabled_tools" jsonschema:"description=Tools to disable"`
- DisableProviderAutoUpdate bool `json:"disable_provider_auto_update,omitempty" jsonschema:"description=Disable providers auto-update,default=false"`
+ ContextPaths []string `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
+ TUI *TUIOptions `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
+ Debug bool `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
+ DebugLSP bool `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
+ DisableAutoSummarize bool `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
+ DataDirectory string `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
+ DisabledTools []string `json:"disabled_tools" jsonschema:"description=Tools to disable"`
+ DisableProviderAutoUpdate bool `json:"disable_provider_auto_update,omitempty" jsonschema:"description=Disable providers auto-update,default=false"`
+ Attribution *Attribution `json:"attribution,omitempty" jsonschema:"description=Attribution settings for generated content"`
}
type MCPs map[string]MCPConfig
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index 85439c3c0e8cc99ee7c07cfeb669e9402b3acce7..7c09a0be621485962df43e82484b0add4ea63513 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -184,7 +184,7 @@ func NewAgent(
cwd := cfg.WorkingDir()
allTools := []tools.BaseTool{
- tools.NewBashTool(permissions, cwd),
+ tools.NewBashTool(permissions, cwd, cfg.Options.Attribution),
tools.NewDownloadTool(permissions, cwd),
tools.NewEditTool(lspClients, permissions, history, cwd),
tools.NewMultiEditTool(lspClients, permissions, history, cwd),
diff --git a/internal/llm/tools/bash.go b/internal/llm/tools/bash.go
index 6b55820632029e84f9381faa5ca2bd25734abeee..79205b9b142a10ff101da9a657e4b819dc88da4d 100644
--- a/internal/llm/tools/bash.go
+++ b/internal/llm/tools/bash.go
@@ -7,6 +7,7 @@ import (
"strings"
"time"
+ "github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/permission"
"github.com/charmbracelet/crush/internal/shell"
)
@@ -30,6 +31,7 @@ type BashResponseMetadata struct {
type bashTool struct {
permissions permission.Service
workingDir string
+ attribution *config.Attribution
}
const (
@@ -114,8 +116,53 @@ var bannedCommands = []string{
"ufw",
}
-func bashDescription() string {
+func (b *bashTool) bashDescription() string {
bannedCommandsStr := strings.Join(bannedCommands, ", ")
+
+ // Build attribution text based on settings
+ var attributionStep, attributionExample, prAttribution string
+
+ // Default to true if attribution is nil (backward compatibility)
+ generatedWith := b.attribution == nil || b.attribution.GeneratedWith
+ coAuthoredBy := b.attribution == nil || b.attribution.CoAuthoredBy
+
+ // Build PR attribution
+ if generatedWith {
+ prAttribution = "💘 Generated with Crush"
+ }
+
+ if generatedWith || coAuthoredBy {
+ attributionParts := []string{}
+ if generatedWith {
+ attributionParts = append(attributionParts, "💘 Generated with Crush")
+ }
+ if coAuthoredBy {
+ attributionParts = append(attributionParts, "Co-Authored-By: Crush ")
+ }
+
+ if len(attributionParts) > 0 {
+ attributionStep = fmt.Sprintf("4. Create the commit with a message ending with:\n%s", strings.Join(attributionParts, "\n"))
+
+ attributionText := strings.Join(attributionParts, "\n ")
+ attributionExample = fmt.Sprintf(`
+git commit -m "$(cat <<'EOF'
+ Commit message here.
+
+ %s
+ EOF
+)"`, attributionText)
+ }
+ }
+
+ if attributionStep == "" {
+ attributionStep = "4. Create the commit with your commit message."
+ attributionExample = `
+git commit -m "$(cat <<'EOF'
+ Commit message here.
+ EOF
+)"`
+ }
+
return fmt.Sprintf(`Executes a given bash command in a persistent shell session with optional timeout, ensuring proper handling and security measures.
CROSS-PLATFORM SHELL SUPPORT:
@@ -190,20 +237,10 @@ When the user asks you to create a new git commit, follow these steps carefully:
- Review the draft message to ensure it accurately reflects the changes and their purpose
-4. Create the commit with a message ending with:
-💘 Generated with Crush
-Co-Authored-By: Crush
+%s
- In order to ensure good formatting, ALWAYS pass the commit message via a HEREDOC, a la this example:
-
-git commit -m "$(cat <<'EOF'
- Commit message here.
-
- 💘 Generated with Crush
- Co-Authored-By: 💘 Crush
- EOF
- )"
-
+%s
5. If the commit fails due to pre-commit hook changes, retry the commit ONCE to include these automated changes. If it fails again, it usually means a pre-commit hook is preventing the commit. If the commit succeeds but you notice that files were modified by the pre-commit hook, you MUST amend your commit to include them.
@@ -262,14 +299,14 @@ gh pr create --title "the pr title" --body "$(cat <<'EOF'
## Test plan
[Checklist of TODOs for testing the pull request...]
-💘 Generated with Crush
+%s
EOF
)"
Important:
- Return an empty response - the user will see the gh output directly
-- Never update git config`, bannedCommandsStr, MaxOutputLength)
+- Never update git config`, bannedCommandsStr, MaxOutputLength, attributionStep, attributionExample, prAttribution)
}
func blockFuncs() []shell.BlockFunc {
@@ -304,7 +341,7 @@ func blockFuncs() []shell.BlockFunc {
}
}
-func NewBashTool(permission permission.Service, workingDir string) BaseTool {
+func NewBashTool(permission permission.Service, workingDir string, attribution *config.Attribution) BaseTool {
// Set up command blocking on the persistent shell
persistentShell := shell.GetPersistentShell(workingDir)
persistentShell.SetBlockFuncs(blockFuncs())
@@ -312,6 +349,7 @@ func NewBashTool(permission permission.Service, workingDir string) BaseTool {
return &bashTool{
permissions: permission,
workingDir: workingDir,
+ attribution: attribution,
}
}
@@ -322,7 +360,7 @@ func (b *bashTool) Name() string {
func (b *bashTool) Info() ToolInfo {
return ToolInfo{
Name: BashToolName,
- Description: bashDescription(),
+ Description: b.bashDescription(),
Parameters: map[string]any{
"command": map[string]any{
"type": "string",
diff --git a/schema.json b/schema.json
index adb6cc82ca375a45cb8f867da7fa75090d760d5e..f0cb2053e188d918e4c49168080026de5f0bffe5 100644
--- a/schema.json
+++ b/schema.json
@@ -3,6 +3,22 @@
"$id": "https://github.com/charmbracelet/crush/internal/config/config",
"$ref": "#/$defs/Config",
"$defs": {
+ "Attribution": {
+ "properties": {
+ "co_authored_by": {
+ "type": "boolean",
+ "description": "Add Co-Authored-By trailer to commit messages",
+ "default": true
+ },
+ "generated_with": {
+ "type": "boolean",
+ "description": "Add Generated with Crush line to commit messages and issues and PRs",
+ "default": true
+ }
+ },
+ "additionalProperties": false,
+ "type": "object"
+ },
"Config": {
"properties": {
"$schema": {
@@ -300,6 +316,10 @@
"type": "boolean",
"description": "Disable providers auto-update",
"default": false
+ },
+ "attribution": {
+ "$ref": "#/$defs/Attribution",
+ "description": "Attribution settings for generated content"
}
},
"additionalProperties": false,
From 2f73e6c017a6c551f4a9dd7979d89bccfd412152 Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Wed, 17 Sep 2025 14:40:41 -0300
Subject: [PATCH 099/236] docs(readme): mention contributing guide (#1067)
Closes #1064
---
README.md | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/README.md b/README.md
index 7d618ddd178e97cfca70c0a0fd736d8f8fc959a5..85793845005f8e2029db4bcd9b616be2a6370a26 100644
--- a/README.md
+++ b/README.md
@@ -556,6 +556,10 @@ We’re committed to building sustainable, trusted integrations with model
providers. If you’re a provider interested in working with us,
[reach out](mailto:vt100@charm.sh).
+## Contributing
+
+See the [contributing guide](https://github.com/charmbracelet/crush?tab=contributing-ov-file).
+
## Whatcha think?
We’d love to hear your thoughts on this project. Need help? We gotchu. You can find us on:
From 13735c70e128f5d2c3c1fcb87d093f49b876561d Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Wed, 17 Sep 2025 14:41:51 -0300
Subject: [PATCH 100/236] docs: add missing anchor
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 85793845005f8e2029db4bcd9b616be2a6370a26..e47d106e9dbc447a16e0f548c1dd7c3103f6b517 100644
--- a/README.md
+++ b/README.md
@@ -558,7 +558,7 @@ providers. If you’re a provider interested in working with us,
## Contributing
-See the [contributing guide](https://github.com/charmbracelet/crush?tab=contributing-ov-file).
+See the [contributing guide](https://github.com/charmbracelet/crush?tab=contributing-ov-file#contributing).
## Whatcha think?
From 34457697efabe3fc88bee0ed84d16714e8d1af77 Mon Sep 17 00:00:00 2001
From: Christian Rocha
Date: Wed, 17 Sep 2025 13:33:12 -0400
Subject: [PATCH 101/236] chore: various attention to detail edits via
@andreynering
---
README.md | 2 +-
internal/llm/tools/bash.go | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index e47d106e9dbc447a16e0f548c1dd7c3103f6b517..f2aec2a0a28f3e5ec0fd259ac52eb172fe79b1e7 100644
--- a/README.md
+++ b/README.md
@@ -175,7 +175,7 @@ $HOME/.local/share/crush/crush.json
### Attribution Settings
-By default, Crush adds attribution information to git commits and pull requests it creates. You can customize this behavior with the `attribution` option:
+By default, Crush adds attribution information to Git commits and pull requests it creates. You can customize this behavior with the `attribution` option:
```json
{
diff --git a/internal/llm/tools/bash.go b/internal/llm/tools/bash.go
index 79205b9b142a10ff101da9a657e4b819dc88da4d..f0a8bdd69c1748ffc8a402933d61ab67615d7054 100644
--- a/internal/llm/tools/bash.go
+++ b/internal/llm/tools/bash.go
@@ -132,7 +132,7 @@ func (b *bashTool) bashDescription() string {
}
if generatedWith || coAuthoredBy {
- attributionParts := []string{}
+ var attributionParts []string
if generatedWith {
attributionParts = append(attributionParts, "💘 Generated with Crush")
}
From 348fa94f87e3d18626de338c600ab722a820f88a Mon Sep 17 00:00:00 2001
From: Raphael Amorim
Date: Wed, 17 Sep 2025 19:51:12 +0200
Subject: [PATCH 102/236] Update README.md
Co-authored-by: Andrey Nering
---
README.md | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/README.md b/README.md
index f2aec2a0a28f3e5ec0fd259ac52eb172fe79b1e7..d8acdced00f105f8feefcc1ebb21b8514be5c948 100644
--- a/README.md
+++ b/README.md
@@ -175,7 +175,8 @@ $HOME/.local/share/crush/crush.json
### Attribution Settings
-By default, Crush adds attribution information to Git commits and pull requests it creates. You can customize this behavior with the `attribution` option:
+By default, Crush adds attribution information to Git commits and pull requests
+it creates. You can customize this behavior with the `attribution` option:
```json
{
From 38468a34edb9edad343797af71de00179b60e262 Mon Sep 17 00:00:00 2001
From: Christian Rocha
Date: Wed, 17 Sep 2025 15:38:08 -0400
Subject: [PATCH 103/236] docs(readme): tidy attribution section
---
README.md | 54 ++++++++++++++++++++----------------------------------
1 file changed, 20 insertions(+), 34 deletions(-)
diff --git a/README.md b/README.md
index d8acdced00f105f8feefcc1ebb21b8514be5c948..299ce72010540c9b6bd7b9730e05c161bc6e523b 100644
--- a/README.md
+++ b/README.md
@@ -173,40 +173,6 @@ $HOME/.local/share/crush/crush.json
%LOCALAPPDATA%\crush\crush.json
```
-### Attribution Settings
-
-By default, Crush adds attribution information to Git commits and pull requests
-it creates. You can customize this behavior with the `attribution` option:
-
-```json
-{
- "$schema": "https://charm.land/crush.json",
- "options": {
- "attribution": {
- "co_authored_by": true,
- "generated_with": true
- }
- }
-}
-```
-
-- `co_authored_by`: When true (default), adds `Co-Authored-By: Crush ` to commit messages
-- `generated_with`: When true (default), adds `💘 Generated with Crush` line to commit messages and PR descriptions
-
-To disable all attribution, set both options to false:
-
-```json
-{
- "$schema": "https://charm.land/crush.json",
- "options": {
- "attribution": {
- "co_authored_by": false,
- "generated_with": false
- }
- }
-}
-```
-
### LSPs
Crush can use LSPs for additional context to help inform its decisions, just
@@ -304,6 +270,26 @@ permissions. Use this with care.
You can also skip all permission prompts entirely by running Crush with the
`--yolo` flag. Be very, very careful with this feature.
+### Attribution Settings
+
+By default, Crush adds attribution information to Git commits and pull requests
+it creates. You can customize this behavior with the `attribution` option:
+
+```json
+{
+ "$schema": "https://charm.land/crush.json",
+ "options": {
+ "attribution": {
+ "co_authored_by": true,
+ "generated_with": true
+ }
+ }
+}
+```
+
+- `co_authored_by`: When true (default), adds `Co-Authored-By: Crush ` to commit messages
+- `generated_with`: When true (default), adds `💘 Generated with Crush` line to commit messages and PR descriptions
+
### Local Models
Local models can also be configured via OpenAI-compatible API. Here are two common examples:
From 016b3b6dddf8dbafc3fd186086533edc2f9a42c5 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Thu, 18 Sep 2025 10:58:49 -0300
Subject: [PATCH 105/236] feat(lsp): remove internal watcher (#1062)
* feat(lsp): remove internal watcher
It was only ever useful if the user edits files through their editor,
but we don't really need it assuming we only care about edits done by
Crush itself.
Basically, we lose that functionality, but save a bunch of file
descriptors and have improved perf.
Damn good deal if you ask me.
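
With the watcher gone, diagnostics freshness comes from the tools themselves: after an edit they call `notifyLSPs`, which opens and notifies the file and then waits on the new `csync.VersionedMap` version counter via `Client.WaitForDiagnostics` (both added below). A minimal, self-contained sketch of that version-polling idea, assuming a bare `atomic.Uint64` in place of the real diagnostics map:

```go
package main

import (
	"context"
	"fmt"
	"sync/atomic"
	"time"
)

// diagVersion stands in for the csync.VersionedMap version counter added by
// this patch: every Set/Del bumps it, so "diagnostics changed" is simply
// "the version moved".
var diagVersion atomic.Uint64

// waitForChange polls the counter the same way Client.WaitForDiagnostics does
// below: return when the version moves, the timeout fires, or ctx is done.
// The 200ms tick matches the patch; everything else here is illustrative.
func waitForChange(ctx context.Context, d time.Duration) bool {
	ticker := time.NewTicker(200 * time.Millisecond)
	defer ticker.Stop()
	timeout := time.After(d)
	prev := diagVersion.Load()
	for {
		select {
		case <-ctx.Done():
			return false
		case <-timeout:
			return false
		case <-ticker.C:
			if prev != diagVersion.Load() {
				return true
			}
		}
	}
}

func main() {
	// Simulate an LSP server publishing diagnostics after half a second.
	go func() {
		time.Sleep(500 * time.Millisecond)
		diagVersion.Add(1)
	}()
	fmt.Println("diagnostics changed:", waitForChange(context.Background(), 5*time.Second))
}
```

The tradeoff named above holds here too: an edit made outside Crush never bumps the counter, so the wait simply times out.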
Signed-off-by: Carlos Alexandro Becker
* fix: cleanup
Signed-off-by: Carlos Alexandro Becker
* fix: remove rlimit
Signed-off-by: Carlos Alexandro Becker
* fix: more cleanup
Signed-off-by: Carlos Alexandro Becker
---------
Signed-off-by: Carlos Alexandro Becker
---
go.mod | 1 -
go.sum | 3 -
internal/app/app.go | 38 --
internal/app/lsp.go | 58 ---
internal/csync/maps.go | 8 +-
internal/csync/versionedmap.go | 35 ++
internal/csync/versionedmap_test.go | 89 ++++
internal/llm/tools/diagnostics.go | 83 +--
internal/llm/tools/edit.go | 3 +-
internal/llm/tools/multiedit.go | 4 +-
internal/llm/tools/view.go | 2 +-
internal/llm/tools/write.go | 3 +-
internal/lsp/client.go | 26 +-
internal/lsp/watcher/global_watcher.go | 394 --------------
internal/lsp/watcher/global_watcher_test.go | 302 -----------
internal/lsp/watcher/rlimit_stub.go | 12 -
internal/lsp/watcher/rlimit_unix.go | 57 --
internal/lsp/watcher/watcher.go | 548 --------------------
18 files changed, 169 insertions(+), 1497 deletions(-)
create mode 100644 internal/csync/versionedmap.go
create mode 100644 internal/csync/versionedmap_test.go
delete mode 100644 internal/lsp/watcher/global_watcher.go
delete mode 100644 internal/lsp/watcher/global_watcher_test.go
delete mode 100644 internal/lsp/watcher/rlimit_stub.go
delete mode 100644 internal/lsp/watcher/rlimit_unix.go
delete mode 100644 internal/lsp/watcher/watcher.go
diff --git a/go.mod b/go.mod
index b4d0015ef96fde5aa0105bac9c7a2dcbfe2d8d8b..7f888b04a86aa7bb0ae4631fdef410b88633fe90 100644
--- a/go.mod
+++ b/go.mod
@@ -114,7 +114,6 @@ require (
github.com/ncruces/julianday v1.0.0 // indirect
github.com/pierrec/lz4/v4 v4.1.22 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
- github.com/raphamorim/notify v0.9.4
github.com/rivo/uniseg v0.4.7
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect
diff --git a/go.sum b/go.sum
index dd9347e6e058b89170a4925f65507e01b69ab89c..a937b62c5f9ee12e2ef41a12cb8be352f112442d 100644
--- a/go.sum
+++ b/go.sum
@@ -237,8 +237,6 @@ github.com/pressly/goose/v3 v3.25.0 h1:6WeYhMWGRCzpyd89SpODFnCBCKz41KrVbRT58nVjG
github.com/pressly/goose/v3 v3.25.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY=
github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c h1:kmzxiX+OB0knCo1V0dkEkdPelzCdAzCURCfmFArn2/A=
github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c/go.mod h1:wNJrtinHyC3YSf6giEh4FJN8+yZV7nXBjvmfjhBIcw4=
-github.com/raphamorim/notify v0.9.4 h1:JXAGOzeR/cnclKkRCZINKS4EtB47O5TD1N1iCkkarTM=
-github.com/raphamorim/notify v0.9.4/go.mod h1:3FXSIPyrunV10GCnLGPrpSxoY/Dxi+saeQb9hf+TDSo=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
@@ -370,7 +368,6 @@ golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
-golang.org/x/sys v0.0.0-20180926160741-c2ed4eda69e7/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
diff --git a/internal/app/app.go b/internal/app/app.go
index f30df8b8adb4ef52c5ef93a5934b070454b29981..b93ba2cc9cbb16569c7c4739192dad1517581f57 100644
--- a/internal/app/app.go
+++ b/internal/app/app.go
@@ -7,14 +7,11 @@ import (
"fmt"
"log/slog"
"maps"
- "os/exec"
- "strings"
"sync"
"time"
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/db"
"github.com/charmbracelet/crush/internal/format"
"github.com/charmbracelet/crush/internal/history"
@@ -23,7 +20,6 @@ import (
"github.com/charmbracelet/crush/internal/pubsub"
"github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/crush/internal/lsp/watcher"
"github.com/charmbracelet/crush/internal/message"
"github.com/charmbracelet/crush/internal/permission"
"github.com/charmbracelet/crush/internal/session"
@@ -41,9 +37,6 @@ type App struct {
clientsMutex sync.RWMutex
- watcherCancelFuncs *csync.Slice[context.CancelFunc]
- lspWatcherWG sync.WaitGroup
-
config *config.Config
serviceEventsWG *sync.WaitGroup
@@ -56,16 +49,6 @@ type App struct {
cleanupFuncs []func() error
}
-// isGitRepo checks if the current directory is a git repository
-func isGitRepo() bool {
- bts, err := exec.CommandContext(
- context.Background(),
- "git", "rev-parse",
- "--is-inside-work-tree",
- ).CombinedOutput()
- return err == nil && strings.TrimSpace(string(bts)) == "true"
-}
-
// New initializes a new application instance.
func New(ctx context.Context, conn *sql.DB, cfg *config.Config) (*App, error) {
q := db.New(conn)
@@ -89,8 +72,6 @@ func New(ctx context.Context, conn *sql.DB, cfg *config.Config) (*App, error) {
config: cfg,
- watcherCancelFuncs: csync.NewSlice[context.CancelFunc](),
-
events: make(chan tea.Msg, 100),
serviceEventsWG: &sync.WaitGroup{},
tuiWG: &sync.WaitGroup{},
@@ -98,15 +79,6 @@ func New(ctx context.Context, conn *sql.DB, cfg *config.Config) (*App, error) {
app.setupEvents()
- // Start the global watcher only if this is a git repository
- if isGitRepo() {
- if err := watcher.Start(); err != nil {
- return nil, fmt.Errorf("app: %w", err)
- }
- } else {
- slog.Warn("Not starting global watcher: not a git repository")
- }
-
// Initialize LSP clients in the background.
app.initLSPClients(ctx)
@@ -352,13 +324,6 @@ func (app *App) Shutdown() {
app.CoderAgent.CancelAll()
}
- for cancel := range app.watcherCancelFuncs.Seq() {
- cancel()
- }
-
- // Wait for all LSP watchers to finish.
- app.lspWatcherWG.Wait()
-
// Get all LSP clients.
app.clientsMutex.RLock()
clients := make(map[string]*lsp.Client, len(app.LSPClients))
@@ -374,9 +339,6 @@ func (app *App) Shutdown() {
cancel()
}
- // Shutdown the global watcher
- watcher.Shutdown()
-
// Call all cleanup functions.
for _, cleanup := range app.cleanupFuncs {
if cleanup != nil {
diff --git a/internal/app/lsp.go b/internal/app/lsp.go
index d273774620dad4ac3cfc4f79f9d7b5fd681cbfb2..4a6932f275564139bd91e83467d6e5224083e5b5 100644
--- a/internal/app/lsp.go
+++ b/internal/app/lsp.go
@@ -6,9 +6,7 @@ import (
"time"
"github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/log"
"github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/crush/internal/lsp/watcher"
)
// initLSPClients initializes LSP clients.
@@ -77,64 +75,8 @@ func (app *App) createAndStartLSPClient(ctx context.Context, name string, config
slog.Info("LSP client initialized", "name", name)
- // Create a child context that can be canceled when the app is shutting
- // down.
- watchCtx, cancelFunc := context.WithCancel(ctx)
-
- // Create the workspace watcher.
- workspaceWatcher := watcher.New(name, lspClient)
-
- // Store the cancel function to be called during cleanup.
- app.watcherCancelFuncs.Append(cancelFunc)
-
// Add to map with mutex protection before starting goroutine
app.clientsMutex.Lock()
app.LSPClients[name] = lspClient
app.clientsMutex.Unlock()
-
- // Run workspace watcher.
- app.lspWatcherWG.Add(1)
- go app.runWorkspaceWatcher(watchCtx, name, workspaceWatcher)
-}
-
-// runWorkspaceWatcher executes the workspace watcher for an LSP client.
-func (app *App) runWorkspaceWatcher(ctx context.Context, name string, workspaceWatcher *watcher.Client) {
- defer app.lspWatcherWG.Done()
- defer log.RecoverPanic("LSP-"+name, func() {
- // Try to restart the client.
- app.restartLSPClient(ctx, name)
- })
-
- workspaceWatcher.Watch(ctx, app.config.WorkingDir())
- slog.Info("Workspace watcher stopped", "client", name)
-}
-
-// restartLSPClient attempts to restart a crashed or failed LSP client.
-func (app *App) restartLSPClient(ctx context.Context, name string) {
- // Get the original configuration.
- clientConfig, exists := app.config.LSP[name]
- if !exists {
- slog.Error("Cannot restart client, configuration not found", "client", name)
- return
- }
-
- // Clean up the old client if it exists.
- app.clientsMutex.Lock()
- oldClient, exists := app.LSPClients[name]
- if exists {
- // Remove from map before potentially slow shutdown.
- delete(app.LSPClients, name)
- }
- app.clientsMutex.Unlock()
-
- if exists && oldClient != nil {
- // Try to shut down client gracefully, but don't block on errors.
- shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
- _ = oldClient.Close(shutdownCtx)
- cancel()
- }
-
- // Create a new client using the shared function.
- app.createAndStartLSPClient(ctx, name, clientConfig)
- slog.Info("Successfully restarted LSP client", "client", name)
}
diff --git a/internal/csync/maps.go b/internal/csync/maps.go
index b7a1f3109f6c15e7e5592cb538943a2d9e340819..14e8b36c9c37ae2d93c9771e424579051f5181c8 100644
--- a/internal/csync/maps.go
+++ b/internal/csync/maps.go
@@ -70,10 +70,10 @@ func (m *Map[K, V]) GetOrSet(key K, fn func() V) V {
// Take gets an item and then deletes it.
func (m *Map[K, V]) Take(key K) (V, bool) {
- m.mu.Lock()
- defer m.mu.Unlock()
- v, ok := m.inner[key]
- delete(m.inner, key)
+ v, ok := m.Get(key)
+ if ok {
+ m.Del(key)
+ }
return v, ok
}
diff --git a/internal/csync/versionedmap.go b/internal/csync/versionedmap.go
new file mode 100644
index 0000000000000000000000000000000000000000..dfe2d6f5e893f73cc34cfd99fab984dcc273cd9a
--- /dev/null
+++ b/internal/csync/versionedmap.go
@@ -0,0 +1,35 @@
+package csync
+
+import (
+ "sync/atomic"
+)
+
+// NewVersionedMap creates a new versioned, thread-safe map.
+func NewVersionedMap[K comparable, V any]() *VersionedMap[K, V] {
+ return &VersionedMap[K, V]{
+ Map: NewMap[K, V](),
+ }
+}
+
+// VersionedMap is a thread-safe map that keeps track of its version.
+type VersionedMap[K comparable, V any] struct {
+ *Map[K, V]
+ v atomic.Uint64
+}
+
+// Set sets the value for the specified key in the map and increments the version.
+func (m *VersionedMap[K, V]) Set(key K, value V) {
+ m.Map.Set(key, value)
+ m.v.Add(1)
+}
+
+// Del deletes the specified key from the map and increments the version.
+func (m *VersionedMap[K, V]) Del(key K) {
+ m.Map.Del(key)
+ m.v.Add(1)
+}
+
+// Version returns the current version of the map.
+func (m *VersionedMap[K, V]) Version() uint64 {
+ return m.v.Load()
+}
diff --git a/internal/csync/versionedmap_test.go b/internal/csync/versionedmap_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..2c32004c5f269b7518999f95be23db95d7b6ec15
--- /dev/null
+++ b/internal/csync/versionedmap_test.go
@@ -0,0 +1,89 @@
+package csync
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestVersionedMap_Set(t *testing.T) {
+ t.Parallel()
+
+ vm := NewVersionedMap[string, int]()
+ require.Equal(t, uint64(0), vm.Version())
+
+ vm.Set("key1", 42)
+ require.Equal(t, uint64(1), vm.Version())
+
+ value, ok := vm.Get("key1")
+ require.True(t, ok)
+ require.Equal(t, 42, value)
+}
+
+func TestVersionedMap_Del(t *testing.T) {
+ t.Parallel()
+
+ vm := NewVersionedMap[string, int]()
+ vm.Set("key1", 42)
+ initialVersion := vm.Version()
+
+ vm.Del("key1")
+ require.Equal(t, initialVersion+1, vm.Version())
+
+ _, ok := vm.Get("key1")
+ require.False(t, ok)
+}
+
+func TestVersionedMap_VersionIncrement(t *testing.T) {
+ t.Parallel()
+
+ vm := NewVersionedMap[string, int]()
+ initialVersion := vm.Version()
+
+ // Setting a value should increment the version
+ vm.Set("key1", 42)
+ require.Equal(t, initialVersion+1, vm.Version())
+
+ // Deleting a value should increment the version
+ vm.Del("key1")
+ require.Equal(t, initialVersion+2, vm.Version())
+
+ // Deleting a non-existent key should still increment the version
+ vm.Del("nonexistent")
+ require.Equal(t, initialVersion+3, vm.Version())
+}
+
+func TestVersionedMap_ConcurrentAccess(t *testing.T) {
+ t.Parallel()
+
+ vm := NewVersionedMap[int, int]()
+ const numGoroutines = 100
+ const numOperations = 100
+
+ // Initial version
+ initialVersion := vm.Version()
+
+ // Perform concurrent Set and Del operations
+ for i := range numGoroutines {
+ go func(id int) {
+ for j := range numOperations {
+ key := id*numOperations + j
+ vm.Set(key, key*2)
+ vm.Del(key)
+ }
+ }(i)
+ }
+
+ // Wait for operations to complete by checking the version
+ // This is a simplified check - in a real test you might want to use sync.WaitGroup
+ expectedMinVersion := initialVersion + uint64(numGoroutines*numOperations*2)
+
+ // Allow some time for operations to complete
+ for vm.Version() < expectedMinVersion {
+ // Busy wait - in a real test you'd use proper synchronization
+ }
+
+ // Final version should be at least the expected minimum
+ require.GreaterOrEqual(t, vm.Version(), expectedMinVersion)
+ require.Equal(t, 0, vm.Len())
+}
diff --git a/internal/llm/tools/diagnostics.go b/internal/llm/tools/diagnostics.go
index 68586023296c1b5763faefe609171e5c1759eb09..527e2f786895230db41784d0cb1b643b0f40f71c 100644
--- a/internal/llm/tools/diagnostics.go
+++ b/internal/llm/tools/diagnostics.go
@@ -16,6 +16,7 @@ import (
type DiagnosticsParams struct {
FilePath string `json:"file_path"`
}
+
type diagnosticsTool struct {
lspClients map[string]*lsp.Client
}
@@ -76,91 +77,26 @@ func (b *diagnosticsTool) Run(ctx context.Context, call ToolCall) (ToolResponse,
}
lsps := b.lspClients
-
if len(lsps) == 0 {
return NewTextErrorResponse("no LSP clients available"), nil
}
-
- if params.FilePath != "" {
- notifyLspOpenFile(ctx, params.FilePath, lsps)
- waitForLspDiagnostics(ctx, params.FilePath, lsps)
- }
-
+ notifyLSPs(ctx, lsps, params.FilePath)
output := getDiagnostics(params.FilePath, lsps)
-
return NewTextResponse(output), nil
}
-func notifyLspOpenFile(ctx context.Context, filePath string, lsps map[string]*lsp.Client) {
- for _, client := range lsps {
- err := client.OpenFile(ctx, filePath)
- if err != nil {
- continue
- }
- }
-}
-
-func waitForLspDiagnostics(ctx context.Context, filePath string, lsps map[string]*lsp.Client) {
- if len(lsps) == 0 {
+func notifyLSPs(ctx context.Context, lsps map[string]*lsp.Client, filepath string) {
+ if filepath == "" {
return
}
-
- diagChan := make(chan struct{}, 1)
-
for _, client := range lsps {
- originalDiags := client.GetDiagnostics()
-
- handler := func(_ context.Context, _ string, params json.RawMessage) {
- lsp.HandleDiagnostics(client, params)
- var diagParams protocol.PublishDiagnosticsParams
- if err := json.Unmarshal(params, &diagParams); err != nil {
- return
- }
-
- path, err := diagParams.URI.Path()
- if err != nil {
- slog.Error("Failed to convert diagnostic URI to path", "uri", diagParams.URI, "error", err)
- return
- }
-
- if path == filePath || hasDiagnosticsChanged(client.GetDiagnostics(), originalDiags) {
- select {
- case diagChan <- struct{}{}:
- default:
- }
- }
- }
-
- client.RegisterNotificationHandler("textDocument/publishDiagnostics", handler)
-
- if client.IsFileOpen(filePath) {
- err := client.NotifyChange(ctx, filePath)
- if err != nil {
- continue
- }
- } else {
- err := client.OpenFile(ctx, filePath)
- if err != nil {
- continue
- }
- }
- }
-
- select {
- case <-diagChan:
- case <-time.After(5 * time.Second):
- case <-ctx.Done():
- }
-}
-
-func hasDiagnosticsChanged(current, original map[protocol.DocumentURI][]protocol.Diagnostic) bool {
- for uri, diags := range current {
- origDiags, exists := original[uri]
- if !exists || len(diags) != len(origDiags) {
- return true
+ if !client.HandlesFile(filepath) {
+ continue
}
+ _ = client.OpenFileOnDemand(ctx, filepath)
+ _ = client.NotifyChange(ctx, filepath)
+ client.WaitForDiagnostics(ctx, 5*time.Second)
}
- return false
}
func getDiagnostics(filePath string, lsps map[string]*lsp.Client) string {
@@ -198,7 +134,6 @@ func getDiagnostics(filePath string, lsps map[string]*lsp.Client) string {
fileWarnings := countSeverity(fileDiagnostics, "Warn")
projectErrors := countSeverity(projectDiagnostics, "Error")
projectWarnings := countSeverity(projectDiagnostics, "Warn")
-
output.WriteString("\n\n")
fmt.Fprintf(&output, "Current file: %d errors, %d warnings\n", fileErrors, fileWarnings)
fmt.Fprintf(&output, "Project: %d errors, %d warnings\n", projectErrors, projectWarnings)
diff --git a/internal/llm/tools/edit.go b/internal/llm/tools/edit.go
index 8cc3154ebab98ca34a49715d48b110caabc4ffe3..1afa03a427c36c7fe6ad448f4183f7ff4636ef85 100644
--- a/internal/llm/tools/edit.go
+++ b/internal/llm/tools/edit.go
@@ -184,7 +184,8 @@ func (e *editTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error)
return response, nil
}
- waitForLspDiagnostics(ctx, params.FilePath, e.lspClients)
+ notifyLSPs(ctx, e.lspClients, params.FilePath)
+
text := fmt.Sprintf("\n%s\n\n", response.Content)
text += getDiagnostics(params.FilePath, e.lspClients)
response.Content = text
diff --git a/internal/llm/tools/multiedit.go b/internal/llm/tools/multiedit.go
index 50070ca613464a280e53ab964155b8d1e205dde5..2e08e973ba9eb46910fd39e98207b2f5e7bcca1f 100644
--- a/internal/llm/tools/multiedit.go
+++ b/internal/llm/tools/multiedit.go
@@ -188,8 +188,10 @@ func (m *multiEditTool) Run(ctx context.Context, call ToolCall) (ToolResponse, e
return response, nil
}
+ // Notify LSP clients about the change
+ notifyLSPs(ctx, m.lspClients, params.FilePath)
+
// Wait for LSP diagnostics and add them to the response
- waitForLspDiagnostics(ctx, params.FilePath, m.lspClients)
text := fmt.Sprintf("\n%s\n\n", response.Content)
text += getDiagnostics(params.FilePath, m.lspClients)
response.Content = text
diff --git a/internal/llm/tools/view.go b/internal/llm/tools/view.go
index ee1fd6614b3fee0a0c3d65c433bb6d9e1dd6489c..5664edf0baf01f448f1b92ffed6c3e213ee608c2 100644
--- a/internal/llm/tools/view.go
+++ b/internal/llm/tools/view.go
@@ -233,7 +233,7 @@ func (v *viewTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error)
return ToolResponse{}, fmt.Errorf("error reading file: %w", err)
}
- notifyLspOpenFile(ctx, filePath, v.lspClients)
+ notifyLSPs(ctx, v.lspClients, filePath)
output := "\n"
// Format the output with line numbers
output += addLineNumbers(content, params.Offset+1)
diff --git a/internal/llm/tools/write.go b/internal/llm/tools/write.go
index d719337a971cb09babd47753444d761586806fdd..6bbabba93d1dcf7064789bddd9fe4bc69e9f9182 100644
--- a/internal/llm/tools/write.go
+++ b/internal/llm/tools/write.go
@@ -221,7 +221,8 @@ func (w *writeTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error
recordFileWrite(filePath)
recordFileRead(filePath)
- waitForLspDiagnostics(ctx, filePath, w.lspClients)
+
+ notifyLSPs(ctx, w.lspClients, params.FilePath)
result := fmt.Sprintf("File successfully written: %s", filePath)
result = fmt.Sprintf("\n%s\n", result)
diff --git a/internal/lsp/client.go b/internal/lsp/client.go
index 08b5cc2cc438546d3f60674d4f1daf7906b7b21a..70146d3ad181459db3d2193383373159f72b2022 100644
--- a/internal/lsp/client.go
+++ b/internal/lsp/client.go
@@ -34,7 +34,7 @@ type Client struct {
onDiagnosticsChanged func(name string, count int)
// Diagnostic cache
- diagnostics *csync.Map[protocol.DocumentURI, []protocol.Diagnostic]
+ diagnostics *csync.VersionedMap[protocol.DocumentURI, []protocol.Diagnostic]
// Files are currently opened by the LSP
openFiles *csync.Map[string, *OpenFileInfo]
@@ -83,7 +83,7 @@ func New(ctx context.Context, name string, config config.LSPConfig) (*Client, er
client: powernapClient,
name: name,
fileTypes: config.FileTypes,
- diagnostics: csync.NewMap[protocol.DocumentURI, []protocol.Diagnostic](),
+ diagnostics: csync.NewVersionedMap[protocol.DocumentURI, []protocol.Diagnostic](),
openFiles: csync.NewMap[string, *OpenFileInfo](),
config: config,
}
@@ -314,6 +314,8 @@ func (c *Client) NotifyChange(ctx context.Context, filepath string) error {
}
// CloseFile closes a file in the LSP server.
+//
+// NOTE: this is only ever called on LSP shutdown.
func (c *Client) CloseFile(ctx context.Context, filepath string) error {
cfg := config.Get()
uri := string(protocol.URIFromPath(filepath))
@@ -454,6 +456,26 @@ func (c *Client) openKeyConfigFiles(ctx context.Context) {
}
}
+// WaitForDiagnostics waits until diagnostics change or the timeout is reached.
+func (c *Client) WaitForDiagnostics(ctx context.Context, d time.Duration) {
+ ticker := time.NewTicker(200 * time.Millisecond)
+ defer ticker.Stop()
+ timeout := time.After(d)
+ pv := c.diagnostics.Version()
+ for {
+ select {
+ case <-ctx.Done():
+ return
+ case <-timeout:
+ return
+ case <-ticker.C:
+ if pv != c.diagnostics.Version() {
+ return
+ }
+ }
+ }
+}
+
// HasRootMarkers checks if any of the specified root marker patterns exist in the given directory.
// Uses glob patterns to match files, allowing for more flexible matching.
func HasRootMarkers(dir string, rootMarkers []string) bool {
diff --git a/internal/lsp/watcher/global_watcher.go b/internal/lsp/watcher/global_watcher.go
deleted file mode 100644
index dcd1ba5d2e7dc8f1329d737896c7a28034268bda..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/global_watcher.go
+++ /dev/null
@@ -1,394 +0,0 @@
-package watcher
-
-import (
- "context"
- "errors"
- "fmt"
- "log/slog"
- "os"
- "path/filepath"
- "sync"
- "sync/atomic"
- "syscall"
- "time"
-
- "github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/csync"
- "github.com/charmbracelet/crush/internal/fsext"
- "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
- "github.com/raphamorim/notify"
-)
-
-// global manages file watching shared across all LSP clients.
-//
-// IMPORTANT: This implementation uses github.com/raphamorim/notify which provides
-// recursive watching on all platforms. On macOS it uses FSEvents, on Linux it
-// uses inotify (with recursion handled by the library), and on Windows it uses
-// ReadDirectoryChangesW.
-//
-// Key benefits:
-// - Single watch point for entire directory tree
-// - Automatic recursive watching without manually adding subdirectories
-// - No file descriptor exhaustion issues
-// - Built-in ignore system for filtering file events
-type global struct {
- // Channel for receiving file system events
- events chan notify.EventInfo
-
- // Map of workspace watchers by client name
- watchers *csync.Map[string, *Client]
-
- // Single workspace root directory for ignore checking
- root string
-
- started atomic.Bool
-
- // Debouncing for file events (shared across all clients)
- debounceTime time.Duration
- debounceMap *csync.Map[string, *time.Timer]
-
- // Context for shutdown
- ctx context.Context
- cancel context.CancelFunc
-
- // Wait group for cleanup
- wg sync.WaitGroup
-}
-
-// instance returns the singleton global watcher instance
-var instance = sync.OnceValue(func() *global {
- ctx, cancel := context.WithCancel(context.Background())
- gw := &global{
- events: make(chan notify.EventInfo, 4096), // Large buffer to prevent dropping events
- watchers: csync.NewMap[string, *Client](),
- debounceTime: 300 * time.Millisecond,
- debounceMap: csync.NewMap[string, *time.Timer](),
- ctx: ctx,
- cancel: cancel,
- }
-
- return gw
-})
-
-// register registers a workspace watcher with the global watcher
-func (gw *global) register(name string, watcher *Client) {
- gw.watchers.Set(name, watcher)
- slog.Debug("lsp watcher: Registered workspace watcher", "name", name)
-}
-
-// unregister removes a workspace watcher from the global watcher
-func (gw *global) unregister(name string) {
- gw.watchers.Del(name)
- slog.Debug("lsp watcher: Unregistered workspace watcher", "name", name)
-}
-
-// Start sets up recursive watching on the workspace root.
-//
-// Note: We use github.com/raphamorim/notify which provides recursive watching
-// with a single watch point. The "..." suffix means watch recursively.
-// This is much more efficient than manually walking and watching each directory.
-func Start() error {
- gw := instance()
-
- // technically workspace root is always the same...
- if gw.started.Load() {
- slog.Debug("lsp watcher: watcher already set up, skipping")
- return nil
- }
-
- cfg := config.Get()
- root := cfg.WorkingDir()
- slog.Debug("lsp watcher: set workspace directory to global watcher", "path", root)
-
- // Store the workspace root for hierarchical ignore checking
- gw.root = root
- gw.started.Store(true)
-
- // Set up ignore system
- if err := setupIgnoreSystem(root); err != nil {
- slog.Warn("lsp watcher: Failed to set up ignore system", "error", err)
- // Continue anyway, but without ignore functionality
- }
-
- // Start the event processing goroutine
- gw.wg.Add(1)
- go gw.processEvents()
-
- // Set up recursive watching on the root directory
- // The "..." suffix tells notify to watch recursively
- watchPath := filepath.Join(root, "...")
-
- // Watch for all event types we care about
- events := notify.Create | notify.Write | notify.Remove | notify.Rename
-
- if err := notify.Watch(watchPath, gw.events, events); err != nil {
- // Check if the error might be due to file descriptor limits
- if isFileLimitError(err) {
- slog.Warn("lsp watcher: Hit file descriptor limit, attempting to increase", "error", err)
- if newLimit, rlimitErr := maximizeOpenFileLimit(); rlimitErr == nil {
- slog.Info("lsp watcher: Increased file descriptor limit", "limit", newLimit)
- // Retry the watch operation
- if err = notify.Watch(watchPath, gw.events, events); err == nil {
- slog.Info("lsp watcher: Successfully set up watch after increasing limit")
- goto watchSuccess
- }
- err = fmt.Errorf("still failed after increasing limit: %w", err)
- } else {
- slog.Warn("lsp watcher: Failed to increase file descriptor limit", "error", rlimitErr)
- }
- }
- return fmt.Errorf("lsp watcher: error setting up recursive watch on %s: %w", root, err)
- }
-watchSuccess:
-
- slog.Info("lsp watcher: Started recursive watching", "root", root)
- return nil
-}
-
-// processEvents processes file system events from the notify library.
-// Since notify handles recursive watching for us, we don't need to manually
-// add new directories - they're automatically included.
-func (gw *global) processEvents() {
- defer gw.wg.Done()
- cfg := config.Get()
-
- if !gw.started.Load() {
- slog.Error("lsp watcher: Global watcher not initialized")
- return
- }
-
- for {
- select {
- case <-gw.ctx.Done():
- return
-
- case event, ok := <-gw.events:
- if !ok {
- return
- }
-
- path := event.Path()
-
- if cfg != nil && cfg.Options.DebugLSP {
- slog.Debug("lsp watcher: Global watcher received event", "path", path, "event", event.Event().String())
- }
-
- // Convert notify event to our internal format and handle it
- gw.handleFileEvent(event)
- }
- }
-}
-
-// handleFileEvent processes a file system event and distributes notifications to relevant clients
-func (gw *global) handleFileEvent(event notify.EventInfo) {
- cfg := config.Get()
- path := event.Path()
- uri := string(protocol.URIFromPath(path))
-
- // Map notify events to our change types
- var changeType protocol.FileChangeType
- var watchKindNeeded protocol.WatchKind
-
- switch event.Event() {
- case notify.Create:
- changeType = protocol.FileChangeType(protocol.Created)
- watchKindNeeded = protocol.WatchCreate
- // Handle file creation for all relevant clients
- if !isDir(path) && !fsext.ShouldExcludeFile(gw.root, path) {
- gw.openMatchingFileForClients(path)
- }
- case notify.Write:
- changeType = protocol.FileChangeType(protocol.Changed)
- watchKindNeeded = protocol.WatchChange
- case notify.Remove:
- changeType = protocol.FileChangeType(protocol.Deleted)
- watchKindNeeded = protocol.WatchDelete
- case notify.Rename:
- // Treat rename as delete + create
- // First handle as delete
- for _, watcher := range gw.watchers.Seq2() {
- if !watcher.client.HandlesFile(path) {
- continue
- }
- if watched, watchKind := watcher.isPathWatched(path); watched {
- if watchKind&protocol.WatchDelete != 0 {
- gw.handleFileEventForClient(watcher, uri, protocol.FileChangeType(protocol.Deleted))
- }
- }
- }
- // Then check if renamed file exists and treat as create
- if !isDir(path) {
- changeType = protocol.FileChangeType(protocol.Created)
- watchKindNeeded = protocol.WatchCreate
- } else {
- return // Already handled delete, nothing more to do for directories
- }
- default:
- // Unknown event type, skip
- return
- }
-
- // Process the event for each relevant client
- for client, watcher := range gw.watchers.Seq2() {
- if !watcher.client.HandlesFile(path) {
- continue // client doesn't handle this filetype
- }
-
- // Debug logging per client
- if cfg.Options.DebugLSP {
- matched, kind := watcher.isPathWatched(path)
- slog.Debug("lsp watcher: File event for client",
- "path", path,
- "event", event.Event().String(),
- "watched", matched,
- "kind", kind,
- "client", client,
- )
- }
-
- // Check if this path should be watched according to server registrations
- if watched, watchKind := watcher.isPathWatched(path); watched {
- if watchKind&watchKindNeeded != 0 {
- // Skip directory events for non-delete operations
- if changeType != protocol.FileChangeType(protocol.Deleted) && isDir(path) {
- continue
- }
-
- if changeType == protocol.FileChangeType(protocol.Deleted) {
- // Don't debounce deletes
- gw.handleFileEventForClient(watcher, uri, changeType)
- } else {
- // Debounce creates and changes
- gw.debounceHandleFileEventForClient(watcher, uri, changeType)
- }
- }
- }
- }
-}
-
-// isDir checks if a path is a directory
-func isDir(path string) bool {
- info, err := os.Stat(path)
- return err == nil && info.IsDir()
-}
-
-// openMatchingFileForClients opens a newly created file for all clients that handle it (only once per file)
-func (gw *global) openMatchingFileForClients(path string) {
- // Skip directories
- info, err := os.Stat(path)
- if err != nil || info.IsDir() {
- return
- }
-
- // Skip excluded files
- if fsext.ShouldExcludeFile(gw.root, path) {
- return
- }
-
- // Open the file for each client that handles it and has matching patterns
- for _, watcher := range gw.watchers.Seq2() {
- if watcher.client.HandlesFile(path) {
- watcher.openMatchingFile(gw.ctx, path)
- }
- }
-}
-
-// debounceHandleFileEventForClient handles file events with debouncing for a specific client
-func (gw *global) debounceHandleFileEventForClient(watcher *Client, uri string, changeType protocol.FileChangeType) {
- // Create a unique key based on URI, change type, and client name
- key := fmt.Sprintf("%s:%d:%s", uri, changeType, watcher.name)
-
- // Cancel existing timer if any
- if timer, exists := gw.debounceMap.Get(key); exists {
- timer.Stop()
- }
-
- // Create new timer
- gw.debounceMap.Set(key, time.AfterFunc(gw.debounceTime, func() {
- gw.handleFileEventForClient(watcher, uri, changeType)
-
- // Cleanup timer after execution
- gw.debounceMap.Del(key)
- }))
-}
-
-// handleFileEventForClient sends file change notifications to a specific client
-func (gw *global) handleFileEventForClient(watcher *Client, uri string, changeType protocol.FileChangeType) {
- // If the file is open and it's a change event, use didChange notification
- filePath, err := protocol.DocumentURI(uri).Path()
- if err != nil {
- slog.Error("lsp watcher: Error converting URI to path", "uri", uri, "error", err)
- return
- }
-
- if changeType == protocol.FileChangeType(protocol.Deleted) {
- watcher.client.ClearDiagnosticsForURI(protocol.DocumentURI(uri))
- } else if changeType == protocol.FileChangeType(protocol.Changed) && watcher.client.IsFileOpen(filePath) {
- err := watcher.client.NotifyChange(gw.ctx, filePath)
- if err != nil {
- slog.Error("lsp watcher: Error notifying change", "error", err)
- }
- return
- }
-
- // Notify LSP server about the file event using didChangeWatchedFiles
- if err := watcher.notifyFileEvent(gw.ctx, uri, changeType); err != nil {
- slog.Error("lsp watcher: Error notifying LSP server about file event", "error", err)
- }
-}
-
-// shutdown gracefully shuts down the global watcher
-func (gw *global) shutdown() {
- if gw.cancel != nil {
- gw.cancel()
- }
-
- // Stop watching and close the event channel
- notify.Stop(gw.events)
- close(gw.events)
-
- gw.wg.Wait()
- slog.Debug("lsp watcher: Global watcher shutdown complete")
-}
-
-// Shutdown shuts down the singleton global watcher
-func Shutdown() {
- instance().shutdown()
-}
-
-// isFileLimitError checks if an error is related to file descriptor limits
-func isFileLimitError(err error) bool {
- if err == nil {
- return false
- }
- // Check for common file limit errors
- return errors.Is(err, syscall.EMFILE) || errors.Is(err, syscall.ENFILE)
-}
-
-// setupIgnoreSystem configures the notify library's ignore system
-// to use .crushignore and .gitignore files for filtering file events
-func setupIgnoreSystem(root string) error {
- // Create a new ignore matcher for the workspace root
- im := notify.NewIgnoreMatcher(root)
-
- // Load .crushignore file if it exists
- crushignorePath := filepath.Join(root, ".crushignore")
- if _, err := os.Stat(crushignorePath); err == nil {
- if err := im.LoadIgnoreFile(crushignorePath); err != nil {
- slog.Warn("lsp watcher: Failed to load .crushignore file", "error", err)
- }
- }
-
- // Load .gitignore file if it exists
- gitignorePath := filepath.Join(root, ".gitignore")
- if _, err := os.Stat(gitignorePath); err == nil {
- if err := im.LoadIgnoreFile(gitignorePath); err != nil {
- slog.Warn("lsp watcher: Failed to load .gitignore file", "error", err)
- }
- }
-
- // Set as the global ignore matcher
- notify.SetIgnoreMatcher(im)
-
- return nil
-}
diff --git a/internal/lsp/watcher/global_watcher_test.go b/internal/lsp/watcher/global_watcher_test.go
deleted file mode 100644
index f33244dea3b3b95bb65c8a570d366d4b887f6b34..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/global_watcher_test.go
+++ /dev/null
@@ -1,302 +0,0 @@
-package watcher
-
-import (
- "context"
- "os"
- "path/filepath"
- "testing"
- "time"
-
- "github.com/charmbracelet/crush/internal/csync"
- "github.com/raphamorim/notify"
-)
-
-func TestGlobalWatcher(t *testing.T) {
- t.Parallel()
-
- // Test that we can get the global watcher instance
- gw1 := instance()
- if gw1 == nil {
- t.Fatal("Expected global watcher instance, got nil")
- }
-
- // Test that subsequent calls return the same instance (singleton)
- gw2 := instance()
- if gw1 != gw2 {
- t.Fatal("Expected same global watcher instance, got different instances")
- }
-
- // Test registration and unregistration
- mockWatcher := &Client{
- name: "test-watcher",
- }
-
- gw1.register("test", mockWatcher)
-
- // Check that it was registered
- registered, _ := gw1.watchers.Get("test")
-
- if registered != mockWatcher {
- t.Fatal("Expected workspace watcher to be registered")
- }
-
- // Test unregistration
- gw1.unregister("test")
-
- unregistered, _ := gw1.watchers.Get("test")
-
- if unregistered != nil {
- t.Fatal("Expected workspace watcher to be unregistered")
- }
-}
-
-func TestGlobalWatcherWorkspaceIdempotent(t *testing.T) {
- t.Parallel()
-
- // Create a temporary directory for testing
- tempDir := t.TempDir()
-
- // Create a new global watcher instance for this test
- ctx, cancel := context.WithCancel(context.Background())
- defer cancel()
-
- gw := &global{
- events: make(chan notify.EventInfo, 100),
- watchers: csync.NewMap[string, *Client](),
- debounceTime: 300 * time.Millisecond,
- debounceMap: csync.NewMap[string, *time.Timer](),
- ctx: ctx,
- cancel: cancel,
- }
-
- // Test that watching the same workspace multiple times is safe (idempotent)
- // With notify, we use recursive watching with "..."
- watchPath := filepath.Join(tempDir, "...")
-
- err1 := notify.Watch(watchPath, gw.events, notify.All)
- if err1 != nil {
- t.Fatalf("First Watch call failed: %v", err1)
- }
- defer notify.Stop(gw.events)
-
- // Watching the same path again should be safe (notify handles this)
- err2 := notify.Watch(watchPath, gw.events, notify.All)
- if err2 != nil {
- t.Fatalf("Second Watch call failed: %v", err2)
- }
-
- err3 := notify.Watch(watchPath, gw.events, notify.All)
- if err3 != nil {
- t.Fatalf("Third Watch call failed: %v", err3)
- }
-
- // All calls should succeed - notify handles deduplication internally
- // This test verifies that multiple Watch calls are safe
-}
-
-func TestGlobalWatcherRecursiveWatching(t *testing.T) {
- t.Parallel()
-
- // Create a temporary directory structure for testing
- tempDir := t.TempDir()
- subDir := filepath.Join(tempDir, "subdir")
- if err := os.Mkdir(subDir, 0o755); err != nil {
- t.Fatalf("Failed to create subdirectory: %v", err)
- }
-
- // Create some files
- file1 := filepath.Join(tempDir, "file1.txt")
- file2 := filepath.Join(subDir, "file2.txt")
- if err := os.WriteFile(file1, []byte("content1"), 0o644); err != nil {
- t.Fatalf("Failed to create file1: %v", err)
- }
- if err := os.WriteFile(file2, []byte("content2"), 0o644); err != nil {
- t.Fatalf("Failed to create file2: %v", err)
- }
-
- // Create a new global watcher instance for this test
- ctx, cancel := context.WithCancel(context.Background())
- defer cancel()
-
- gw := &global{
- events: make(chan notify.EventInfo, 100),
- watchers: csync.NewMap[string, *Client](),
- debounceTime: 300 * time.Millisecond,
- debounceMap: csync.NewMap[string, *time.Timer](),
- ctx: ctx,
- cancel: cancel,
- root: tempDir,
- }
-
- // Set up recursive watching on the root directory
- watchPath := filepath.Join(tempDir, "...")
- if err := notify.Watch(watchPath, gw.events, notify.All); err != nil {
- t.Fatalf("Failed to set up recursive watch: %v", err)
- }
- defer notify.Stop(gw.events)
-
- // Verify that our expected directories and files exist
- expectedDirs := []string{tempDir, subDir}
-
- for _, expectedDir := range expectedDirs {
- info, err := os.Stat(expectedDir)
- if err != nil {
- t.Fatalf("Expected directory %s doesn't exist: %v", expectedDir, err)
- }
- if !info.IsDir() {
- t.Fatalf("Expected %s to be a directory, but it's not", expectedDir)
- }
- }
-
- // Verify that files exist
- testFiles := []string{file1, file2}
- for _, file := range testFiles {
- info, err := os.Stat(file)
- if err != nil {
- t.Fatalf("Test file %s doesn't exist: %v", file, err)
- }
- if info.IsDir() {
- t.Fatalf("Expected %s to be a file, but it's a directory", file)
- }
- }
-
- // Create a new file in the subdirectory to test recursive watching
- newFile := filepath.Join(subDir, "new.txt")
- if err := os.WriteFile(newFile, []byte("new content"), 0o644); err != nil {
- t.Fatalf("Failed to create new file: %v", err)
- }
-
- // We should receive an event for the file creation
- select {
- case event := <-gw.events:
- // On macOS, paths might have /private prefix, so we need to compare the real paths
- eventPath, _ := filepath.EvalSymlinks(event.Path())
- expectedPath, _ := filepath.EvalSymlinks(newFile)
- if eventPath != expectedPath {
- // Also try comparing just the base names as a fallback
- if filepath.Base(event.Path()) != filepath.Base(newFile) {
- t.Errorf("Expected event for %s, got %s", newFile, event.Path())
- }
- }
- case <-time.After(2 * time.Second):
- t.Fatal("Timeout waiting for file creation event")
- }
-}
-
-func TestNotifyDeduplication(t *testing.T) {
- t.Parallel()
-
- // Create a temporary directory for testing
- tempDir := t.TempDir()
-
- // Create an event channel
- events := make(chan notify.EventInfo, 100)
- defer close(events)
-
- // Add the same directory multiple times with recursive watching
- watchPath := filepath.Join(tempDir, "...")
-
- err1 := notify.Watch(watchPath, events, notify.All)
- if err1 != nil {
- t.Fatalf("First Watch failed: %v", err1)
- }
- defer notify.Stop(events)
-
- err2 := notify.Watch(watchPath, events, notify.All)
- if err2 != nil {
- t.Fatalf("Second Watch failed: %v", err2)
- }
-
- err3 := notify.Watch(watchPath, events, notify.All)
- if err3 != nil {
- t.Fatalf("Third Watch failed: %v", err3)
- }
-
- // All should succeed - notify handles deduplication internally
- // This test verifies the notify behavior we're relying on
-}
-
-func TestGlobalWatcherRespectsIgnoreFiles(t *testing.T) {
- t.Parallel()
-
- // Create a temporary directory structure for testing
- tempDir := t.TempDir()
-
- // Create directories that should be ignored
- nodeModules := filepath.Join(tempDir, "node_modules")
- target := filepath.Join(tempDir, "target")
- customIgnored := filepath.Join(tempDir, "custom_ignored")
- normalDir := filepath.Join(tempDir, "src")
-
- for _, dir := range []string{nodeModules, target, customIgnored, normalDir} {
- if err := os.MkdirAll(dir, 0o755); err != nil {
- t.Fatalf("Failed to create directory %s: %v", dir, err)
- }
- }
-
- // Create .gitignore file
- gitignoreContent := "node_modules/\ntarget/\n"
- if err := os.WriteFile(filepath.Join(tempDir, ".gitignore"), []byte(gitignoreContent), 0o644); err != nil {
- t.Fatalf("Failed to create .gitignore: %v", err)
- }
-
- // Create .crushignore file
- crushignoreContent := "custom_ignored/\n"
- if err := os.WriteFile(filepath.Join(tempDir, ".crushignore"), []byte(crushignoreContent), 0o644); err != nil {
- t.Fatalf("Failed to create .crushignore: %v", err)
- }
-
- // Create a new global watcher instance for this test
- ctx, cancel := context.WithCancel(context.Background())
- defer cancel()
-
- gw := &global{
- events: make(chan notify.EventInfo, 100),
- watchers: csync.NewMap[string, *Client](),
- debounceTime: 300 * time.Millisecond,
- debounceMap: csync.NewMap[string, *time.Timer](),
- ctx: ctx,
- cancel: cancel,
- root: tempDir,
- }
-
- // Set up recursive watching
- watchPath := filepath.Join(tempDir, "...")
- if err := notify.Watch(watchPath, gw.events, notify.All); err != nil {
- t.Fatalf("Failed to set up recursive watch: %v", err)
- }
- defer notify.Stop(gw.events)
-
- // The notify library watches everything, but our processEvents
- // function should filter out ignored files using fsext.ShouldExcludeFile
- // This test verifies that the structure is set up correctly
-}
-
-func TestGlobalWatcherShutdown(t *testing.T) {
- t.Parallel()
-
- // Create a new context for this test
- ctx, cancel := context.WithCancel(context.Background())
- defer cancel()
-
- // Create a temporary global watcher for testing
- gw := &global{
- events: make(chan notify.EventInfo, 100),
- watchers: csync.NewMap[string, *Client](),
- debounceTime: 300 * time.Millisecond,
- debounceMap: csync.NewMap[string, *time.Timer](),
- ctx: ctx,
- cancel: cancel,
- }
-
- // Test shutdown doesn't panic
- gw.shutdown()
-
- // Verify context was cancelled
- select {
- case <-gw.ctx.Done():
- // Expected
- case <-time.After(100 * time.Millisecond):
- t.Fatal("Expected context to be cancelled after shutdown")
- }
-}
diff --git a/internal/lsp/watcher/rlimit_stub.go b/internal/lsp/watcher/rlimit_stub.go
deleted file mode 100644
index 9e39467f21bf602c73fd124f799139e4b6cafc09..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/rlimit_stub.go
+++ /dev/null
@@ -1,12 +0,0 @@
-//go:build !unix
-
-package watcher
-
-// maximizeOpenFileLimit is a no-op on non-Unix systems.
-// Returns a high value to indicate no practical limit.
-func maximizeOpenFileLimit() (int, error) {
- // Windows and other non-Unix systems don't have file descriptor limits
- // in the same way Unix systems do. Return a very high value to indicate
- // there's no practical limit to worry about.
- return 10000000, nil // 10M handles - way more than any process would use
-}
diff --git a/internal/lsp/watcher/rlimit_unix.go b/internal/lsp/watcher/rlimit_unix.go
deleted file mode 100644
index 298f3d5b3004a032f0ce5cc592ed30e954fef3f9..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/rlimit_unix.go
+++ /dev/null
@@ -1,57 +0,0 @@
-//go:build unix
-
-// This file contains code inspired by Syncthing's rlimit implementation
-// Syncthing is licensed under the Mozilla Public License Version 2.0
-// See: https://github.com/syncthing/syncthing/blob/main/LICENSE
-
-package watcher
-
-import (
- "runtime"
- "syscall"
-)
-
-const (
- // macOS has a specific limit for RLIMIT_NOFILE
- darwinOpenMax = 10240
-)
-
-// maximizeOpenFileLimit tries to set the resource limit RLIMIT_NOFILE (number
-// of open file descriptors) to the max (hard limit), if the current (soft
-// limit) is below the max. Returns the new (though possibly unchanged) limit,
-// or an error if it could not be changed.
-func maximizeOpenFileLimit() (int, error) {
- // Get the current limit on number of open files.
- var lim syscall.Rlimit
- if err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &lim); err != nil {
- return 0, err
- }
-
- // If we're already at max, there's no need to try to raise the limit.
- if lim.Cur >= lim.Max {
- return int(lim.Cur), nil
- }
-
- // macOS doesn't like a soft limit greater than OPEN_MAX
- if runtime.GOOS == "darwin" && lim.Max > darwinOpenMax {
- lim.Max = darwinOpenMax
- }
-
- // Try to increase the limit to the max.
- oldLimit := lim.Cur
- lim.Cur = lim.Max
- if err := syscall.Setrlimit(syscall.RLIMIT_NOFILE, &lim); err != nil {
- return int(oldLimit), err
- }
-
- // If the set succeeded, perform a new get to see what happened. We might
- // have gotten a value lower than the one in lim.Max, if lim.Max was
- // something that indicated "unlimited" (i.e. intmax).
- if err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &lim); err != nil {
- // We don't really know the correct value here since Getrlimit
- // mysteriously failed after working once... Shouldn't ever happen.
- return 0, err
- }
-
- return int(lim.Cur), nil
-}
diff --git a/internal/lsp/watcher/watcher.go b/internal/lsp/watcher/watcher.go
deleted file mode 100644
index b3c90788db87411d56616501f18f04371deca04a..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/watcher.go
+++ /dev/null
@@ -1,548 +0,0 @@
-package watcher
-
-import (
- "context"
- "fmt"
- "log/slog"
- "os"
- "path/filepath"
- "strings"
- "time"
-
- "github.com/bmatcuk/doublestar/v4"
- "github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/csync"
- "github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
-)
-
-// Client manages LSP file watching for a specific client
-// It now delegates actual file watching to the GlobalWatcher
-type Client struct {
- client *lsp.Client
- name string
- workspacePath string
-
- // File watchers registered by the server
- registrations *csync.Slice[protocol.FileSystemWatcher]
-}
-
-// New creates a new workspace watcher for the given client.
-func New(name string, client *lsp.Client) *Client {
- return &Client{
- name: name,
- client: client,
- registrations: csync.NewSlice[protocol.FileSystemWatcher](),
- }
-}
-
-// register adds file watchers to track
-func (w *Client) register(ctx context.Context, id string, watchers []protocol.FileSystemWatcher) {
- cfg := config.Get()
-
- w.registrations.Append(watchers...)
-
- if cfg.Options.DebugLSP {
- slog.Debug("Adding file watcher registrations",
- "id", id,
- "watchers", len(watchers),
- "total", w.registrations.Len(),
- )
-
- for i, watcher := range watchers {
- slog.Debug("Registration", "index", i+1)
-
- // Log the GlobPattern
- switch v := watcher.GlobPattern.Value.(type) {
- case string:
- slog.Debug("GlobPattern", "pattern", v)
- case protocol.RelativePattern:
- slog.Debug("GlobPattern", "pattern", v.Pattern)
-
- // Log BaseURI details
- switch u := v.BaseURI.Value.(type) {
- case string:
- slog.Debug("BaseURI", "baseURI", u)
- case protocol.DocumentURI:
- slog.Debug("BaseURI", "baseURI", u)
- default:
- slog.Debug("BaseURI", "baseURI", u)
- }
- default:
- slog.Debug("GlobPattern unknown type", "type", fmt.Sprintf("%T", v))
- }
-
- // Log WatchKind
- watchKind := protocol.WatchKind(protocol.WatchChange | protocol.WatchCreate | protocol.WatchDelete)
- if watcher.Kind != nil {
- watchKind = *watcher.Kind
- }
-
- slog.Debug("WatchKind", "kind", watchKind)
- }
- }
-
- // For servers that need file preloading, open high-priority files only
- if shouldPreloadFiles(w.name) {
- go func() {
- highPriorityFilesOpened := w.openHighPriorityFiles(ctx, w.name)
- if cfg.Options.DebugLSP {
- slog.Debug("Opened high-priority files",
- "count", highPriorityFilesOpened,
- "serverName", w.name)
- }
- }()
- }
-}
-
-// openHighPriorityFiles opens important files for the server type
-// Returns the number of files opened
-func (w *Client) openHighPriorityFiles(ctx context.Context, serverName string) int {
- cfg := config.Get()
- filesOpened := 0
-
- // Define patterns for high-priority files based on server type
- var patterns []string
-
- // TODO: move this to LSP config
- switch serverName {
- case "typescript", "typescript-language-server", "tsserver", "vtsls":
- patterns = []string{
- "**/tsconfig.json",
- "**/package.json",
- "**/jsconfig.json",
- "**/index.ts",
- "**/index.js",
- "**/main.ts",
- "**/main.js",
- }
- case "gopls":
- patterns = []string{
- "**/go.mod",
- "**/go.sum",
- "**/main.go",
- }
- case "rust-analyzer":
- patterns = []string{
- "**/Cargo.toml",
- "**/Cargo.lock",
- "**/src/lib.rs",
- "**/src/main.rs",
- }
- case "python", "pyright", "pylsp":
- patterns = []string{
- "**/pyproject.toml",
- "**/setup.py",
- "**/requirements.txt",
- "**/__init__.py",
- "**/__main__.py",
- }
- case "clangd":
- patterns = []string{
- "**/CMakeLists.txt",
- "**/Makefile",
- "**/compile_commands.json",
- }
- case "java", "jdtls":
- patterns = []string{
- "**/pom.xml",
- "**/build.gradle",
- "**/src/main/java/**/*.java",
- }
- default:
- // For unknown servers, use common configuration files
- patterns = []string{
- "**/package.json",
- "**/Makefile",
- "**/CMakeLists.txt",
- "**/.editorconfig",
- }
- }
-
- // Collect all files to open first
- var filesToOpen []string
-
- // For each pattern, find matching files
- for _, pattern := range patterns {
- // Use doublestar.Glob to find files matching the pattern (supports ** patterns)
- matches, err := doublestar.Glob(os.DirFS(w.workspacePath), pattern)
- if err != nil {
- if cfg.Options.DebugLSP {
- slog.Debug("Error finding high-priority files", "pattern", pattern, "error", err)
- }
- continue
- }
-
- for _, match := range matches {
- // Convert relative path to absolute
- fullPath := filepath.Join(w.workspacePath, match)
-
- // Skip directories and excluded files
- info, err := os.Stat(fullPath)
- if err != nil || info.IsDir() || shouldExcludeFile(fullPath) {
- continue
- }
-
- filesToOpen = append(filesToOpen, fullPath)
-
- // Limit the number of files per pattern
- if len(filesToOpen) >= 5 && (serverName != "java" && serverName != "jdtls") {
- break
- }
- }
- }
-
- // Open files in batches to reduce overhead
- batchSize := 3
- for i := 0; i < len(filesToOpen); i += batchSize {
- end := min(i+batchSize, len(filesToOpen))
-
- // Open batch of files
- for j := i; j < end; j++ {
- fullPath := filesToOpen[j]
- if err := w.client.OpenFile(ctx, fullPath); err != nil {
- if cfg.Options.DebugLSP {
- slog.Debug("Error opening high-priority file", "path", fullPath, "error", err)
- }
- } else {
- filesOpened++
- if cfg.Options.DebugLSP {
- slog.Debug("Opened high-priority file", "path", fullPath)
- }
- }
- }
-
- // Only add delay between batches, not individual files
- if end < len(filesToOpen) {
- time.Sleep(50 * time.Millisecond)
- }
- }
-
- return filesOpened
-}
-
-// Watch sets up file watching for a workspace using the global watcher
-func (w *Client) Watch(ctx context.Context, workspacePath string) {
- w.workspacePath = workspacePath
-
- slog.Debug("Starting workspace watcher", "workspacePath", workspacePath, "serverName", w.name)
-
- // Register this workspace watcher with the global watcher
- instance().register(w.name, w)
- defer instance().unregister(w.name)
-
- // Register handler for file watcher registrations from the server
- lsp.RegisterFileWatchHandler(func(id string, watchers []protocol.FileSystemWatcher) {
- w.register(ctx, id, watchers)
- })
-
- // Wait for context cancellation
- <-ctx.Done()
- slog.Debug("Workspace watcher stopped", "name", w.name)
-}
-
-// isPathWatched checks if a path should be watched based on server registrations
-// If no explicit registrations, watch everything
-func (w *Client) isPathWatched(path string) (bool, protocol.WatchKind) {
- if w.registrations.Len() == 0 {
- return true, protocol.WatchKind(protocol.WatchChange | protocol.WatchCreate | protocol.WatchDelete)
- }
-
- // Check each registration
- for reg := range w.registrations.Seq() {
- isMatch := w.matchesPattern(path, reg.GlobPattern)
- if isMatch {
- kind := protocol.WatchKind(protocol.WatchChange | protocol.WatchCreate | protocol.WatchDelete)
- if reg.Kind != nil {
- kind = *reg.Kind
- }
- return true, kind
- }
- }
-
- return false, 0
-}
-
-// matchesGlob handles glob patterns using the doublestar library
-func matchesGlob(pattern, path string) bool {
- // Use doublestar for all glob matching - it handles ** and other complex patterns
- matched, err := doublestar.Match(pattern, path)
- if err != nil {
- slog.Error("Error matching pattern", "pattern", pattern, "path", path, "error", err)
- return false
- }
- return matched
-}
-
-// matchesPattern checks if a path matches the glob pattern
-func (w *Client) matchesPattern(path string, pattern protocol.GlobPattern) bool {
- patternInfo, err := pattern.AsPattern()
- if err != nil {
- slog.Error("Error parsing pattern", "pattern", pattern, "error", err)
- return false
- }
-
- basePath := patternInfo.GetBasePath()
- patternText := patternInfo.GetPattern()
-
- path = filepath.ToSlash(path)
-
- // For simple patterns without base path
- if basePath == "" {
- // Check if the pattern matches the full path or just the file extension
- fullPathMatch := matchesGlob(patternText, path)
- baseNameMatch := matchesGlob(patternText, filepath.Base(path))
-
- return fullPathMatch || baseNameMatch
- }
-
- if basePath == "" {
- return false
- }
-
- // Make path relative to basePath for matching
- relPath, err := filepath.Rel(basePath, path)
- if err != nil {
- slog.Error("Error getting relative path", "path", path, "basePath", basePath, "error", err, "server", w.name)
- return false
- }
- relPath = filepath.ToSlash(relPath)
-
- isMatch := matchesGlob(patternText, relPath)
-
- return isMatch
-}
-
-// notifyFileEvent sends a didChangeWatchedFiles notification for a file event
-func (w *Client) notifyFileEvent(ctx context.Context, uri string, changeType protocol.FileChangeType) error {
- cfg := config.Get()
- if cfg.Options.DebugLSP {
- slog.Debug("Notifying file event",
- "uri", uri,
- "changeType", changeType,
- )
- }
-
- params := protocol.DidChangeWatchedFilesParams{
- Changes: []protocol.FileEvent{
- {
- URI: protocol.DocumentURI(uri),
- Type: changeType,
- },
- },
- }
-
- return w.client.DidChangeWatchedFiles(ctx, params)
-}
-
-// shouldPreloadFiles determines if we should preload files for a specific language server
-// Some servers work better with preloaded files, others don't need it
-func shouldPreloadFiles(serverName string) bool {
- // TypeScript/JavaScript servers typically need some files preloaded
- // to properly resolve imports and provide intellisense
- switch serverName {
- case "typescript", "typescript-language-server", "tsserver", "vtsls":
- return true
- case "java", "jdtls":
- // Java servers often need to see source files to build the project model
- return true
- default:
- // For most servers, we'll use lazy loading by default
- return false
- }
-}
-
-// Common patterns for directories and files to exclude
-// TODO: make configurable
-var (
- excludedFileExtensions = map[string]bool{
- ".swp": true,
- ".swo": true,
- ".tmp": true,
- ".temp": true,
- ".bak": true,
- ".log": true,
- ".o": true, // Object files
- ".so": true, // Shared libraries
- ".dylib": true, // macOS shared libraries
- ".dll": true, // Windows shared libraries
- ".a": true, // Static libraries
- ".exe": true, // Windows executables
- ".lock": true, // Lock files
- }
-
- // Large binary files that shouldn't be opened
- largeBinaryExtensions = map[string]bool{
- ".png": true,
- ".jpg": true,
- ".jpeg": true,
- ".gif": true,
- ".bmp": true,
- ".ico": true,
- ".zip": true,
- ".tar": true,
- ".gz": true,
- ".rar": true,
- ".7z": true,
- ".pdf": true,
- ".mp3": true,
- ".mp4": true,
- ".mov": true,
- ".wav": true,
- ".wasm": true,
- }
-
- // Maximum file size to open (5MB)
- maxFileSize int64 = 5 * 1024 * 1024
-)
-
-// shouldExcludeFile returns true if the file should be excluded from opening
-func shouldExcludeFile(filePath string) bool {
- fileName := filepath.Base(filePath)
- cfg := config.Get()
-
- // Skip dot files
- if strings.HasPrefix(fileName, ".") {
- return true
- }
-
- // Check file extension
- ext := strings.ToLower(filepath.Ext(filePath))
- if excludedFileExtensions[ext] || largeBinaryExtensions[ext] {
- return true
- }
-
- info, err := os.Stat(filePath)
- if err != nil {
- // If we can't stat the file, skip it
- return true
- }
-
- // Skip large files
- if info.Size() > maxFileSize {
- if cfg.Options.DebugLSP {
- slog.Debug("Skipping large file",
- "path", filePath,
- "size", info.Size(),
- "maxSize", maxFileSize,
- "debug", cfg.Options.Debug,
- "sizeMB", float64(info.Size())/(1024*1024),
- "maxSizeMB", float64(maxFileSize)/(1024*1024),
- )
- }
- return true
- }
-
- return false
-}
-
-// openMatchingFile opens a file if it matches any of the registered patterns
-func (w *Client) openMatchingFile(ctx context.Context, path string) {
- cfg := config.Get()
- // Skip directories
- info, err := os.Stat(path)
- if err != nil || info.IsDir() {
- return
- }
-
- // Skip excluded files
- if shouldExcludeFile(path) {
- return
- }
-
- // Check if this path should be watched according to server registrations
- if watched, _ := w.isPathWatched(path); !watched {
- return
- }
-
- serverName := w.name
-
- // Get server name for specialized handling
- // Check if the file is a high-priority file that should be opened immediately
- // This helps with project initialization for certain language servers
- if isHighPriorityFile(path, serverName) {
- if cfg.Options.DebugLSP {
- slog.Debug("Opening high-priority file", "path", path, "serverName", serverName)
- }
- if err := w.client.OpenFile(ctx, path); err != nil && cfg.Options.DebugLSP {
- slog.Error("Error opening high-priority file", "path", path, "error", err)
- }
- return
- }
-
- // For non-high-priority files, we'll use different strategies based on server type
- if !shouldPreloadFiles(serverName) {
- return
- }
- // For servers that benefit from preloading, open files but with limits
-
- // Check file size - for preloading we're more conservative
- if info.Size() > (1 * 1024 * 1024) { // 1MB limit for preloaded files
- if cfg.Options.DebugLSP {
- slog.Debug("Skipping large file for preloading", "path", path, "size", info.Size())
- }
- return
- }
-
- // File type is already validated by HandlesFile() and isPathWatched() checks earlier,
- // so we know this client handles this file type. Just open it.
- if err := w.client.OpenFile(ctx, path); err != nil && cfg.Options.DebugLSP {
- slog.Error("Error opening file", "path", path, "error", err)
- }
-}
-
-// isHighPriorityFile determines if a file should be opened immediately
-// regardless of the preloading strategy
-func isHighPriorityFile(path string, serverName string) bool {
- fileName := filepath.Base(path)
- ext := filepath.Ext(path)
-
- switch serverName {
- case "typescript", "typescript-language-server", "tsserver", "vtsls":
- // For TypeScript, we want to open configuration files immediately
- return fileName == "tsconfig.json" ||
- fileName == "package.json" ||
- fileName == "jsconfig.json" ||
- // Also open main entry points
- fileName == "index.ts" ||
- fileName == "index.js" ||
- fileName == "main.ts" ||
- fileName == "main.js"
- case "gopls":
- // For Go, we want to open go.mod files immediately
- return fileName == "go.mod" ||
- fileName == "go.sum" ||
- // Also open main.go files
- fileName == "main.go"
- case "rust-analyzer":
- // For Rust, we want to open Cargo.toml files immediately
- return fileName == "Cargo.toml" ||
- fileName == "Cargo.lock" ||
- // Also open lib.rs and main.rs
- fileName == "lib.rs" ||
- fileName == "main.rs"
- case "python", "pyright", "pylsp":
- // For Python, open key project files
- return fileName == "pyproject.toml" ||
- fileName == "setup.py" ||
- fileName == "requirements.txt" ||
- fileName == "__init__.py" ||
- fileName == "__main__.py"
- case "clangd":
- // For C/C++, open key project files
- return fileName == "CMakeLists.txt" ||
- fileName == "Makefile" ||
- fileName == "compile_commands.json"
- case "java", "jdtls":
- // For Java, open key project files
- return fileName == "pom.xml" ||
- fileName == "build.gradle" ||
- ext == ".java" // Java servers often need to see source files
- }
-
- // For unknown servers, prioritize common configuration files
- return fileName == "package.json" ||
- fileName == "Makefile" ||
- fileName == "CMakeLists.txt" ||
- fileName == ".editorconfig"
-}
From d66dfa2c92173e85d707ea20edceed7b9aba32d4 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Thu, 18 Sep 2025 11:16:13 -0300
Subject: [PATCH 106/236] fix(lsp): use csync for lsp clients (#1073)
The map was being passed down everywhere, but the locking mechanism only
ever lived in `app.go`, so reads and writes from other packages were not
synchronized and could race. This changes it to a `*csync.Map`, which
carries its own locking.
Signed-off-by: Carlos Alexandro Becker
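For context, a minimal, self-contained sketch of what a csync.Map-style wrapper provides. The method names mirror the ones used in the diff below, but the bodies (in particular the snapshot-based Seq2) are illustrative and not a copy of the real internal/csync code:

// Sketch only: a generic map guarded by a sync.RWMutex, in the spirit of
// csync.Map. Details of the real package may differ.
package csyncsketch

import (
	"iter"
	"sync"
)

type Map[K comparable, V any] struct {
	mu    sync.RWMutex
	inner map[K]V
}

func NewMap[K comparable, V any]() *Map[K, V] {
	return &Map[K, V]{inner: make(map[K]V)}
}

func (m *Map[K, V]) Get(key K) (V, bool) {
	m.mu.RLock()
	defer m.mu.RUnlock()
	v, ok := m.inner[key]
	return v, ok
}

func (m *Map[K, V]) Set(key K, value V) {
	m.mu.Lock()
	defer m.mu.Unlock()
	m.inner[key] = value
}

func (m *Map[K, V]) Del(key K) {
	m.mu.Lock()
	defer m.mu.Unlock()
	delete(m.inner, key)
}

func (m *Map[K, V]) Len() int {
	m.mu.RLock()
	defer m.mu.RUnlock()
	return len(m.inner)
}

// Seq2 copies the entries under the read lock and yields the snapshot, so
// callers can range over the map without holding the lock while they work.
func (m *Map[K, V]) Seq2() iter.Seq2[K, V] {
	m.mu.RLock()
	snapshot := make(map[K]V, len(m.inner))
	for k, v := range m.inner {
		snapshot[k] = v
	}
	m.mu.RUnlock()
	return func(yield func(K, V) bool) {
		for k, v := range snapshot {
			if !yield(k, v) {
				return
			}
		}
	}
}

With a type like this, call sites such as the shutdown loop below can simply range over LSPClients.Seq2() without an external mutex.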
---
internal/app/app.go | 16 ++++---------
internal/app/lsp.go | 4 +---
internal/csync/maps.go | 8 +++----
internal/csync/versionedmap.go | 24 +++++++++++++++----
internal/llm/agent/agent.go | 7 +++---
internal/llm/tools/diagnostics.go | 20 ++++++++--------
internal/llm/tools/edit.go | 5 ++--
internal/llm/tools/multiedit.go | 5 ++--
internal/llm/tools/view.go | 5 ++--
internal/llm/tools/write.go | 5 ++--
internal/tui/components/chat/header/header.go | 7 +++---
.../tui/components/chat/sidebar/sidebar.go | 4 ++--
internal/tui/components/lsp/lsp.go | 7 +++---
13 files changed, 64 insertions(+), 53 deletions(-)
diff --git a/internal/app/app.go b/internal/app/app.go
index b93ba2cc9cbb16569c7c4739192dad1517581f57..2b3d81fb58acdeb2570a765c0a25ec53b65121da 100644
--- a/internal/app/app.go
+++ b/internal/app/app.go
@@ -6,12 +6,12 @@ import (
"errors"
"fmt"
"log/slog"
- "maps"
"sync"
"time"
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/db"
"github.com/charmbracelet/crush/internal/format"
"github.com/charmbracelet/crush/internal/history"
@@ -33,9 +33,7 @@ type App struct {
CoderAgent agent.Service
- LSPClients map[string]*lsp.Client
-
- clientsMutex sync.RWMutex
+ LSPClients *csync.Map[string, *lsp.Client]
config *config.Config
@@ -66,7 +64,7 @@ func New(ctx context.Context, conn *sql.DB, cfg *config.Config) (*App, error) {
Messages: messages,
History: files,
Permissions: permission.NewPermissionService(cfg.WorkingDir(), skipPermissionsRequests, allowedTools),
- LSPClients: make(map[string]*lsp.Client),
+ LSPClients: csync.NewMap[string, *lsp.Client](),
globalCtx: ctx,
@@ -324,14 +322,8 @@ func (app *App) Shutdown() {
app.CoderAgent.CancelAll()
}
- // Get all LSP clients.
- app.clientsMutex.RLock()
- clients := make(map[string]*lsp.Client, len(app.LSPClients))
- maps.Copy(clients, app.LSPClients)
- app.clientsMutex.RUnlock()
-
// Shutdown all LSP clients.
- for name, client := range clients {
+ for name, client := range app.LSPClients.Seq2() {
shutdownCtx, cancel := context.WithTimeout(app.globalCtx, 5*time.Second)
if err := client.Close(shutdownCtx); err != nil {
slog.Error("Failed to shutdown LSP client", "name", name, "error", err)
diff --git a/internal/app/lsp.go b/internal/app/lsp.go
index 4a6932f275564139bd91e83467d6e5224083e5b5..057e9ce39363f3fd68c8c980ce22e3e8b0e78154 100644
--- a/internal/app/lsp.go
+++ b/internal/app/lsp.go
@@ -76,7 +76,5 @@ func (app *App) createAndStartLSPClient(ctx context.Context, name string, config
slog.Info("LSP client initialized", "name", name)
// Add to map with mutex protection before starting goroutine
- app.clientsMutex.Lock()
- app.LSPClients[name] = lspClient
- app.clientsMutex.Unlock()
+ app.LSPClients.Set(name, lspClient)
}
diff --git a/internal/csync/maps.go b/internal/csync/maps.go
index 14e8b36c9c37ae2d93c9771e424579051f5181c8..b7a1f3109f6c15e7e5592cb538943a2d9e340819 100644
--- a/internal/csync/maps.go
+++ b/internal/csync/maps.go
@@ -70,10 +70,10 @@ func (m *Map[K, V]) GetOrSet(key K, fn func() V) V {
// Take gets an item and then deletes it.
func (m *Map[K, V]) Take(key K) (V, bool) {
- v, ok := m.Get(key)
- if ok {
- m.Del(key)
- }
+ m.mu.Lock()
+ defer m.mu.Unlock()
+ v, ok := m.inner[key]
+ delete(m.inner, key)
return v, ok
}
diff --git a/internal/csync/versionedmap.go b/internal/csync/versionedmap.go
index dfe2d6f5e893f73cc34cfd99fab984dcc273cd9a..f0f4e0249c3b0102976840bd82400e18c1703c47 100644
--- a/internal/csync/versionedmap.go
+++ b/internal/csync/versionedmap.go
@@ -1,34 +1,50 @@
package csync
import (
+ "iter"
"sync/atomic"
)
// NewVersionedMap creates a new versioned, thread-safe map.
func NewVersionedMap[K comparable, V any]() *VersionedMap[K, V] {
return &VersionedMap[K, V]{
- Map: NewMap[K, V](),
+ m: NewMap[K, V](),
}
}
// VersionedMap is a thread-safe map that keeps track of its version.
type VersionedMap[K comparable, V any] struct {
- *Map[K, V]
+ m *Map[K, V]
v atomic.Uint64
}
+// Get gets the value for the specified key from the map.
+func (m *VersionedMap[K, V]) Get(key K) (V, bool) {
+ return m.m.Get(key)
+}
+
// Set sets the value for the specified key in the map and increments the version.
func (m *VersionedMap[K, V]) Set(key K, value V) {
- m.Map.Set(key, value)
+ m.m.Set(key, value)
m.v.Add(1)
}
// Del deletes the specified key from the map and increments the version.
func (m *VersionedMap[K, V]) Del(key K) {
- m.Map.Del(key)
+ m.m.Del(key)
m.v.Add(1)
}
+// Seq2 returns an iter.Seq2 that yields key-value pairs from the map.
+func (m *VersionedMap[K, V]) Seq2() iter.Seq2[K, V] {
+ return m.m.Seq2()
+}
+
+// Len returns the number of items in the map.
+func (m *VersionedMap[K, V]) Len() int {
+ return m.m.Len()
+}
+
// Version returns the current version of the map.
func (m *VersionedMap[K, V]) Version() uint64 {
return m.v.Load()
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index 7c09a0be621485962df43e82484b0add4ea63513..864188113168948c2e59a221c62c6cdad99f75ce 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -83,8 +83,7 @@ type agent struct {
summarizeProviderID string
activeRequests *csync.Map[string, context.CancelFunc]
-
- promptQueue *csync.Map[string, []string]
+ promptQueue *csync.Map[string, []string]
}
var agentPromptMap = map[string]prompt.PromptID{
@@ -100,7 +99,7 @@ func NewAgent(
sessions session.Service,
messages message.Service,
history history.Service,
- lspClients map[string]*lsp.Client,
+ lspClients *csync.Map[string, *lsp.Client],
) (Service, error) {
cfg := config.Get()
@@ -204,7 +203,7 @@ func NewAgent(
withCoderTools := func(t []tools.BaseTool) []tools.BaseTool {
if agentCfg.ID == "coder" {
t = append(t, mcpTools...)
- if len(lspClients) > 0 {
+ if lspClients.Len() > 0 {
t = append(t, tools.NewDiagnosticsTool(lspClients))
}
}
diff --git a/internal/llm/tools/diagnostics.go b/internal/llm/tools/diagnostics.go
index 527e2f786895230db41784d0cb1b643b0f40f71c..17b93ab07cae29f2a274c0e289be02ac10827af2 100644
--- a/internal/llm/tools/diagnostics.go
+++ b/internal/llm/tools/diagnostics.go
@@ -9,6 +9,7 @@ import (
"strings"
"time"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/lsp"
"github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
@@ -18,7 +19,7 @@ type DiagnosticsParams struct {
}
type diagnosticsTool struct {
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
}
const (
@@ -46,7 +47,7 @@ TIPS:
`
)
-func NewDiagnosticsTool(lspClients map[string]*lsp.Client) BaseTool {
+func NewDiagnosticsTool(lspClients *csync.Map[string, *lsp.Client]) BaseTool {
return &diagnosticsTool{
lspClients,
}
@@ -76,20 +77,19 @@ func (b *diagnosticsTool) Run(ctx context.Context, call ToolCall) (ToolResponse,
return NewTextErrorResponse(fmt.Sprintf("error parsing parameters: %s", err)), nil
}
- lsps := b.lspClients
- if len(lsps) == 0 {
+ if b.lspClients.Len() == 0 {
return NewTextErrorResponse("no LSP clients available"), nil
}
- notifyLSPs(ctx, lsps, params.FilePath)
- output := getDiagnostics(params.FilePath, lsps)
+ notifyLSPs(ctx, b.lspClients, params.FilePath)
+ output := getDiagnostics(params.FilePath, b.lspClients)
return NewTextResponse(output), nil
}
-func notifyLSPs(ctx context.Context, lsps map[string]*lsp.Client, filepath string) {
+func notifyLSPs(ctx context.Context, lsps *csync.Map[string, *lsp.Client], filepath string) {
if filepath == "" {
return
}
- for _, client := range lsps {
+ for client := range lsps.Seq() {
if !client.HandlesFile(filepath) {
continue
}
@@ -99,11 +99,11 @@ func notifyLSPs(ctx context.Context, lsps map[string]*lsp.Client, filepath strin
}
}
-func getDiagnostics(filePath string, lsps map[string]*lsp.Client) string {
+func getDiagnostics(filePath string, lsps *csync.Map[string, *lsp.Client]) string {
fileDiagnostics := []string{}
projectDiagnostics := []string{}
- for lspName, client := range lsps {
+ for lspName, client := range lsps.Seq2() {
for location, diags := range client.GetDiagnostics() {
path, err := location.Path()
if err != nil {
diff --git a/internal/llm/tools/edit.go b/internal/llm/tools/edit.go
index 1afa03a427c36c7fe6ad448f4183f7ff4636ef85..d819ceb0af54b5682aecda703850a7b5a795e97c 100644
--- a/internal/llm/tools/edit.go
+++ b/internal/llm/tools/edit.go
@@ -10,6 +10,7 @@ import (
"strings"
"time"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/diff"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/history"
@@ -39,7 +40,7 @@ type EditResponseMetadata struct {
}
type editTool struct {
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
permissions permission.Service
files history.Service
workingDir string
@@ -104,7 +105,7 @@ WINDOWS NOTES:
Remember: when making multiple file edits in a row to the same file, you should prefer to send all edits in a single message with multiple calls to this tool, rather than multiple messages with a single call each.`
)
-func NewEditTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service, workingDir string) BaseTool {
+func NewEditTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, files history.Service, workingDir string) BaseTool {
return &editTool{
lspClients: lspClients,
permissions: permissions,
diff --git a/internal/llm/tools/multiedit.go b/internal/llm/tools/multiedit.go
index 2e08e973ba9eb46910fd39e98207b2f5e7bcca1f..4f99070b1a030e9c8f741f0671a6b2254899f276 100644
--- a/internal/llm/tools/multiedit.go
+++ b/internal/llm/tools/multiedit.go
@@ -10,6 +10,7 @@ import (
"strings"
"time"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/diff"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/history"
@@ -43,7 +44,7 @@ type MultiEditResponseMetadata struct {
}
type multiEditTool struct {
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
permissions permission.Service
files history.Service
workingDir string
@@ -95,7 +96,7 @@ If you want to create a new file, use:
- Subsequent edits: normal edit operations on the created content`
)
-func NewMultiEditTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service, workingDir string) BaseTool {
+func NewMultiEditTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, files history.Service, workingDir string) BaseTool {
return &multiEditTool{
lspClients: lspClients,
permissions: permissions,
diff --git a/internal/llm/tools/view.go b/internal/llm/tools/view.go
index 5664edf0baf01f448f1b92ffed6c3e213ee608c2..7e48a91d380a693295a130b0b39e47c685aab142 100644
--- a/internal/llm/tools/view.go
+++ b/internal/llm/tools/view.go
@@ -11,6 +11,7 @@ import (
"strings"
"unicode/utf8"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/lsp"
"github.com/charmbracelet/crush/internal/permission"
)
@@ -28,7 +29,7 @@ type ViewPermissionsParams struct {
}
type viewTool struct {
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
workingDir string
permissions permission.Service
}
@@ -81,7 +82,7 @@ TIPS:
- When viewing large files, use the offset parameter to read specific sections`
)
-func NewViewTool(lspClients map[string]*lsp.Client, permissions permission.Service, workingDir string) BaseTool {
+func NewViewTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, workingDir string) BaseTool {
return &viewTool{
lspClients: lspClients,
workingDir: workingDir,
diff --git a/internal/llm/tools/write.go b/internal/llm/tools/write.go
index 6bbabba93d1dcf7064789bddd9fe4bc69e9f9182..cb256eb3d5c016797635796c8a8cf706810161af 100644
--- a/internal/llm/tools/write.go
+++ b/internal/llm/tools/write.go
@@ -10,6 +10,7 @@ import (
"strings"
"time"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/diff"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/history"
@@ -30,7 +31,7 @@ type WritePermissionsParams struct {
}
type writeTool struct {
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
permissions permission.Service
files history.Service
workingDir string
@@ -78,7 +79,7 @@ TIPS:
- Always include descriptive comments when making changes to existing code`
)
-func NewWriteTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service, workingDir string) BaseTool {
+func NewWriteTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, files history.Service, workingDir string) BaseTool {
return &writeTool{
lspClients: lspClients,
permissions: permissions,
diff --git a/internal/tui/components/chat/header/header.go b/internal/tui/components/chat/header/header.go
index 5e5a68b5290187cea95b7cf8c0aada6cb46b4415..21861a4a2eda1340f6e01c0748f24cb713f15398 100644
--- a/internal/tui/components/chat/header/header.go
+++ b/internal/tui/components/chat/header/header.go
@@ -6,6 +6,7 @@ import (
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/lsp"
"github.com/charmbracelet/crush/internal/pubsub"
@@ -28,11 +29,11 @@ type Header interface {
type header struct {
width int
session session.Session
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
detailsOpen bool
}
-func New(lspClients map[string]*lsp.Client) Header {
+func New(lspClients *csync.Map[string, *lsp.Client]) Header {
return &header{
lspClients: lspClients,
width: 0,
@@ -104,7 +105,7 @@ func (h *header) details(availWidth int) string {
var parts []string
errorCount := 0
- for _, l := range h.lspClients {
+ for l := range h.lspClients.Seq() {
for _, diagnostics := range l.GetDiagnostics() {
for _, diagnostic := range diagnostics {
if diagnostic.Severity == protocol.SeverityError {
diff --git a/internal/tui/components/chat/sidebar/sidebar.go b/internal/tui/components/chat/sidebar/sidebar.go
index 236c5d2e31c6e7f81482757ff750f572e23cc3fb..b50a78c7f8697e4f4db19649a01794cfe7a23bac 100644
--- a/internal/tui/components/chat/sidebar/sidebar.go
+++ b/internal/tui/components/chat/sidebar/sidebar.go
@@ -69,13 +69,13 @@ type sidebarCmp struct {
session session.Session
logo string
cwd string
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
compactMode bool
history history.Service
files *csync.Map[string, SessionFile]
}
-func New(history history.Service, lspClients map[string]*lsp.Client, compact bool) Sidebar {
+func New(history history.Service, lspClients *csync.Map[string, *lsp.Client], compact bool) Sidebar {
return &sidebarCmp{
lspClients: lspClients,
history: history,
diff --git a/internal/tui/components/lsp/lsp.go b/internal/tui/components/lsp/lsp.go
index 53daeb0a65c43a1e4ae80ff6567c7daa32a800b8..f5f4061045901c91ecb8bce1f47eab3ac1f7abcf 100644
--- a/internal/tui/components/lsp/lsp.go
+++ b/internal/tui/components/lsp/lsp.go
@@ -6,6 +6,7 @@ import (
"github.com/charmbracelet/crush/internal/app"
"github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/lsp"
"github.com/charmbracelet/crush/internal/tui/components/core"
"github.com/charmbracelet/crush/internal/tui/styles"
@@ -22,7 +23,7 @@ type RenderOptions struct {
}
// RenderLSPList renders a list of LSP status items with the given options.
-func RenderLSPList(lspClients map[string]*lsp.Client, opts RenderOptions) []string {
+func RenderLSPList(lspClients *csync.Map[string, *lsp.Client], opts RenderOptions) []string {
t := styles.CurrentTheme()
lspList := []string{}
@@ -91,7 +92,7 @@ func RenderLSPList(lspClients map[string]*lsp.Client, opts RenderOptions) []stri
protocol.SeverityHint: 0,
protocol.SeverityInformation: 0,
}
- if client, ok := lspClients[l.Name]; ok {
+ if client, ok := lspClients.Get(l.Name); ok {
for _, diagnostics := range client.GetDiagnostics() {
for _, diagnostic := range diagnostics {
if severity, ok := lspErrs[diagnostic.Severity]; ok {
@@ -134,7 +135,7 @@ func RenderLSPList(lspClients map[string]*lsp.Client, opts RenderOptions) []stri
}
// RenderLSPBlock renders a complete LSP block with optional truncation indicator.
-func RenderLSPBlock(lspClients map[string]*lsp.Client, opts RenderOptions, showTruncationIndicator bool) string {
+func RenderLSPBlock(lspClients *csync.Map[string, *lsp.Client], opts RenderOptions, showTruncationIndicator bool) string {
t := styles.CurrentTheme()
lspList := RenderLSPList(lspClients, opts)
From 58eda7ccabb9a596dd4fc6920b9e2a7dce3d1ad3 Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Fri, 19 Sep 2025 10:17:20 +0200
Subject: [PATCH 108/236] fix: handle z.ai key validation differently
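This commit has no body, but the change amounts to a provider-specific status check. A minimal sketch of the logic, assuming (per the comment in the diff) that an invalid z.ai key surfaces as HTTP 401 Unauthorized; the "zai" string is illustrative, the patch compares against catwalk.InferenceProviderZAI:

package configsketch

import (
	"fmt"
	"net/http"
)

// checkStatus sketches the check added in this patch: most providers are
// expected to answer 200 OK, while for z.ai only a 401 Unauthorized is
// treated as a failed key validation.
func checkStatus(providerID string, resp *http.Response) error {
	if providerID == "zai" { // illustrative ID string
		if resp.StatusCode == http.StatusUnauthorized {
			return fmt.Errorf("failed to connect to provider %s: %s", providerID, resp.Status)
		}
		return nil
	}
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("failed to connect to provider %s: %s", providerID, resp.Status)
	}
	return nil
}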
---
internal/config/config.go | 13 ++++++++++---
1 file changed, 10 insertions(+), 3 deletions(-)
diff --git a/internal/config/config.go b/internal/config/config.go
index 02074dc212330e71848b90a01201c29a6525744d..67378e9ff00356358bfedd403aacd655b763cfc6 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -508,7 +508,7 @@ func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
if baseURL == "" {
baseURL = "https://api.openai.com/v1"
}
- if c.ID == "openrouter" {
+ if c.ID == string(catwalk.InferenceProviderOpenRouter) {
testURL = baseURL + "/credits"
} else {
testURL = baseURL + "/models"
@@ -546,8 +546,15 @@ func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
if err != nil {
return fmt.Errorf("failed to create request for provider %s: %w", c.ID, err)
}
- if b.StatusCode != http.StatusOK {
- return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
+ if c.ID == string(catwalk.InferenceProviderZAI) {
+ if b.StatusCode == http.StatusUnauthorized {
+ // for z.ai just check if the http response is not 401
+ return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
+ }
+ } else {
+ if b.StatusCode != http.StatusOK {
+ return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
+ }
}
_ = b.Body.Close()
return nil
From 5811c4acd6a9b5d2aa66b6a4208b9d5476902bfc Mon Sep 17 00:00:00 2001
From: tauraamui
Date: Fri, 19 Sep 2025 09:35:31 +0100
Subject: [PATCH 109/236] refactor(tidy): remove nested if and else block
---
internal/config/config.go | 15 ++++++---------
1 file changed, 6 insertions(+), 9 deletions(-)
diff --git a/internal/config/config.go b/internal/config/config.go
index 67378e9ff00356358bfedd403aacd655b763cfc6..4e12218d37bf23aa9b80bb91d4e653493088dc86 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -546,15 +546,12 @@ func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
if err != nil {
return fmt.Errorf("failed to create request for provider %s: %w", c.ID, err)
}
- if c.ID == string(catwalk.InferenceProviderZAI) {
- if b.StatusCode == http.StatusUnauthorized {
- // for z.ai just check if the http response is not 401
- return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
- }
- } else {
- if b.StatusCode != http.StatusOK {
- return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
- }
+ if c.ID == string(catwalk.InferenceProviderZAI) && b.StatusCode == http.StatusUnauthorized {
+ // for z.ai just check if the http response is not 401
+ return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
+ }
+ if b.StatusCode != http.StatusOK {
+ return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
}
_ = b.Body.Close()
return nil
From 96af7826dca69ee6f33a76d27cfa70793f1c1660 Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Fri, 19 Sep 2025 10:50:30 +0200
Subject: [PATCH 110/236] Revert "refactor(tidy): remove nested if and else
block"
This reverts commit a6e99fff52b4ab105df91f595a6e94ff3e8bd2cb.
---
internal/config/config.go | 15 +++++++++------
1 file changed, 9 insertions(+), 6 deletions(-)
diff --git a/internal/config/config.go b/internal/config/config.go
index 4e12218d37bf23aa9b80bb91d4e653493088dc86..67378e9ff00356358bfedd403aacd655b763cfc6 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -546,12 +546,15 @@ func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
if err != nil {
return fmt.Errorf("failed to create request for provider %s: %w", c.ID, err)
}
- if c.ID == string(catwalk.InferenceProviderZAI) && b.StatusCode == http.StatusUnauthorized {
- // for z.ai just check if the http response is not 401
- return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
- }
- if b.StatusCode != http.StatusOK {
- return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
+ if c.ID == string(catwalk.InferenceProviderZAI) {
+ if b.StatusCode == http.StatusUnauthorized {
+ // for z.ai just check if the http response is not 401
+ return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
+ }
+ } else {
+ if b.StatusCode != http.StatusOK {
+ return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
+ }
}
_ = b.Body.Close()
return nil
From 91b6b3134a63036b1528da8e38d80bfe998c3d05 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Fri, 19 Sep 2025 09:16:47 -0300
Subject: [PATCH 111/236] fix(mcp): pass down mcp name to logger (#1078)
This makes the logs more useful. Previously, the MCP library's log lines
were printed without any additional context, so you couldn't tell which
MCP was causing issues.
Signed-off-by: Carlos Alexandro Becker
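The core of the change, condensed into a standalone sketch (the type is named namedLogger here to avoid implying it is a verbatim copy of mcpLogger; the actual field and method shapes match the diff below):

package mcpsketch

import (
	"fmt"
	"log/slog"
)

// namedLogger adapts slog to the Errorf/Infof logger interface used by the
// MCP transports, attaching the MCP's configured name to every entry so log
// lines can be traced back to the server that emitted them.
type namedLogger struct{ name string }

func (l namedLogger) Errorf(format string, v ...any) {
	slog.Error(fmt.Sprintf(format, v...), "name", l.name)
}

func (l namedLogger) Infof(format string, v ...any) {
	slog.Info(fmt.Sprintf(format, v...), "name", l.name)
}

Each client constructor then passes a logger carrying the MCP's name (mcpLogger{name: name} in the diff) instead of a zero-value logger.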
---
internal/llm/agent/mcp-tools.go | 21 +++++++++++++--------
1 file changed, 13 insertions(+), 8 deletions(-)
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index bb50231da028e714c783f50cc7ebd8a1f4b595db..90011c43a0fce476c119c7a981ea6760c294b806 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -327,7 +327,7 @@ func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *con
}
func createAndInitializeClient(ctx context.Context, name string, m config.MCPConfig) (*client.Client, error) {
- c, err := createMcpClient(m)
+ c, err := createMcpClient(name, m)
if err != nil {
updateMCPState(name, MCPStateError, err, nil, 0)
slog.Error("error creating mcp client", "error", err, "name", name)
@@ -353,7 +353,7 @@ func createAndInitializeClient(ctx context.Context, name string, m config.MCPCon
return c, nil
}
-func createMcpClient(m config.MCPConfig) (*client.Client, error) {
+func createMcpClient(name string, m config.MCPConfig) (*client.Client, error) {
switch m.Type {
case config.MCPStdio:
if strings.TrimSpace(m.Command) == "" {
@@ -363,7 +363,7 @@ func createMcpClient(m config.MCPConfig) (*client.Client, error) {
m.Command,
m.ResolvedEnv(),
m.Args,
- transport.WithCommandLogger(mcpLogger{}),
+ transport.WithCommandLogger(mcpLogger{name: name}),
)
case config.MCPHttp:
if strings.TrimSpace(m.URL) == "" {
@@ -372,7 +372,7 @@ func createMcpClient(m config.MCPConfig) (*client.Client, error) {
return client.NewStreamableHttpClient(
m.URL,
transport.WithHTTPHeaders(m.ResolvedHeaders()),
- transport.WithHTTPLogger(mcpLogger{}),
+ transport.WithHTTPLogger(mcpLogger{name: name}),
)
case config.MCPSse:
if strings.TrimSpace(m.URL) == "" {
@@ -381,7 +381,7 @@ func createMcpClient(m config.MCPConfig) (*client.Client, error) {
return client.NewSSEMCPClient(
m.URL,
client.WithHeaders(m.ResolvedHeaders()),
- transport.WithSSELogger(mcpLogger{}),
+ transport.WithSSELogger(mcpLogger{name: name}),
)
default:
return nil, fmt.Errorf("unsupported mcp type: %s", m.Type)
@@ -389,10 +389,15 @@ func createMcpClient(m config.MCPConfig) (*client.Client, error) {
}
// for MCP's clients.
-type mcpLogger struct{}
+type mcpLogger struct{ name string }
-func (l mcpLogger) Errorf(format string, v ...any) { slog.Error(fmt.Sprintf(format, v...)) }
-func (l mcpLogger) Infof(format string, v ...any) { slog.Info(fmt.Sprintf(format, v...)) }
+func (l mcpLogger) Errorf(format string, v ...any) {
+ slog.Error(fmt.Sprintf(format, v...), "name", l.name)
+}
+
+func (l mcpLogger) Infof(format string, v ...any) {
+ slog.Info(fmt.Sprintf(format, v...), "name", l.name)
+}
func mcpTimeout(m config.MCPConfig) time.Duration {
return time.Duration(cmp.Or(m.Timeout, 15)) * time.Second
From fa34edc67f1c25328b3a54e01a7243ec9a64c93f Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Fri, 19 Sep 2025 09:19:16 -0300
Subject: [PATCH 112/236] refactor: put tool descriptions in markdown files
(#1077)
Signed-off-by: Carlos Alexandro Becker
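The mechanics of the refactor, reduced to a standalone sketch: the long description strings move into .md files that are embedded at compile time and rendered through a template. File and identifier names below are illustrative, and the sketch uses text/template where the patch itself imports html/template:

package toolsketch

import (
	"bytes"
	_ "embed"
	"text/template"
)

// The embedded file name is hypothetical; in the patch each tool embeds its
// own markdown file (bash.md, edit.md, and so on).
//go:embed example_description.md
var exampleDescription []byte

var exampleDescriptionTpl = template.Must(
	template.New("exampleDescription").Parse(string(exampleDescription)),
)

type exampleDescriptionData struct {
	MaxOutputLength int
}

// description renders the embedded markdown with runtime values filled in,
// replacing what used to be a very long fmt.Sprintf format string in Go source.
func description(maxOutput int) string {
	var out bytes.Buffer
	if err := exampleDescriptionTpl.Execute(&out, exampleDescriptionData{
		MaxOutputLength: maxOutput,
	}); err != nil {
		// Should never happen for a template that parsed at init time.
		panic("failed to execute description template: " + err.Error())
	}
	return out.String()
}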
---
internal/llm/tools/bash.go | 175 ++++++------------------------
internal/llm/tools/bash.md | 161 +++++++++++++++++++++++++++
internal/llm/tools/diagnostics.go | 31 +-----
internal/llm/tools/diagnostics.md | 21 ++++
internal/llm/tools/download.go | 39 +------
internal/llm/tools/download.md | 34 ++++++
internal/llm/tools/edit.go | 63 +----------
internal/llm/tools/edit.md | 60 ++++++++++
internal/llm/tools/fetch.go | 39 +------
internal/llm/tools/fetch.md | 34 ++++++
internal/llm/tools/glob.go | 49 +--------
internal/llm/tools/glob.md | 46 ++++++++
internal/llm/tools/grep.go | 56 +---------
internal/llm/tools/grep.md | 54 +++++++++
internal/llm/tools/ls.go | 44 ++------
internal/llm/tools/ls.md | 40 +++++++
internal/llm/tools/multiedit.go | 52 +--------
internal/llm/tools/multiedit.md | 48 ++++++++
internal/llm/tools/sourcegraph.go | 104 +-----------------
internal/llm/tools/sourcegraph.md | 102 +++++++++++++++++
internal/llm/tools/view.go | 42 +------
internal/llm/tools/view.md | 42 +++++++
internal/llm/tools/write.go | 42 +------
internal/llm/tools/write.md | 38 +++++++
24 files changed, 776 insertions(+), 640 deletions(-)
create mode 100644 internal/llm/tools/bash.md
create mode 100644 internal/llm/tools/diagnostics.md
create mode 100644 internal/llm/tools/download.md
create mode 100644 internal/llm/tools/edit.md
create mode 100644 internal/llm/tools/fetch.md
create mode 100644 internal/llm/tools/glob.md
create mode 100644 internal/llm/tools/grep.md
create mode 100644 internal/llm/tools/ls.md
create mode 100644 internal/llm/tools/multiedit.md
create mode 100644 internal/llm/tools/sourcegraph.md
create mode 100644 internal/llm/tools/view.md
create mode 100644 internal/llm/tools/write.md
diff --git a/internal/llm/tools/bash.go b/internal/llm/tools/bash.go
index f0a8bdd69c1748ffc8a402933d61ab67615d7054..7f91ecb78b3d79380d029a1d58bb62083cdf27fe 100644
--- a/internal/llm/tools/bash.go
+++ b/internal/llm/tools/bash.go
@@ -1,9 +1,12 @@
package tools
import (
+ "bytes"
"context"
+ _ "embed"
"encoding/json"
"fmt"
+ "html/template"
"strings"
"time"
@@ -43,6 +46,22 @@ const (
BashNoOutput = "no output"
)
+//go:embed bash.md
+var bashDescription []byte
+
+var bashDescriptionTpl = template.Must(
+ template.New("bashDescription").
+ Parse(string(bashDescription)),
+)
+
+type bashDescriptionData struct {
+ BannedCommands string
+ MaxOutputLength int
+ AttributionStep string
+ AttributionExample string
+ PRAttribution string
+}
+
var bannedCommands = []string{
// Network/Download tools
"alias",
@@ -163,150 +182,18 @@ git commit -m "$(cat <<'EOF'
)"`
}
- return fmt.Sprintf(`Executes a given bash command in a persistent shell session with optional timeout, ensuring proper handling and security measures.
-
-CROSS-PLATFORM SHELL SUPPORT:
-* This tool uses a shell interpreter (mvdan/sh) that mimics the Bash language,
- so you should use Bash syntax in all platforms, including Windows.
- The most common shell builtins and core utils are available in Windows as
- well.
-* Make sure to use forward slashes (/) as path separators in commands, even on
- Windows. Example: "ls C:/foo/bar" instead of "ls C:\foo\bar".
-
-Before executing the command, please follow these steps:
-
-1. Directory Verification:
- - If the command will create new directories or files, first use the LS tool to verify the parent directory exists and is the correct location
- - For example, before running "mkdir foo/bar", first use LS to check that "foo" exists and is the intended parent directory
-
-2. Security Check:
- - For security and to limit the threat of a prompt injection attack, some commands are limited or banned. If you use a disallowed command, you will receive an error message explaining the restriction. Explain the error to the User.
- - Verify that the command is not one of the banned commands: %s.
-
-3. Command Execution:
- - After ensuring proper quoting, execute the command.
- - Capture the output of the command.
-
-4. Output Processing:
- - If the output exceeds %d characters, output will be truncated before being returned to you.
- - Prepare the output for display to the user.
-
-5. Return Result:
- - Provide the processed output of the command.
- - If any errors occurred during execution, include those in the output.
- - The result will also have metadata like the cwd (current working directory) at the end, included with tags.
-
-Usage notes:
-- The command argument is required.
-- You can specify an optional timeout in milliseconds (up to 600000ms / 10 minutes). If not specified, commands will timeout after 30 minutes.
-- VERY IMPORTANT: You MUST avoid using search commands like 'find' and 'grep'. Instead use Grep, Glob, or Agent tools to search. You MUST avoid read tools like 'cat', 'head', 'tail', and 'ls', and use FileRead and LS tools to read files.
-- When issuing multiple commands, use the ';' or '&&' operator to separate them. DO NOT use newlines (newlines are ok in quoted strings).
-- IMPORTANT: All commands share the same shell session. Shell state (environment variables, virtual environments, current directory, etc.) persist between commands. For example, if you set an environment variable as part of a command, the environment variable will persist for subsequent commands.
-- Try to maintain your current working directory throughout the session by using absolute paths and avoiding usage of 'cd'. You may use 'cd' if the User explicitly requests it.
-
-pytest /foo/bar/tests
-
-
-cd /foo/bar && pytest tests
-
-
-# Committing changes with git
-
-When the user asks you to create a new git commit, follow these steps carefully:
-
-1. Start with a single message that contains exactly three tool_use blocks that do the following (it is VERY IMPORTANT that you send these tool_use blocks in a single message, otherwise it will feel slow to the user!):
- - Run a git status command to see all untracked files.
- - Run a git diff command to see both staged and unstaged changes that will be committed.
- - Run a git log command to see recent commit messages, so that you can follow this repository's commit message style.
-
-2. Use the git context at the start of this conversation to determine which files are relevant to your commit. Add relevant untracked files to the staging area. Do not commit files that were already modified at the start of this conversation, if they are not relevant to your commit.
-
-3. Analyze all staged changes (both previously staged and newly added) and draft a commit message. Wrap your analysis process in tags:
-
-
-- List the files that have been changed or added
-- Summarize the nature of the changes (eg. new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)
-- Brainstorm the purpose or motivation behind these changes
-- Do not use tools to explore code, beyond what is available in the git context
-- Assess the impact of these changes on the overall project
-- Check for any sensitive information that shouldn't be committed
-- Draft a concise (1-2 sentences) commit message that focuses on the "why" rather than the "what"
-- Ensure your language is clear, concise, and to the point
-- Ensure the message accurately reflects the changes and their purpose (i.e. "add" means a wholly new feature, "update" means an enhancement to an existing feature, "fix" means a bug fix, etc.)
-- Ensure the message is not generic (avoid words like "Update" or "Fix" without context)
-- Review the draft message to ensure it accurately reflects the changes and their purpose
-
-
-%s
-
-- In order to ensure good formatting, ALWAYS pass the commit message via a HEREDOC, a la this example:
-%s
-
-5. If the commit fails due to pre-commit hook changes, retry the commit ONCE to include these automated changes. If it fails again, it usually means a pre-commit hook is preventing the commit. If the commit succeeds but you notice that files were modified by the pre-commit hook, you MUST amend your commit to include them.
-
-6. Finally, run git status to make sure the commit succeeded.
-
-Important notes:
-- When possible, combine the "git add" and "git commit" commands into a single "git commit -am" command, to speed things up
-- However, be careful not to stage files (e.g. with 'git add .') for commits that aren't part of the change, they may have untracked files they want to keep around, but not commit.
-- NEVER update the git config
-- DO NOT push to the remote repository
-- IMPORTANT: Never use git commands with the -i flag (like git rebase -i or git add -i) since they require interactive input which is not supported.
-- If there are no changes to commit (i.e., no untracked files and no modifications), do not create an empty commit
-- Ensure your commit message is meaningful and concise. It should explain the purpose of the changes, not just describe them.
-- Return an empty response - the user will see the git output directly
-
-# Creating pull requests
-Use the gh command via the Bash tool for ALL GitHub-related tasks including working with issues, pull requests, checks, and releases. If given a Github URL use the gh command to get the information needed.
-
-IMPORTANT: When the user asks you to create a pull request, follow these steps carefully:
-
-1. Understand the current state of the branch. Remember to send a single message that contains multiple tool_use blocks (it is VERY IMPORTANT that you do this in a single message, otherwise it will feel slow to the user!):
- - Run a git status command to see all untracked files.
- - Run a git diff command to see both staged and unstaged changes that will be committed.
- - Check if the current branch tracks a remote branch and is up to date with the remote, so you know if you need to push to the remote
- - Run a git log command and 'git diff main...HEAD' to understand the full commit history for the current branch (from the time it diverged from the 'main' branch.)
-
-2. Create new branch if needed
-
-3. Commit changes if needed
-
-4. Push to remote with -u flag if needed
-
-5. Analyze all changes that will be included in the pull request, making sure to look at all relevant commits (not just the latest commit, but all commits that will be included in the pull request!), and draft a pull request summary. Wrap your analysis process in tags:
-
-
-- List the commits since diverging from the main branch
-- Summarize the nature of the changes (eg. new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)
-- Brainstorm the purpose or motivation behind these changes
-- Assess the impact of these changes on the overall project
-- Do not use tools to explore code, beyond what is available in the git context
-- Check for any sensitive information that shouldn't be committed
-- Draft a concise (1-2 bullet points) pull request summary that focuses on the "why" rather than the "what"
-- Ensure the summary accurately reflects all changes since diverging from the main branch
-- Ensure your language is clear, concise, and to the point
-- Ensure the summary accurately reflects the changes and their purpose (ie. "add" means a wholly new feature, "update" means an enhancement to an existing feature, "fix" means a bug fix, etc.)
-- Ensure the summary is not generic (avoid words like "Update" or "Fix" without context)
-- Review the draft summary to ensure it accurately reflects the changes and their purpose
-
-
-6. Create PR using gh pr create with the format below. Use a HEREDOC to pass the body to ensure correct formatting.
-
-gh pr create --title "the pr title" --body "$(cat <<'EOF'
-## Summary
-<1-3 bullet points>
-
-## Test plan
-[Checklist of TODOs for testing the pull request...]
-
-%s
-EOF
-)"
-
-
-Important:
-- Return an empty response - the user will see the gh output directly
-- Never update git config`, bannedCommandsStr, MaxOutputLength, attributionStep, attributionExample, prAttribution)
+ var out bytes.Buffer
+ if err := bashDescriptionTpl.Execute(&out, bashDescriptionData{
+ BannedCommands: bannedCommandsStr,
+ MaxOutputLength: MaxOutputLength,
+ AttributionStep: attributionStep,
+ AttributionExample: attributionExample,
+ PRAttribution: prAttribution,
+ }); err != nil {
+ // this should never happen.
+ panic("failed to execute bash description template: " + err.Error())
+ }
+ return out.String()
}
func blockFuncs() []shell.BlockFunc {
diff --git a/internal/llm/tools/bash.md b/internal/llm/tools/bash.md
new file mode 100644
index 0000000000000000000000000000000000000000..464b2749fb0205729caafbccc2dde57ffe267cd1
--- /dev/null
+++ b/internal/llm/tools/bash.md
@@ -0,0 +1,161 @@
+Executes a given bash command in a persistent shell session with optional timeout, ensuring proper handling and security measures.
+
+CROSS-PLATFORM SHELL SUPPORT:
+
+- This tool uses a shell interpreter (mvdan/sh) that mimics the Bash language,
+ so you should use Bash syntax in all platforms, including Windows.
+ The most common shell builtins and core utils are available in Windows as
+ well.
+- Make sure to use forward slashes (/) as path separators in commands, even on
+ Windows. Example: "ls C:/foo/bar" instead of "ls C:\foo\bar".
+
+Before executing the command, please follow these steps:
+
+1. Directory Verification:
+
+- If the command will create new directories or files, first use the LS tool to verify the parent directory exists and is the correct location
+- For example, before running "mkdir foo/bar", first use LS to check that "foo" exists and is the intended parent directory
+
+2. Security Check:
+
+- For security and to limit the threat of a prompt injection attack, some commands are limited or banned. If you use a disallowed command, you will receive an error message explaining the restriction. Explain the error to the User.
+- Verify that the command is not one of the banned commands: {{ .BannedCommands }}.
+
+3. Command Execution:
+
+- After ensuring proper quoting, execute the command.
+- Capture the output of the command.
+
+4. Output Processing:
+
+- If the output exceeds {{ .MaxOutputLength }} characters, output will be truncated before being returned to you.
+- Prepare the output for display to the user.
+
+5. Return Result:
+
+- Provide the processed output of the command.
+- If any errors occurred during execution, include those in the output.
+- The result will also have metadata like the cwd (current working directory) at the end, included with tags.
+
+Usage notes:
+
+- The command argument is required.
+- You can specify an optional timeout in milliseconds (up to 600000ms / 10 minutes). If not specified, commands will timeout after 30 minutes.
+- VERY IMPORTANT: You MUST avoid using search commands like 'find' and 'grep'. Instead use Grep, Glob, or Agent tools to search. You MUST avoid read tools like 'cat', 'head', 'tail', and 'ls', and use FileRead and LS tools to read files.
+- When issuing multiple commands, use the ';' or '&&' operator to separate them. DO NOT use newlines (newlines are ok in quoted strings).
+- IMPORTANT: All commands share the same shell session. Shell state (environment variables, virtual environments, current directory, etc.) persist between commands. For example, if you set an environment variable as part of a command, the environment variable will persist for subsequent commands.
+- Try to maintain your current working directory throughout the session by using absolute paths and avoiding usage of 'cd'. You may use 'cd' if the User explicitly requests it.
+
+ pytest /foo/bar/tests
+
+
+ cd /foo/bar && pytest tests
+
+
+# Committing changes with git
+
+When the user asks you to create a new git commit, follow these steps carefully:
+
+1. Start with a single message that contains exactly three tool_use blocks that do the following (it is VERY IMPORTANT that you send these tool_use blocks in a single message, otherwise it will feel slow to the user!):
+
+- Run a git status command to see all untracked files.
+- Run a git diff command to see both staged and unstaged changes that will be committed.
+- Run a git log command to see recent commit messages, so that you can follow this repository's commit message style.
+
+2. Use the git context at the start of this conversation to determine which files are relevant to your commit. Add relevant untracked files to the staging area. Do not commit files that were already modified at the start of this conversation, if they are not relevant to your commit.
+
+3. Analyze all staged changes (both previously staged and newly added) and draft a commit message. Wrap your analysis process in tags:
+
+
+
+- List the files that have been changed or added
+- Summarize the nature of the changes (eg. new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)
+- Brainstorm the purpose or motivation behind these changes
+- Do not use tools to explore code, beyond what is available in the git context
+- Assess the impact of these changes on the overall project
+- Check for any sensitive information that shouldn't be committed
+- Draft a concise (1-2 sentences) commit message that focuses on the "why" rather than the "what"
+- Ensure your language is clear, concise, and to the point
+- Ensure the message accurately reflects the changes and their purpose (i.e. "add" means a wholly new feature, "update" means an enhancement to an existing feature, "fix" means a bug fix, etc.)
+- Ensure the message is not generic (avoid words like "Update" or "Fix" without context)
+- Review the draft message to ensure it accurately reflects the changes and their purpose
+
+
+{{ .AttributionStep }}
+
+- In order to ensure good formatting, ALWAYS pass the commit message via a HEREDOC, a la this example:
+ {{ .AttributionExample }}
+
+5. If the commit fails due to pre-commit hook changes, retry the commit ONCE to include these automated changes. If it fails again, it usually means a pre-commit hook is preventing the commit. If the commit succeeds but you notice that files were modified by the pre-commit hook, you MUST amend your commit to include them.
+
+6. Finally, run git status to make sure the commit succeeded.
+
+Important notes:
+
+- When possible, combine the "git add" and "git commit" commands into a single "git commit -am" command, to speed things up
+- However, be careful not to stage files (e.g. with 'git add .') for commits that aren't part of the change, they may have untracked files they want to keep around, but not commit.
+- NEVER update the git config
+- DO NOT push to the remote repository
+- IMPORTANT: Never use git commands with the -i flag (like git rebase -i or git add -i) since they require interactive input which is not supported.
+- If there are no changes to commit (i.e., no untracked files and no modifications), do not create an empty commit
+- Ensure your commit message is meaningful and concise. It should explain the purpose of the changes, not just describe them.
+- Return an empty response - the user will see the git output directly
+
+# Creating pull requests
+
+Use the gh command via the Bash tool for ALL GitHub-related tasks, including working with issues, pull requests, checks, and releases. If given a GitHub URL, use the gh command to get the information needed.
+
+IMPORTANT: When the user asks you to create a pull request, follow these steps carefully:
+
+1. Understand the current state of the branch. Remember to send a single message that contains multiple tool_use blocks (it is VERY IMPORTANT that you do this in a single message, otherwise it will feel slow to the user!):
+
+- Run a git status command to see all untracked files.
+- Run a git diff command to see both staged and unstaged changes that will be committed.
+- Check if the current branch tracks a remote branch and is up to date with the remote, so you know if you need to push to the remote
+- Run a git log command and 'git diff main...HEAD' to understand the full commit history for the current branch (from the time it diverged from the 'main' branch).
+
+2. Create new branch if needed
+
+3. Commit changes if needed
+
+4. Push to remote with -u flag if needed
+
+5. Analyze all changes that will be included in the pull request, making sure to look at all relevant commits (not just the latest commit, but all commits that will be included in the pull request!), and draft a pull request summary. Wrap your analysis process in <pr_analysis> tags:
+
+<pr_analysis>
+
+- List the commits since diverging from the main branch
+- Summarize the nature of the changes (e.g. new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)
+- Brainstorm the purpose or motivation behind these changes
+- Assess the impact of these changes on the overall project
+- Do not use tools to explore code, beyond what is available in the git context
+- Check for any sensitive information that shouldn't be committed
+- Draft a concise (1-2 bullet points) pull request summary that focuses on the "why" rather than the "what"
+- Ensure the summary accurately reflects all changes since diverging from the main branch
+- Ensure your language is clear, concise, and to the point
+- Ensure the summary accurately reflects the changes and their purpose (i.e. "add" means a wholly new feature, "update" means an enhancement to an existing feature, "fix" means a bug fix, etc.)
+- Ensure the summary is not generic (avoid words like "Update" or "Fix" without context)
+- Review the draft summary to ensure it accurately reflects the changes and their purpose
+</pr_analysis>
+
+6. Create PR using gh pr create with the format below. Use a HEREDOC to pass the body to ensure correct formatting.
+
+ gh pr create --title "the pr title" --body "$(cat <<'EOF'
+
+## Summary
+
+<1-3 bullet points>
+
+## Test plan
+
+[Checklist of TODOs for testing the pull request...]
+
+{{ .PRAttribution }}
+EOF
+)"
+
+
+Important:
+
+- Return an empty response - the user will see the gh output directly
+- Never update git config
diff --git a/internal/llm/tools/diagnostics.go b/internal/llm/tools/diagnostics.go
index 17b93ab07cae29f2a274c0e289be02ac10827af2..8e0c332cef76e40d5e24e74ed3260b95aab8b04b 100644
--- a/internal/llm/tools/diagnostics.go
+++ b/internal/llm/tools/diagnostics.go
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"log/slog"
@@ -22,30 +23,10 @@ type diagnosticsTool struct {
lspClients *csync.Map[string, *lsp.Client]
}
-const (
- DiagnosticsToolName = "diagnostics"
- diagnosticsDescription = `Get diagnostics for a file and/or project.
-WHEN TO USE THIS TOOL:
-- Use when you need to check for errors or warnings in your code
-- Helpful for debugging and ensuring code quality
-- Good for getting a quick overview of issues in a file or project
-HOW TO USE:
-- Provide a path to a file to get diagnostics for that file
-- Leave the path empty to get diagnostics for the entire project
-- Results are displayed in a structured format with severity levels
-FEATURES:
-- Displays errors, warnings, and hints
-- Groups diagnostics by severity
-- Provides detailed information about each diagnostic
-LIMITATIONS:
-- Results are limited to the diagnostics provided by the LSP clients
-- May not cover all possible issues in the code
-- Does not provide suggestions for fixing issues
-TIPS:
-- Use in conjunction with other tools for a comprehensive code review
-- Combine with the LSP client for real-time diagnostics
-`
-)
+const DiagnosticsToolName = "diagnostics"
+
+//go:embed diagnostics.md
+var diagnosticsDescription []byte
func NewDiagnosticsTool(lspClients *csync.Map[string, *lsp.Client]) BaseTool {
return &diagnosticsTool{
@@ -60,7 +41,7 @@ func (b *diagnosticsTool) Name() string {
func (b *diagnosticsTool) Info() ToolInfo {
return ToolInfo{
Name: DiagnosticsToolName,
- Description: diagnosticsDescription,
+ Description: string(diagnosticsDescription),
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
diff --git a/internal/llm/tools/diagnostics.md b/internal/llm/tools/diagnostics.md
new file mode 100644
index 0000000000000000000000000000000000000000..3f00d66039ade58615d46372bbf7f3cd6e619dcf
--- /dev/null
+++ b/internal/llm/tools/diagnostics.md
@@ -0,0 +1,21 @@
+Get diagnostics for a file and/or project.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to check for errors or warnings in your code
+- Helpful for debugging and ensuring code quality
+- Good for getting a quick overview of issues in a file or project
+
+HOW TO USE:
+
+- Provide a path to a file to get diagnostics for that file
+- Leave the path empty to get diagnostics for the entire project
+- Results are displayed in a structured format with severity levels
+
+FEATURES:
+
+- Displays errors, warnings, and hints
+- Groups diagnostics by severity
+- Provides detailed information about each diagnostic
+
+LIMITATIONS:
+
+- Results are limited to the diagnostics provided by the LSP clients
+- May not cover all possible issues in the code
+- Does not provide suggestions for fixing issues
+
+TIPS:
+
+- Use in conjunction with other tools for a comprehensive code review
+- Combine with the LSP client for real-time diagnostics
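For readers skimming the patch, the refactor above follows one pattern repeated for every tool: the long description string moves into a sibling markdown file and is embedded at build time. Below is a minimal, self-contained sketch of that pattern; the package, file, and function names here are hypothetical, not taken from the repository.

```go
package exampletool // hypothetical package, for illustration only

import (
	_ "embed" // blank import enables the go:embed directive below
)

// description.md must sit next to this source file; the toolchain embeds it
// at build time, so there is no runtime file I/O.
//
//go:embed description.md
var description []byte

// Describe exposes the embedded markdown as a string, mirroring the
// string(description) conversion each tool's Info() method performs above.
func Describe() string {
	return string(description)
}
```

Embedding into a `string` variable instead of `[]byte` would also work and would avoid the conversion; the patch consistently uses `[]byte`.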
diff --git a/internal/llm/tools/download.go b/internal/llm/tools/download.go
index fc0c33a846305d002df2bd6e21a54cbe088a511e..63092cf24a0dbb98e7aef58e9d4ee867525ae945 100644
--- a/internal/llm/tools/download.go
+++ b/internal/llm/tools/download.go
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"io"
@@ -32,38 +33,10 @@ type downloadTool struct {
workingDir string
}
-const (
- DownloadToolName = "download"
- downloadToolDescription = `Downloads binary data from a URL and saves it to a local file.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to download files, images, or other binary data from URLs
-- Helpful for downloading assets, documents, or any file type
-- Useful for saving remote content locally for processing or storage
-
-HOW TO USE:
-- Provide the URL to download from
-- Specify the local file path where the content should be saved
-- Optionally set a timeout for the request
-
-FEATURES:
-- Downloads any file type (binary or text)
-- Automatically creates parent directories if they don't exist
-- Handles large files efficiently with streaming
-- Sets reasonable timeouts to prevent hanging
-- Validates input parameters before making requests
-
-LIMITATIONS:
-- Maximum file size is 100MB
-- Only supports HTTP and HTTPS protocols
-- Cannot handle authentication or cookies
-- Some websites may block automated requests
-- Will overwrite existing files without warning
-
-TIPS:
-- Use absolute paths or paths relative to the working directory
-- Set appropriate timeouts for large files or slow connections`
-)
+const DownloadToolName = "download"
+
+//go:embed download.md
+var downloadDescription []byte
func NewDownloadTool(permissions permission.Service, workingDir string) BaseTool {
return &downloadTool{
@@ -87,7 +60,7 @@ func (t *downloadTool) Name() string {
func (t *downloadTool) Info() ToolInfo {
return ToolInfo{
Name: DownloadToolName,
- Description: downloadToolDescription,
+ Description: string(downloadDescription),
Parameters: map[string]any{
"url": map[string]any{
"type": "string",
diff --git a/internal/llm/tools/download.md b/internal/llm/tools/download.md
new file mode 100644
index 0000000000000000000000000000000000000000..4a9516100dfa2c38f3a4bac588793964569e78b3
--- /dev/null
+++ b/internal/llm/tools/download.md
@@ -0,0 +1,34 @@
+Downloads binary data from a URL and saves it to a local file.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to download files, images, or other binary data from URLs
+- Helpful for downloading assets, documents, or any file type
+- Useful for saving remote content locally for processing or storage
+
+HOW TO USE:
+
+- Provide the URL to download from
+- Specify the local file path where the content should be saved
+- Optionally set a timeout for the request
+
+FEATURES:
+
+- Downloads any file type (binary or text)
+- Automatically creates parent directories if they don't exist
+- Handles large files efficiently with streaming
+- Sets reasonable timeouts to prevent hanging
+- Validates input parameters before making requests
+
+LIMITATIONS:
+
+- Maximum file size is 100MB
+- Only supports HTTP and HTTPS protocols
+- Cannot handle authentication or cookies
+- Some websites may block automated requests
+- Will overwrite existing files without warning
+
+TIPS:
+
+- Use absolute paths or paths relative to the working directory
+- Set appropriate timeouts for large files or slow connections
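As a rough illustration of the streaming and size-cap behavior described above, here is a hedged, generic sketch. It is not the tool's actual implementation: the 100MB cap and the overwrite/parent-directory behavior come from the description, while `downloadToFile` and everything else are assumptions.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
	"path/filepath"
	"time"
)

const maxDownloadSize = 100 * 1024 * 1024 // 100MB, per the limitation listed above

// downloadToFile streams the response body to disk so large files are never
// held fully in memory, creating parent directories as needed. Note that
// LimitReader silently stops at the cap; a production version would likely
// report an error instead.
func downloadToFile(url, dest string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status: %s", resp.Status)
	}
	if err := os.MkdirAll(filepath.Dir(dest), 0o755); err != nil {
		return err
	}
	out, err := os.Create(dest) // overwrites existing files, as documented
	if err != nil {
		return err
	}
	defer out.Close()
	_, err = io.Copy(out, io.LimitReader(resp.Body, maxDownloadSize))
	return err
}

func main() {
	if err := downloadToFile("https://example.com/file.bin", "out/file.bin", 30*time.Second); err != nil {
		fmt.Fprintln(os.Stderr, "download failed:", err)
	}
}
```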
diff --git a/internal/llm/tools/edit.go b/internal/llm/tools/edit.go
index d819ceb0af54b5682aecda703850a7b5a795e97c..ae4322e08da087127a3238a1e3edf6c1a9a2e37a 100644
--- a/internal/llm/tools/edit.go
+++ b/internal/llm/tools/edit.go
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"log/slog"
@@ -46,64 +47,10 @@ type editTool struct {
workingDir string
}
-const (
- EditToolName = "edit"
- editDescription = `Edits files by replacing text, creating new files, or deleting content. For moving or renaming files, use the Bash tool with the 'mv' command instead. For larger file edits, use the FileWrite tool to overwrite files.
+const EditToolName = "edit"
-Before using this tool:
-
-1. Use the FileRead tool to understand the file's contents and context
-
-2. Verify the directory path is correct (only applicable when creating new files):
- - Use the LS tool to verify the parent directory exists and is the correct location
-
-To make a file edit, provide the following:
-1. file_path: The absolute path to the file to modify (must be absolute, not relative)
-2. old_string: The text to replace (must be unique within the file, and must match the file contents exactly, including all whitespace and indentation)
-3. new_string: The edited text to replace the old_string
-4. replace_all: Replace all occurrences of old_string (default false)
-
-Special cases:
-- To create a new file: provide file_path and new_string, leave old_string empty
-- To delete content: provide file_path and old_string, leave new_string empty
-
-The tool will replace ONE occurrence of old_string with new_string in the specified file by default. Set replace_all to true to replace all occurrences.
-
-CRITICAL REQUIREMENTS FOR USING THIS TOOL:
-
-1. UNIQUENESS: When replace_all is false (default), the old_string MUST uniquely identify the specific instance you want to change. This means:
- - Include AT LEAST 3-5 lines of context BEFORE the change point
- - Include AT LEAST 3-5 lines of context AFTER the change point
- - Include all whitespace, indentation, and surrounding code exactly as it appears in the file
-
-2. SINGLE INSTANCE: When replace_all is false, this tool can only change ONE instance at a time. If you need to change multiple instances:
- - Set replace_all to true to replace all occurrences at once
- - Or make separate calls to this tool for each instance
- - Each call must uniquely identify its specific instance using extensive context
-
-3. VERIFICATION: Before using this tool:
- - Check how many instances of the target text exist in the file
- - If multiple instances exist and replace_all is false, gather enough context to uniquely identify each one
- - Plan separate tool calls for each instance or use replace_all
-
-WARNING: If you do not follow these requirements:
- - The tool will fail if old_string matches multiple locations and replace_all is false
- - The tool will fail if old_string doesn't match exactly (including whitespace)
- - You may change the wrong instance if you don't include enough context
-
-When making edits:
- - Ensure the edit results in idiomatic, correct code
- - Do not leave the code in a broken state
- - Always use absolute file paths (starting with /)
-
-WINDOWS NOTES:
-- File paths should use forward slashes (/) for cross-platform compatibility
-- On Windows, absolute paths start with drive letters (C:/) but forward slashes work throughout
-- File permissions are handled automatically by the Go runtime
-- Always assumes \n for line endings. The tool will handle \r\n conversion automatically if needed.
-
-Remember: when making multiple file edits in a row to the same file, you should prefer to send all edits in a single message with multiple calls to this tool, rather than multiple messages with a single call each.`
-)
+//go:embed edit.md
+var editDescription []byte
func NewEditTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, files history.Service, workingDir string) BaseTool {
return &editTool{
@@ -121,7 +68,7 @@ func (e *editTool) Name() string {
func (e *editTool) Info() ToolInfo {
return ToolInfo{
Name: EditToolName,
- Description: editDescription,
+ Description: string(editDescription),
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
diff --git a/internal/llm/tools/edit.md b/internal/llm/tools/edit.md
new file mode 100644
index 0000000000000000000000000000000000000000..2f9ec73883f3a68546e7b14bd53a12f9f69cc430
--- /dev/null
+++ b/internal/llm/tools/edit.md
@@ -0,0 +1,60 @@
+Edits files by replacing text, creating new files, or deleting content. For moving or renaming files, use the Bash tool with the 'mv' command instead. For larger file edits, use the FileWrite tool to overwrite files.
+
+Before using this tool:
+
+1. Use the FileRead tool to understand the file's contents and context
+
+2. Verify the directory path is correct (only applicable when creating new files):
+ - Use the LS tool to verify the parent directory exists and is the correct location
+
+To make a file edit, provide the following:
+
+1. file_path: The absolute path to the file to modify (must be absolute, not relative)
+2. old_string: The text to replace (must be unique within the file, and must match the file contents exactly, including all whitespace and indentation)
+3. new_string: The edited text to replace the old_string
+4. replace_all: Replace all occurrences of old_string (default false)
+
+Special cases:
+
+- To create a new file: provide file_path and new_string, leave old_string empty
+- To delete content: provide file_path and old_string, leave new_string empty
+
+The tool will replace ONE occurrence of old_string with new_string in the specified file by default. Set replace_all to true to replace all occurrences.
+
+CRITICAL REQUIREMENTS FOR USING THIS TOOL:
+
+1. UNIQUENESS: When replace_all is false (default), the old_string MUST uniquely identify the specific instance you want to change. This means:
+ - Include AT LEAST 3-5 lines of context BEFORE the change point
+ - Include AT LEAST 3-5 lines of context AFTER the change point
+ - Include all whitespace, indentation, and surrounding code exactly as it appears in the file
+
+2. SINGLE INSTANCE: When replace_all is false, this tool can only change ONE instance at a time. If you need to change multiple instances:
+ - Set replace_all to true to replace all occurrences at once
+ - Or make separate calls to this tool for each instance
+ - Each call must uniquely identify its specific instance using extensive context
+
+3. VERIFICATION: Before using this tool:
+ - Check how many instances of the target text exist in the file
+ - If multiple instances exist and replace_all is false, gather enough context to uniquely identify each one
+ - Plan separate tool calls for each instance or use replace_all
+
+WARNING: If you do not follow these requirements:
+
+- The tool will fail if old_string matches multiple locations and replace_all is false
+- The tool will fail if old_string doesn't match exactly (including whitespace)
+- You may change the wrong instance if you don't include enough context
+
+When making edits:
+
+- Ensure the edit results in idiomatic, correct code
+- Do not leave the code in a broken state
+- Always use absolute file paths (starting with /)
+
+WINDOWS NOTES:
+
+- File paths should use forward slashes (/) for cross-platform compatibility
+- On Windows, absolute paths start with drive letters (C:/) but forward slashes work throughout
+- File permissions are handled automatically by the Go runtime
+- Always assumes \n for line endings. The tool will handle \r\n conversion automatically if needed.
+
+Remember: when making multiple file edits in a row to the same file, you should prefer to send all edits in a single message with multiple calls to this tool, rather than multiple messages with a single call each.
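The uniqueness and replace_all rules above are easiest to see in code. Here is a hedged sketch of those semantics applied to an in-memory string; the `applyEdit` helper and its error messages are illustrative, not the tool's actual code.

```go
package main

import (
	"fmt"
	"strings"
)

// applyEdit mirrors the rules above: with replaceAll false, oldString must
// match exactly once; with replaceAll true, every occurrence is replaced; an
// empty oldString is the "create a new file" case.
func applyEdit(content, oldString, newString string, replaceAll bool) (string, error) {
	if oldString == "" {
		return newString, nil
	}
	n := strings.Count(content, oldString)
	switch {
	case n == 0:
		return "", fmt.Errorf("old_string not found (must match exactly, including whitespace)")
	case n > 1 && !replaceAll:
		return "", fmt.Errorf("old_string matches %d locations; add more context or set replace_all", n)
	case replaceAll:
		return strings.ReplaceAll(content, oldString, newString), nil
	default:
		return strings.Replace(content, oldString, newString, 1), nil
	}
}

func main() {
	out, err := applyEdit("a = 1\nb = 1\n", "a = 1", "a = 2", false)
	fmt.Printf("%q %v\n", out, err) // "a = 2\nb = 1\n" <nil>
}
```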
diff --git a/internal/llm/tools/fetch.go b/internal/llm/tools/fetch.go
index 156dbff7edd5747c4e758fc09cf94a5230c50deb..e9c5732462651cd9e31337f91f4f4d91a36e70c1 100644
--- a/internal/llm/tools/fetch.go
+++ b/internal/llm/tools/fetch.go
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"io"
@@ -33,38 +34,10 @@ type fetchTool struct {
workingDir string
}
-const (
- FetchToolName = "fetch"
- fetchToolDescription = `Fetches content from a URL and returns it in the specified format.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to download content from a URL
-- Helpful for retrieving documentation, API responses, or web content
-- Useful for getting external information to assist with tasks
-
-HOW TO USE:
-- Provide the URL to fetch content from
-- Specify the desired output format (text, markdown, or html)
-- Optionally set a timeout for the request
-
-FEATURES:
-- Supports three output formats: text, markdown, and html
-- Automatically handles HTTP redirects
-- Sets reasonable timeouts to prevent hanging
-- Validates input parameters before making requests
-
-LIMITATIONS:
-- Maximum response size is 5MB
-- Only supports HTTP and HTTPS protocols
-- Cannot handle authentication or cookies
-- Some websites may block automated requests
-
-TIPS:
-- Use text format for plain text content or simple API responses
-- Use markdown format for content that should be rendered with formatting
-- Use html format when you need the raw HTML structure
-- Set appropriate timeouts for potentially slow websites`
-)
+const FetchToolName = "fetch"
+
+//go:embed fetch.md
+var fetchDescription []byte
func NewFetchTool(permissions permission.Service, workingDir string) BaseTool {
return &fetchTool{
@@ -88,7 +61,7 @@ func (t *fetchTool) Name() string {
func (t *fetchTool) Info() ToolInfo {
return ToolInfo{
Name: FetchToolName,
- Description: fetchToolDescription,
+ Description: string(fetchDescription),
Parameters: map[string]any{
"url": map[string]any{
"type": "string",
diff --git a/internal/llm/tools/fetch.md b/internal/llm/tools/fetch.md
new file mode 100644
index 0000000000000000000000000000000000000000..ffb7bbaf968e4ae4e338ab1997211dc89a9234d1
--- /dev/null
+++ b/internal/llm/tools/fetch.md
@@ -0,0 +1,34 @@
+Fetches content from a URL and returns it in the specified format.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to download content from a URL
+- Helpful for retrieving documentation, API responses, or web content
+- Useful for getting external information to assist with tasks
+
+HOW TO USE:
+
+- Provide the URL to fetch content from
+- Specify the desired output format (text, markdown, or html)
+- Optionally set a timeout for the request
+
+FEATURES:
+
+- Supports three output formats: text, markdown, and html
+- Automatically handles HTTP redirects
+- Sets reasonable timeouts to prevent hanging
+- Validates input parameters before making requests
+
+LIMITATIONS:
+
+- Maximum response size is 5MB
+- Only supports HTTP and HTTPS protocols
+- Cannot handle authentication or cookies
+- Some websites may block automated requests
+
+TIPS:
+
+- Use text format for plain text content or simple API responses
+- Use markdown format for content that should be rendered with formatting
+- Use html format when you need the raw HTML structure
+- Set appropriate timeouts for potentially slow websites
diff --git a/internal/llm/tools/glob.go b/internal/llm/tools/glob.go
index 1d558cc7d65c8f7d766f8251073416248442c25c..85207c28ea3408ab4a3867e983766d35c9474b47 100644
--- a/internal/llm/tools/glob.go
+++ b/internal/llm/tools/glob.go
@@ -3,6 +3,7 @@ package tools
import (
"bytes"
"context"
+ _ "embed"
"encoding/json"
"fmt"
"log/slog"
@@ -14,48 +15,10 @@ import (
"github.com/charmbracelet/crush/internal/fsext"
)
-const (
- GlobToolName = "glob"
- globDescription = `Fast file pattern matching tool that finds files by name and pattern, returning matching paths sorted by modification time (newest first).
-
-WHEN TO USE THIS TOOL:
-- Use when you need to find files by name patterns or extensions
-- Great for finding specific file types across a directory structure
-- Useful for discovering files that match certain naming conventions
-
-HOW TO USE:
-- Provide a glob pattern to match against file paths
-- Optionally specify a starting directory (defaults to current working directory)
-- Results are sorted with most recently modified files first
-
-GLOB PATTERN SYNTAX:
-- '*' matches any sequence of non-separator characters
-- '**' matches any sequence of characters, including separators
-- '?' matches any single non-separator character
-- '[...]' matches any character in the brackets
-- '[!...]' matches any character not in the brackets
-
-COMMON PATTERN EXAMPLES:
-- '*.js' - Find all JavaScript files in the current directory
-- '**/*.js' - Find all JavaScript files in any subdirectory
-- 'src/**/*.{ts,tsx}' - Find all TypeScript files in the src directory
-- '*.{html,css,js}' - Find all HTML, CSS, and JS files
-
-LIMITATIONS:
-- Results are limited to 100 files (newest first)
-- Does not search file contents (use Grep tool for that)
-- Hidden files (starting with '.') are skipped
-
-WINDOWS NOTES:
-- Path separators are handled automatically (both / and \ work)
-- Uses ripgrep (rg) command if available, otherwise falls back to built-in Go implementation
-
-TIPS:
-- Patterns should use forward slashes (/) for cross-platform compatibility
-- For the most useful results, combine with the Grep tool: first find files with Glob, then search their contents with Grep
-- When doing iterative exploration that may require multiple rounds of searching, consider using the Agent tool instead
-- Always check if results are truncated and refine your search pattern if needed`
-)
+const GlobToolName = "glob"
+
+//go:embed glob.md
+var globDescription []byte
type GlobParams struct {
Pattern string `json:"pattern"`
@@ -84,7 +47,7 @@ func (g *globTool) Name() string {
func (g *globTool) Info() ToolInfo {
return ToolInfo{
Name: GlobToolName,
- Description: globDescription,
+ Description: string(globDescription),
Parameters: map[string]any{
"pattern": map[string]any{
"type": "string",
diff --git a/internal/llm/tools/glob.md b/internal/llm/tools/glob.md
new file mode 100644
index 0000000000000000000000000000000000000000..1d73d42c9bc59372368965f8c04ef0c068d3deca
--- /dev/null
+++ b/internal/llm/tools/glob.md
@@ -0,0 +1,46 @@
+Fast file pattern matching tool that finds files by name and pattern, returning matching paths sorted by modification time (newest first).
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to find files by name patterns or extensions
+- Great for finding specific file types across a directory structure
+- Useful for discovering files that match certain naming conventions
+
+HOW TO USE:
+
+- Provide a glob pattern to match against file paths
+- Optionally specify a starting directory (defaults to current working directory)
+- Results are sorted with most recently modified files first
+
+GLOB PATTERN SYNTAX:
+
+- '\*' matches any sequence of non-separator characters
+- '\*\*' matches any sequence of characters, including separators
+- '?' matches any single non-separator character
+- '[...]' matches any character in the brackets
+- '[!...]' matches any character not in the brackets
+
+COMMON PATTERN EXAMPLES:
+
+- '\*.js' - Find all JavaScript files in the current directory
+- '\*\*/\*.js' - Find all JavaScript files in any subdirectory
+- 'src/\*\*/\*.{ts,tsx}' - Find all TypeScript files in the src directory
+- '\*.{html,css,js}' - Find all HTML, CSS, and JS files
+
+LIMITATIONS:
+
+- Results are limited to 100 files (newest first)
+- Does not search file contents (use Grep tool for that)
+- Hidden files (starting with '.') are skipped
+
+WINDOWS NOTES:
+
+- Path separators are handled automatically (both / and \ work)
+- Uses ripgrep (rg) command if available, otherwise falls back to built-in Go implementation
+
+TIPS:
+
+- Patterns should use forward slashes (/) for cross-platform compatibility
+- For the most useful results, combine with the Grep tool: first find files with Glob, then search their contents with Grep
+- When doing iterative exploration that may require multiple rounds of searching, consider using the Agent tool instead
+- Always check if results are truncated and refine your search pattern if needed
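To make the documented ordering concrete, here is a hedged sketch of the newest-first sort and the 100-result cap. It uses a plain extension match in place of full glob syntax; the names and walk logic are assumptions, not the ripgrep-backed implementation the tool prefers.

```go
package main

import (
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
	"sort"
	"strings"
)

const maxGlobResults = 100 // cap documented above

type match struct {
	path    string
	modTime int64
}

// findByExtension walks root, skips hidden entries, and returns up to 100
// matching paths sorted newest-first, as the description above specifies.
func findByExtension(root, ext string) ([]string, error) {
	var matches []match
	err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if path != root && strings.HasPrefix(d.Name(), ".") {
			if d.IsDir() {
				return filepath.SkipDir // skip hidden directories entirely
			}
			return nil
		}
		if d.IsDir() || !strings.HasSuffix(path, ext) {
			return nil
		}
		info, err := d.Info()
		if err != nil {
			return err
		}
		matches = append(matches, match{path: path, modTime: info.ModTime().UnixNano()})
		return nil
	})
	if err != nil {
		return nil, err
	}
	sort.Slice(matches, func(i, j int) bool { return matches[i].modTime > matches[j].modTime })
	var paths []string
	for _, m := range matches {
		if len(paths) == maxGlobResults {
			break
		}
		paths = append(paths, m.path)
	}
	return paths, nil
}

func main() {
	paths, err := findByExtension(".", ".go")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	for _, p := range paths {
		fmt.Println(p)
	}
}
```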
diff --git a/internal/llm/tools/grep.go b/internal/llm/tools/grep.go
index 1160fc287088f960d15fa1bf847eb13f77e84b92..30d6e0b16a06c28aa33783f76fcdaa5ccb800915 100644
--- a/internal/llm/tools/grep.go
+++ b/internal/llm/tools/grep.go
@@ -3,6 +3,7 @@ package tools
import (
"bufio"
"context"
+ _ "embed"
"encoding/json"
"fmt"
"io"
@@ -92,55 +93,10 @@ type grepTool struct {
workingDir string
}
-const (
- GrepToolName = "grep"
- grepDescription = `Fast content search tool that finds files containing specific text or patterns, returning matching file paths sorted by modification time (newest first).
-
-WHEN TO USE THIS TOOL:
-- Use when you need to find files containing specific text or patterns
-- Great for searching code bases for function names, variable declarations, or error messages
-- Useful for finding all files that use a particular API or pattern
-
-HOW TO USE:
-- Provide a regex pattern to search for within file contents
-- Set literal_text=true if you want to search for the exact text with special characters (recommended for non-regex users)
-- Optionally specify a starting directory (defaults to current working directory)
-- Optionally provide an include pattern to filter which files to search
-- Results are sorted with most recently modified files first
-
-REGEX PATTERN SYNTAX (when literal_text=false):
-- Supports standard regular expression syntax
-- 'function' searches for the literal text "function"
-- 'log\..*Error' finds text starting with "log." and ending with "Error"
-- 'import\s+.*\s+from' finds import statements in JavaScript/TypeScript
-
-COMMON INCLUDE PATTERN EXAMPLES:
-- '*.js' - Only search JavaScript files
-- '*.{ts,tsx}' - Only search TypeScript files
-- '*.go' - Only search Go files
-
-LIMITATIONS:
-- Results are limited to 100 files (newest first)
-- Performance depends on the number of files being searched
-- Very large binary files may be skipped
-- Hidden files (starting with '.') are skipped
-
-IGNORE FILE SUPPORT:
-- Respects .gitignore patterns to skip ignored files and directories
-- Respects .crushignore patterns for additional ignore rules
-- Both ignore files are automatically detected in the search root directory
-
-CROSS-PLATFORM NOTES:
-- Uses ripgrep (rg) command if available for better performance
-- Falls back to built-in Go implementation if ripgrep is not available
-- File paths are normalized automatically for cross-platform compatibility
-
-TIPS:
-- For faster, more targeted searches, first use Glob to find relevant files, then use Grep
-- When doing iterative exploration that may require multiple rounds of searching, consider using the Agent tool instead
-- Always check if results are truncated and refine your search pattern if needed
-- Use literal_text=true when searching for exact text containing special characters like dots, parentheses, etc.`
-)
+const GrepToolName = "grep"
+
+//go:embed grep.md
+var grepDescription []byte
func NewGrepTool(workingDir string) BaseTool {
return &grepTool{
@@ -155,7 +111,7 @@ func (g *grepTool) Name() string {
func (g *grepTool) Info() ToolInfo {
return ToolInfo{
Name: GrepToolName,
- Description: grepDescription,
+ Description: string(grepDescription),
Parameters: map[string]any{
"pattern": map[string]any{
"type": "string",
diff --git a/internal/llm/tools/grep.md b/internal/llm/tools/grep.md
new file mode 100644
index 0000000000000000000000000000000000000000..c17a0ad1b8531a4fb9fd73b56e89973c749f91b1
--- /dev/null
+++ b/internal/llm/tools/grep.md
@@ -0,0 +1,54 @@
+Fast content search tool that finds files containing specific text or patterns, returning matching file paths sorted by modification time (newest first).
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to find files containing specific text or patterns
+- Great for searching code bases for function names, variable declarations, or error messages
+- Useful for finding all files that use a particular API or pattern
+
+HOW TO USE:
+
+- Provide a regex pattern to search for within file contents
+- Set literal_text=true if you want to search for the exact text with special characters (recommended for non-regex users)
+- Optionally specify a starting directory (defaults to current working directory)
+- Optionally provide an include pattern to filter which files to search
+- Results are sorted with most recently modified files first
+
+REGEX PATTERN SYNTAX (when literal_text=false):
+
+- Supports standard regular expression syntax
+- 'function' searches for the literal text "function"
+- 'log\..\*Error' finds text starting with "log." and ending with "Error"
+- 'import\s+.\*\s+from' finds import statements in JavaScript/TypeScript
+
+COMMON INCLUDE PATTERN EXAMPLES:
+
+- '\*.js' - Only search JavaScript files
+- '\*.{ts,tsx}' - Only search TypeScript files
+- '\*.go' - Only search Go files
+
+LIMITATIONS:
+
+- Results are limited to 100 files (newest first)
+- Performance depends on the number of files being searched
+- Very large binary files may be skipped
+- Hidden files (starting with '.') are skipped
+
+IGNORE FILE SUPPORT:
+
+- Respects .gitignore patterns to skip ignored files and directories
+- Respects .crushignore patterns for additional ignore rules
+- Both ignore files are automatically detected in the search root directory
+
+CROSS-PLATFORM NOTES:
+
+- Uses ripgrep (rg) command if available for better performance
+- Falls back to built-in Go implementation if ripgrep is not available
+- File paths are normalized automatically for cross-platform compatibility
+
+TIPS:
+
+- For faster, more targeted searches, first use Glob to find relevant files, then use Grep
+- When doing iterative exploration that may require multiple rounds of searching, consider using the Agent tool instead
+- Always check if results are truncated and refine your search pattern if needed
+- Use literal_text=true when searching for exact text containing special characters like dots, parentheses, etc.
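One small but important detail above is the literal_text switch. A hedged sketch of how such an option is typically implemented in Go follows; `compilePattern` is hypothetical, but `regexp.QuoteMeta` is the standard-library escape it relies on.

```go
package main

import (
	"fmt"
	"regexp"
)

// compilePattern mirrors the literal_text behavior described above: when the
// caller asks for an exact-text search, the pattern is escaped so characters
// like '.' and '(' lose their regex meaning before compilation.
func compilePattern(pattern string, literalText bool) (*regexp.Regexp, error) {
	if literalText {
		pattern = regexp.QuoteMeta(pattern)
	}
	return regexp.Compile(pattern)
}

func main() {
	re, _ := compilePattern("log.Fatal(err)", true)
	fmt.Println(re.MatchString("log.Fatal(err)")) // true: exact text matches
	fmt.Println(re.MatchString("logXFatal(err)")) // false: '.' is now literal
}
```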
diff --git a/internal/llm/tools/ls.go b/internal/llm/tools/ls.go
index 2546dd77a6b64faa24f54cf604710d568ffe9c5b..f421e69e7af938801aa9c3affacfe30ed669fabc 100644
--- a/internal/llm/tools/ls.go
+++ b/internal/llm/tools/ls.go
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"os"
@@ -40,44 +41,13 @@ type lsTool struct {
}
const (
- LSToolName = "ls"
- MaxLSFiles = 1000
- lsDescription = `Directory listing tool that shows files and subdirectories in a tree structure, helping you explore and understand the project organization.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to explore the structure of a directory
-- Helpful for understanding the organization of a project
-- Good first step when getting familiar with a new codebase
-
-HOW TO USE:
-- Provide a path to list (defaults to current working directory)
-- Optionally specify glob patterns to ignore
-- Results are displayed in a tree structure
-
-FEATURES:
-- Displays a hierarchical view of files and directories
-- Automatically skips hidden files/directories (starting with '.')
-- Skips common system directories like __pycache__
-- Can filter out files matching specific patterns
-
-LIMITATIONS:
-- Results are limited to 1000 files
-- Very large directories will be truncated
-- Does not show file sizes or permissions
-- Cannot recursively list all directories in a large project
-
-WINDOWS NOTES:
-- Hidden file detection uses Unix convention (files starting with '.')
-- Windows-specific hidden files (with hidden attribute) are not automatically skipped
-- Common Windows directories like System32, Program Files are not in default ignore list
-- Path separators are handled automatically (both / and \ work)
-
-TIPS:
-- Use Glob tool for finding files by name patterns instead of browsing
-- Use Grep tool for searching file contents
-- Combine with other tools for more effective exploration`
+ LSToolName = "ls"
+ MaxLSFiles = 1000
)
+//go:embed ls.md
+var lsDescription []byte
+
func NewLsTool(permissions permission.Service, workingDir string) BaseTool {
return &lsTool{
workingDir: workingDir,
@@ -92,7 +62,7 @@ func (l *lsTool) Name() string {
func (l *lsTool) Info() ToolInfo {
return ToolInfo{
Name: LSToolName,
- Description: lsDescription,
+ Description: string(lsDescription),
Parameters: map[string]any{
"path": map[string]any{
"type": "string",
diff --git a/internal/llm/tools/ls.md b/internal/llm/tools/ls.md
new file mode 100644
index 0000000000000000000000000000000000000000..798ddcc90f9659032870a3b893129a7ec2edad50
--- /dev/null
+++ b/internal/llm/tools/ls.md
@@ -0,0 +1,40 @@
+Directory listing tool that shows files and subdirectories in a tree structure, helping you explore and understand the project organization.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to explore the structure of a directory
+- Helpful for understanding the organization of a project
+- Good first step when getting familiar with a new codebase
+
+HOW TO USE:
+
+- Provide a path to list (defaults to current working directory)
+- Optionally specify glob patterns to ignore
+- Results are displayed in a tree structure
+
+FEATURES:
+
+- Displays a hierarchical view of files and directories
+- Automatically skips hidden files/directories (starting with '.')
+- Skips common system directories like \_\_pycache\_\_
+- Can filter out files matching specific patterns
+
+LIMITATIONS:
+
+- Results are limited to 1000 files
+- Very large directories will be truncated
+- Does not show file sizes or permissions
+- Cannot recursively list all directories in a large project
+
+WINDOWS NOTES:
+
+- Hidden file detection uses Unix convention (files starting with '.')
+- Windows-specific hidden files (with hidden attribute) are not automatically skipped
+- Common Windows directories like System32, Program Files are not in default ignore list
+- Path separators are handled automatically (both / and \ work)
+
+TIPS:
+
+- Use Glob tool for finding files by name patterns instead of browsing
+- Use Grep tool for searching file contents
+- Combine with other tools for more effective exploration
diff --git a/internal/llm/tools/multiedit.go b/internal/llm/tools/multiedit.go
index 4f99070b1a030e9c8f741f0671a6b2254899f276..1d6e415ceb8cc10bcc32788cddbbdc40fc781907 100644
--- a/internal/llm/tools/multiedit.go
+++ b/internal/llm/tools/multiedit.go
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"log/slog"
@@ -50,51 +51,10 @@ type multiEditTool struct {
workingDir string
}
-const (
- MultiEditToolName = "multiedit"
- multiEditDescription = `This is a tool for making multiple edits to a single file in one operation. It is built on top of the Edit tool and allows you to perform multiple find-and-replace operations efficiently. Prefer this tool over the Edit tool when you need to make multiple edits to the same file.
-
-Before using this tool:
-
-1. Use the Read tool to understand the file's contents and context
-
-2. Verify the directory path is correct
-
-To make multiple file edits, provide the following:
-1. file_path: The absolute path to the file to modify (must be absolute, not relative)
-2. edits: An array of edit operations to perform, where each edit contains:
- - old_string: The text to replace (must match the file contents exactly, including all whitespace and indentation)
- - new_string: The edited text to replace the old_string
- - replace_all: Replace all occurrences of old_string. This parameter is optional and defaults to false.
-
-IMPORTANT:
-- All edits are applied in sequence, in the order they are provided
-- Each edit operates on the result of the previous edit
-- All edits must be valid for the operation to succeed - if any edit fails, none will be applied
-- This tool is ideal when you need to make several changes to different parts of the same file
-
-CRITICAL REQUIREMENTS:
-1. All edits follow the same requirements as the single Edit tool
-2. The edits are atomic - either all succeed or none are applied
-3. Plan your edits carefully to avoid conflicts between sequential operations
-
-WARNING:
-- The tool will fail if edits.old_string doesn't match the file contents exactly (including whitespace)
-- The tool will fail if edits.old_string and edits.new_string are the same
-- Since edits are applied in sequence, ensure that earlier edits don't affect the text that later edits are trying to find
-
-When making edits:
-- Ensure all edits result in idiomatic, correct code
-- Do not leave the code in a broken state
-- Always use absolute file paths (starting with /)
-- Only use emojis if the user explicitly requests it. Avoid adding emojis to files unless asked.
-- Use replace_all for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance.
-
-If you want to create a new file, use:
-- A new file path, including dir name if needed
-- First edit: empty old_string and the new file's contents as new_string
-- Subsequent edits: normal edit operations on the created content`
-)
+const MultiEditToolName = "multiedit"
+
+//go:embed multiedit.md
+var multieditDescription []byte
func NewMultiEditTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, files history.Service, workingDir string) BaseTool {
return &multiEditTool{
@@ -112,7 +72,7 @@ func (m *multiEditTool) Name() string {
func (m *multiEditTool) Info() ToolInfo {
return ToolInfo{
Name: MultiEditToolName,
- Description: multiEditDescription,
+ Description: string(multieditDescription),
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
diff --git a/internal/llm/tools/multiedit.md b/internal/llm/tools/multiedit.md
new file mode 100644
index 0000000000000000000000000000000000000000..4cc64f7a82c46140b79491a897fa8b691e29cc40
--- /dev/null
+++ b/internal/llm/tools/multiedit.md
@@ -0,0 +1,48 @@
+This is a tool for making multiple edits to a single file in one operation. It is built on top of the Edit tool and allows you to perform multiple find-and-replace operations efficiently. Prefer this tool over the Edit tool when you need to make multiple edits to the same file.
+
+Before using this tool:
+
+1. Use the Read tool to understand the file's contents and context
+
+2. Verify the directory path is correct
+
+To make multiple file edits, provide the following:
+
+1. file_path: The absolute path to the file to modify (must be absolute, not relative)
+2. edits: An array of edit operations to perform, where each edit contains:
+ - old_string: The text to replace (must match the file contents exactly, including all whitespace and indentation)
+ - new_string: The edited text to replace the old_string
+ - replace_all: Replace all occurrences of old_string. This parameter is optional and defaults to false.
+
+IMPORTANT:
+
+- All edits are applied in sequence, in the order they are provided
+- Each edit operates on the result of the previous edit
+- All edits must be valid for the operation to succeed - if any edit fails, none will be applied
+- This tool is ideal when you need to make several changes to different parts of the same file
+
+CRITICAL REQUIREMENTS:
+
+1. All edits follow the same requirements as the single Edit tool
+2. The edits are atomic - either all succeed or none are applied
+3. Plan your edits carefully to avoid conflicts between sequential operations
+
+WARNING:
+
+- The tool will fail if edits.old_string doesn't match the file contents exactly (including whitespace)
+- The tool will fail if edits.old_string and edits.new_string are the same
+- Since edits are applied in sequence, ensure that earlier edits don't affect the text that later edits are trying to find
+
+When making edits:
+
+- Ensure all edits result in idiomatic, correct code
+- Do not leave the code in a broken state
+- Always use absolute file paths (starting with /)
+- Only use emojis if the user explicitly requests it. Avoid adding emojis to files unless asked.
+- Use replace_all for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance.
+
+If you want to create a new file, use:
+
+- A new file path, including dir name if needed
+- First edit: empty old_string and the new file's contents as new_string
+- Subsequent edits: normal edit operations on the created content
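The sequential, all-or-nothing behavior described above is the part that most often surprises users, so here is a hedged sketch of those semantics on an in-memory string. The `editOp` type and `applyAll` helper are illustrative, not the tool's code.

```go
package main

import (
	"fmt"
	"strings"
)

// editOp mirrors the per-edit fields described above.
type editOp struct {
	OldString  string
	NewString  string
	ReplaceAll bool
}

// applyAll applies edits in order, each operating on the result of the
// previous one. If any edit fails, the original content is returned
// unchanged, matching the atomic behavior described above.
func applyAll(content string, edits []editOp) (string, error) {
	result := content
	for i, e := range edits {
		if e.OldString == e.NewString {
			return content, fmt.Errorf("edit %d: old_string and new_string are identical", i+1)
		}
		if e.OldString == "" {
			// Empty old_string is the "create a new file" form: new_string
			// becomes the content (only sensible as the first edit).
			result = e.NewString
			continue
		}
		n := strings.Count(result, e.OldString)
		switch {
		case n == 0:
			return content, fmt.Errorf("edit %d: old_string not found", i+1)
		case n > 1 && !e.ReplaceAll:
			return content, fmt.Errorf("edit %d: old_string is ambiguous (%d matches)", i+1, n)
		case e.ReplaceAll:
			result = strings.ReplaceAll(result, e.OldString, e.NewString)
		default:
			result = strings.Replace(result, e.OldString, e.NewString, 1)
		}
	}
	return result, nil
}

func main() {
	out, err := applyAll("", []editOp{
		{OldString: "", NewString: "x := 1\ny := x\n"},         // create the content
		{OldString: "x", NewString: "count", ReplaceAll: true}, // then rename across it
	})
	fmt.Printf("%q %v\n", out, err) // "count := 1\ny := count\n" <nil>
}
```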
diff --git a/internal/llm/tools/sourcegraph.go b/internal/llm/tools/sourcegraph.go
index fcc9bb57428b45a4620417775c61acb380b4d7ad..aea6b1ae8f0d52d10c083debf5bd3780957f261a 100644
--- a/internal/llm/tools/sourcegraph.go
+++ b/internal/llm/tools/sourcegraph.go
@@ -3,6 +3,7 @@ package tools
import (
"bytes"
"context"
+ _ "embed"
"encoding/json"
"fmt"
"io"
@@ -27,103 +28,10 @@ type sourcegraphTool struct {
client *http.Client
}
-const (
- SourcegraphToolName = "sourcegraph"
- sourcegraphToolDescription = `Search code across public repositories using Sourcegraph's GraphQL API.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to find code examples or implementations across public repositories
-- Helpful for researching how others have solved similar problems
-- Useful for discovering patterns and best practices in open source code
-
-HOW TO USE:
-- Provide a search query using Sourcegraph's query syntax
-- Optionally specify the number of results to return (default: 10)
-- Optionally set a timeout for the request
-
-QUERY SYNTAX:
-- Basic search: "fmt.Println" searches for exact matches
-- File filters: "file:.go fmt.Println" limits to Go files
-- Repository filters: "repo:^github\.com/golang/go$ fmt.Println" limits to specific repos
-- Language filters: "lang:go fmt.Println" limits to Go code
-- Boolean operators: "fmt.Println AND log.Fatal" for combined terms
-- Regular expressions: "fmt\.(Print|Printf|Println)" for pattern matching
-- Quoted strings: "\"exact phrase\"" for exact phrase matching
-- Exclude filters: "-file:test" or "-repo:forks" to exclude matches
-
-ADVANCED FILTERS:
-- Repository filters:
- * "repo:name" - Match repositories with name containing "name"
- * "repo:^github\.com/org/repo$" - Exact repository match
- * "repo:org/repo@branch" - Search specific branch
- * "repo:org/repo rev:branch" - Alternative branch syntax
- * "-repo:name" - Exclude repositories
- * "fork:yes" or "fork:only" - Include or only show forks
- * "archived:yes" or "archived:only" - Include or only show archived repos
- * "visibility:public" or "visibility:private" - Filter by visibility
-
-- File filters:
- * "file:\.js$" - Files with .js extension
- * "file:internal/" - Files in internal directory
- * "-file:test" - Exclude test files
- * "file:has.content(Copyright)" - Files containing "Copyright"
- * "file:has.contributor([email protected])" - Files with specific contributor
-
-- Content filters:
- * "content:\"exact string\"" - Search for exact string
- * "-content:\"unwanted\"" - Exclude files with unwanted content
- * "case:yes" - Case-sensitive search
-
-- Type filters:
- * "type:symbol" - Search for symbols (functions, classes, etc.)
- * "type:file" - Search file content only
- * "type:path" - Search filenames only
- * "type:diff" - Search code changes
- * "type:commit" - Search commit messages
-
-- Commit/diff search:
- * "after:\"1 month ago\"" - Commits after date
- * "before:\"2023-01-01\"" - Commits before date
- * "author:name" - Commits by author
- * "message:\"fix bug\"" - Commits with message
-
-- Result selection:
- * "select:repo" - Show only repository names
- * "select:file" - Show only file paths
- * "select:content" - Show only matching content
- * "select:symbol" - Show only matching symbols
-
-- Result control:
- * "count:100" - Return up to 100 results
- * "count:all" - Return all results
- * "timeout:30s" - Set search timeout
-
-EXAMPLES:
-- "file:.go context.WithTimeout" - Find Go code using context.WithTimeout
-- "lang:typescript useState type:symbol" - Find TypeScript React useState hooks
-- "repo:^github\.com/kubernetes/kubernetes$ pod list type:file" - Find Kubernetes files related to pod listing
-- "repo:sourcegraph/sourcegraph$ after:\"3 months ago\" type:diff database" - Recent changes to database code
-- "file:Dockerfile (alpine OR ubuntu) -content:alpine:latest" - Dockerfiles with specific base images
-- "repo:has.path(\.py) file:requirements.txt tensorflow" - Python projects using TensorFlow
-
-BOOLEAN OPERATORS:
-- "term1 AND term2" - Results containing both terms
-- "term1 OR term2" - Results containing either term
-- "term1 NOT term2" - Results with term1 but not term2
-- "term1 and (term2 or term3)" - Grouping with parentheses
-
-LIMITATIONS:
-- Only searches public repositories
-- Rate limits may apply
-- Complex queries may take longer to execute
-- Maximum of 20 results per query
-
-TIPS:
-- Use specific file extensions to narrow results
-- Add repo: filters for more targeted searches
-- Use type:symbol to find function/method definitions
-- Use type:file to find relevant files`
-)
+const SourcegraphToolName = "sourcegraph"
+
+//go:embed sourcegraph.md
+var sourcegraphDescription []byte
func NewSourcegraphTool() BaseTool {
return &sourcegraphTool{
@@ -145,7 +53,7 @@ func (t *sourcegraphTool) Name() string {
func (t *sourcegraphTool) Info() ToolInfo {
return ToolInfo{
Name: SourcegraphToolName,
- Description: sourcegraphToolDescription,
+ Description: string(sourcegraphDescription),
Parameters: map[string]any{
"query": map[string]any{
"type": "string",
diff --git a/internal/llm/tools/sourcegraph.md b/internal/llm/tools/sourcegraph.md
new file mode 100644
index 0000000000000000000000000000000000000000..ec6610d7af429454783282061252a5b40191f5e5
--- /dev/null
+++ b/internal/llm/tools/sourcegraph.md
@@ -0,0 +1,102 @@
+Search code across public repositories using Sourcegraph's GraphQL API.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to find code examples or implementations across public repositories
+- Helpful for researching how others have solved similar problems
+- Useful for discovering patterns and best practices in open source code
+
+HOW TO USE:
+
+- Provide a search query using Sourcegraph's query syntax
+- Optionally specify the number of results to return (default: 10)
+- Optionally set a timeout for the request
+
+QUERY SYNTAX:
+
+- Basic search: "fmt.Println" searches for exact matches
+- File filters: "file:.go fmt.Println" limits to Go files
+- Repository filters: "repo:^github\.com/golang/go$ fmt.Println" limits to specific repos
+- Language filters: "lang:go fmt.Println" limits to Go code
+- Boolean operators: "fmt.Println AND log.Fatal" for combined terms
+- Regular expressions: "fmt\.(Print|Printf|Println)" for pattern matching
+- Quoted strings: "\"exact phrase\"" for exact phrase matching
+- Exclude filters: "-file:test" or "-repo:forks" to exclude matches
+
+ADVANCED FILTERS:
+
+- Repository filters:
+ - "repo:name" - Match repositories with name containing "name"
+ - "repo:^github\.com/org/repo$" - Exact repository match
+ - "repo:org/repo@branch" - Search specific branch
+ - "repo:org/repo rev:branch" - Alternative branch syntax
+ - "-repo:name" - Exclude repositories
+ - "fork:yes" or "fork:only" - Include or only show forks
+ - "archived:yes" or "archived:only" - Include or only show archived repos
+ - "visibility:public" or "visibility:private" - Filter by visibility
+
+- File filters:
+ - "file:\.js$" - Files with .js extension
+ - "file:internal/" - Files in internal directory
+ - "-file:test" - Exclude test files
+ - "file:has.content(Copyright)" - Files containing "Copyright"
+ - "file:has.contributor([email protected])" - Files with specific contributor
+
+- Content filters:
+ - "content:\"exact string\"" - Search for exact string
+ - "-content:\"unwanted\"" - Exclude files with unwanted content
+ - "case:yes" - Case-sensitive search
+
+- Type filters:
+ - "type:symbol" - Search for symbols (functions, classes, etc.)
+ - "type:file" - Search file content only
+ - "type:path" - Search filenames only
+ - "type:diff" - Search code changes
+ - "type:commit" - Search commit messages
+
+- Commit/diff search:
+ - "after:\"1 month ago\"" - Commits after date
+ - "before:\"2023-01-01\"" - Commits before date
+ - "author:name" - Commits by author
+ - "message:\"fix bug\"" - Commits with message
+
+- Result selection:
+ - "select:repo" - Show only repository names
+ - "select:file" - Show only file paths
+ - "select:content" - Show only matching content
+ - "select:symbol" - Show only matching symbols
+
+- Result control:
+ - "count:100" - Return up to 100 results
+ - "count:all" - Return all results
+ - "timeout:30s" - Set search timeout
+
+EXAMPLES:
+
+- "file:.go context.WithTimeout" - Find Go code using context.WithTimeout
+- "lang:typescript useState type:symbol" - Find TypeScript React useState hooks
+- "repo:^github\.com/kubernetes/kubernetes$ pod list type:file" - Find Kubernetes files related to pod listing
+- "repo:sourcegraph/sourcegraph$ after:\"3 months ago\" type:diff database" - Recent changes to database code
+- "file:Dockerfile (alpine OR ubuntu) -content:alpine:latest" - Dockerfiles with specific base images
+- "repo:has.path(\.py) file:requirements.txt tensorflow" - Python projects using TensorFlow
+
+BOOLEAN OPERATORS:
+
+- "term1 AND term2" - Results containing both terms
+- "term1 OR term2" - Results containing either term
+- "term1 NOT term2" - Results with term1 but not term2
+- "term1 and (term2 or term3)" - Grouping with parentheses
+
+LIMITATIONS:
+
+- Only searches public repositories
+- Rate limits may apply
+- Complex queries may take longer to execute
+- Maximum of 20 results per query
+
+TIPS:
+
+- Use specific file extensions to narrow results
+- Add repo: filters for more targeted searches
+- Use type:symbol to find function/method definitions
+- Use type:file to find relevant files
diff --git a/internal/llm/tools/view.go b/internal/llm/tools/view.go
index 7e48a91d380a693295a130b0b39e47c685aab142..c3824bf1b1cc11dc22c57d60eb72654999a5e970 100644
--- a/internal/llm/tools/view.go
+++ b/internal/llm/tools/view.go
@@ -3,6 +3,7 @@ package tools
import (
"bufio"
"context"
+ _ "embed"
"encoding/json"
"fmt"
"io"
@@ -16,6 +17,9 @@ import (
"github.com/charmbracelet/crush/internal/permission"
)
+//go:embed view.md
+var viewDescription []byte
+
type ViewParams struct {
FilePath string `json:"file_path"`
Offset int `json:"offset"`
@@ -44,42 +48,6 @@ const (
MaxReadSize = 250 * 1024
DefaultReadLimit = 2000
MaxLineLength = 2000
- viewDescription = `File viewing tool that reads and displays the contents of files with line numbers, allowing you to examine code, logs, or text data.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to read the contents of a specific file
-- Helpful for examining source code, configuration files, or log files
-- Perfect for looking at text-based file formats
-
-HOW TO USE:
-- Provide the path to the file you want to view
-- Optionally specify an offset to start reading from a specific line
-- Optionally specify a limit to control how many lines are read
-- Do not use this for directories use the ls tool instead
-
-FEATURES:
-- Displays file contents with line numbers for easy reference
-- Can read from any position in a file using the offset parameter
-- Handles large files by limiting the number of lines read
-- Automatically truncates very long lines for better display
-- Suggests similar file names when the requested file isn't found
-
-LIMITATIONS:
-- Maximum file size is 250KB
-- Default reading limit is 2000 lines
-- Lines longer than 2000 characters are truncated
-- Cannot display binary files or images
-- Images can be identified but not displayed
-
-WINDOWS NOTES:
-- Handles both Windows (CRLF) and Unix (LF) line endings automatically
-- File paths work with both forward slashes (/) and backslashes (\)
-- Text encoding is detected automatically for most common formats
-
-TIPS:
-- Use with Glob tool to first find files you want to view
-- For code exploration, first use Grep to find relevant files, then View to examine them
-- When viewing large files, use the offset parameter to read specific sections`
)
func NewViewTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, workingDir string) BaseTool {
@@ -97,7 +65,7 @@ func (v *viewTool) Name() string {
func (v *viewTool) Info() ToolInfo {
return ToolInfo{
Name: ViewToolName,
- Description: viewDescription,
+ Description: string(viewDescription),
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
diff --git a/internal/llm/tools/view.md b/internal/llm/tools/view.md
new file mode 100644
index 0000000000000000000000000000000000000000..2179b77817b3e9a98ca7077bf8430abdde597392
--- /dev/null
+++ b/internal/llm/tools/view.md
@@ -0,0 +1,42 @@
+File viewing tool that reads and displays the contents of files with line numbers, allowing you to examine code, logs, or text data.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to read the contents of a specific file
+- Helpful for examining source code, configuration files, or log files
+- Perfect for looking at text-based file formats
+
+HOW TO USE:
+
+- Provide the path to the file you want to view
+- Optionally specify an offset to start reading from a specific line
+- Optionally specify a limit to control how many lines are read
+- Do not use this for directories; use the ls tool instead
+
+FEATURES:
+
+- Displays file contents with line numbers for easy reference
+- Can read from any position in a file using the offset parameter
+- Handles large files by limiting the number of lines read
+- Automatically truncates very long lines for better display
+- Suggests similar file names when the requested file isn't found
+
+LIMITATIONS:
+
+- Maximum file size is 250KB
+- Default reading limit is 2000 lines
+- Lines longer than 2000 characters are truncated
+- Cannot display binary files or images
+- Images can be identified but not displayed
+
+WINDOWS NOTES:
+
+- Handles both Windows (CRLF) and Unix (LF) line endings automatically
+- File paths work with both forward slashes (/) and backslashes (\)
+- Text encoding is detected automatically for most common formats
+
+TIPS:
+
+- Use with Glob tool to first find files you want to view
+- For code exploration, first use Grep to find relevant files, then View to examine them
+- When viewing large files, use the offset parameter to read specific sections
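A hedged sketch of the offset/limit and line-truncation behavior described above: the constants are the limits listed, while `printNumbered` and the output format are assumptions rather than the tool's code.

```go
package main

import (
	"bufio"
	"fmt"
	"os"
)

const (
	defaultReadLimit = 2000 // default number of lines, per the limits above
	maxLineLength    = 2000 // longer lines are truncated, per the limits above
)

// printNumbered prints up to limit lines starting at the 1-based offset line,
// prefixing each with its line number and truncating overly long lines.
func printNumbered(path string, offset, limit int) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()

	if offset < 1 {
		offset = 1
	}
	if limit <= 0 {
		limit = defaultReadLimit
	}

	scanner := bufio.NewScanner(f)
	scanner.Buffer(make([]byte, 0, 64*1024), 1024*1024) // tolerate long lines
	lineNo, printed := 0, 0
	for scanner.Scan() {
		lineNo++
		if lineNo < offset {
			continue
		}
		line := scanner.Text()
		if len(line) > maxLineLength {
			line = line[:maxLineLength] + "..."
		}
		fmt.Printf("%6d\t%s\n", lineNo, line)
		if printed++; printed >= limit {
			break
		}
	}
	return scanner.Err()
}

func main() {
	if err := printNumbered("go.mod", 1, 20); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}
```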
diff --git a/internal/llm/tools/write.go b/internal/llm/tools/write.go
index cb256eb3d5c016797635796c8a8cf706810161af..36ea13d2dd6e1930eb645ab036c950b9a4d6fdc8 100644
--- a/internal/llm/tools/write.go
+++ b/internal/llm/tools/write.go
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"log/slog"
@@ -19,6 +20,9 @@ import (
"github.com/charmbracelet/crush/internal/permission"
)
+//go:embed write.md
+var writeDescription []byte
+
type WriteParams struct {
FilePath string `json:"file_path"`
Content string `json:"content"`
@@ -43,41 +47,7 @@ type WriteResponseMetadata struct {
Removals int `json:"removals"`
}
-const (
- WriteToolName = "write"
- writeDescription = `File writing tool that creates or updates files in the filesystem, allowing you to save or modify text content.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to create a new file
-- Helpful for updating existing files with modified content
-- Perfect for saving generated code, configurations, or text data
-
-HOW TO USE:
-- Provide the path to the file you want to write
-- Include the content to be written to the file
-- The tool will create any necessary parent directories
-
-FEATURES:
-- Can create new files or overwrite existing ones
-- Creates parent directories automatically if they don't exist
-- Checks if the file has been modified since last read for safety
-- Avoids unnecessary writes when content hasn't changed
-
-LIMITATIONS:
-- You should read a file before writing to it to avoid conflicts
-- Cannot append to files (rewrites the entire file)
-
-WINDOWS NOTES:
-- File permissions (0o755, 0o644) are Unix-style but work on Windows with appropriate translations
-- Use forward slashes (/) in paths for cross-platform compatibility
-- Windows file attributes and permissions are handled automatically by the Go runtime
-
-TIPS:
-- Use the View tool first to examine existing files before modifying them
-- Use the LS tool to verify the correct location when creating new files
-- Combine with Glob and Grep tools to find and modify multiple files
-- Always include descriptive comments when making changes to existing code`
-)
+const WriteToolName = "write"
func NewWriteTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, files history.Service, workingDir string) BaseTool {
return &writeTool{
@@ -95,7 +65,7 @@ func (w *writeTool) Name() string {
func (w *writeTool) Info() ToolInfo {
return ToolInfo{
Name: WriteToolName,
- Description: writeDescription,
+ Description: string(writeDescription),
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
diff --git a/internal/llm/tools/write.md b/internal/llm/tools/write.md
new file mode 100644
index 0000000000000000000000000000000000000000..b8cc811ffe6234e7ebfd34037de57bd483305738
--- /dev/null
+++ b/internal/llm/tools/write.md
@@ -0,0 +1,38 @@
+File writing tool that creates or updates files in the filesystem, allowing you to save or modify text content.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to create a new file
+- Helpful for updating existing files with modified content
+- Perfect for saving generated code, configurations, or text data
+
+HOW TO USE:
+
+- Provide the path to the file you want to write
+- Include the content to be written to the file
+- The tool will create any necessary parent directories
+
+FEATURES:
+
+- Can create new files or overwrite existing ones
+- Creates parent directories automatically if they don't exist
+- Checks if the file has been modified since last read for safety
+- Avoids unnecessary writes when content hasn't changed
+
+LIMITATIONS:
+
+- You should read a file before writing to it to avoid conflicts
+- Cannot append to files (rewrites the entire file)
+
+WINDOWS NOTES:
+
+- File permissions (0o755, 0o644) are Unix-style but work on Windows with appropriate translations
+- Use forward slashes (/) in paths for cross-platform compatibility
+- Windows file attributes and permissions are handled automatically by the Go runtime
+
+TIPS:
+
+- Use the View tool first to examine existing files before modifying them
+- Use the LS tool to verify the correct location when creating new files
+- Combine with Glob and Grep tools to find and modify multiple files
+- Always include descriptive comments when making changes to existing code
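
For context, the mechanism behind moving these descriptions into `.md` files is Go's `embed` package: a `//go:embed` directive immediately above a package-level variable bundles the named file into the binary at build time, and `Info()` simply converts the bytes to a string. A minimal sketch of the pattern (the `description` helper below is hypothetical, not part of the diff):

```go
package tools

import (
	_ "embed" // blank import is required to enable the //go:embed directive
)

//go:embed write.md
var writeDescription []byte // contents of write.md, compiled into the binary

// description is a hypothetical helper mirroring the conversion done in Info().
func description() string {
	return string(writeDescription)
}
```

This keeps the long prompt text out of the Go source while still shipping a single, self-contained binary.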
From 6c990b25efb2eba3111618cf5491f7eeffd8e637 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Fri, 19 Sep 2025 09:59:43 -0300
Subject: [PATCH 113/236] refactor: remove unused prompt (#1083)
Signed-off-by: Carlos Alexandro Becker
---
internal/llm/prompt/coder.go | 3 --
internal/llm/prompt/openai.md | 96 -----------------------------------
2 files changed, 99 deletions(-)
delete mode 100644 internal/llm/prompt/openai.md
diff --git a/internal/llm/prompt/coder.go b/internal/llm/prompt/coder.go
index bf3eb5ea47d877c8adf21816900d7acb1434aa82..90e5a17191f346a5df53622e1826bc04214ddbfc 100644
--- a/internal/llm/prompt/coder.go
+++ b/internal/llm/prompt/coder.go
@@ -45,9 +45,6 @@ var anthropicCoderPrompt []byte
//go:embed gemini.md
var geminiCoderPrompt []byte
-//go:embed openai.md
-var openaiCoderPrompt []byte
-
//go:embed v2.md
var coderV2Prompt []byte
diff --git a/internal/llm/prompt/openai.md b/internal/llm/prompt/openai.md
deleted file mode 100644
index 1c69d40152cd380993f2032e0b3967b370be90fc..0000000000000000000000000000000000000000
--- a/internal/llm/prompt/openai.md
+++ /dev/null
@@ -1,96 +0,0 @@
-Please resolve the user's task by editing and testing the code files in your current code execution session.
-You are a deployed coding agent.
-Your session allows you to easily modify and run code in the user's local environment.
-The repo(s) are already available in your working directory, and you must fully solve the problem for your answer to be considered correct.
-
-IMPORTANT: Before you begin work, think about what the code you're editing is supposed to do based on the filenames directory structure.
-
-# Memory
-
-If the current working directory contains a file called CRUSH.md, it will be automatically added to your context. This file serves multiple purposes:
-
-1. Storing frequently used bash commands (build, test, lint, etc.) so you can use them without searching each time
-2. Recording the user's code style preferences (naming conventions, preferred libraries, etc.)
-3. Maintaining useful information about the codebase structure and organization
-
-When you spend time searching for commands to typecheck, lint, build, or test, you should ask the user if it's okay to add those commands to CRUSH.md. Similarly, when learning about code style preferences or important codebase information, ask if it's okay to add that to CRUSH.md so you can remember it for next time.
-
-You MUST adhere to the following criteria when executing the task:
-
-- Working on the repo(s) in the current environment is allowed, even if they are proprietary.
-- Analyzing code for vulnerabilities is allowed.
-- Showing user code and tool call details is allowed.
-- User instructions may overwrite the _CODING GUIDELINES_ section in this developer message.
-- Do not use `ls -R` `find`, or `grep` - these are slow in large repos. Use the Agent tool for searching instead.
-- Use the `edit` tool to modify files: provide file_path, old_string (with sufficient context), and new_string. The edit tool requires:
- - Absolute file paths (starting with /)
- - Unique old_string matches with 3-5 lines of context before and after
- - Exact whitespace and indentation matching
- - For new files: provide file_path and new_string, leave old_string empty
- - For deleting content: provide file_path and old_string, leave new_string empty
-
-# Following conventions
-
-When making changes to files, first understand the file's code conventions. Mimic code style, use existing libraries and utilities, and follow existing patterns.
-
-- NEVER assume that a given library is available, even if it is well known. Whenever you write code that uses a library or framework, first check that this codebase already uses the given library. For example, you might look at neighboring files, or check the package.json (or cargo.toml, and so on depending on the language).
-- When you create a new component, first look at existing components to see how they're written; then consider framework choice, naming conventions, typing, and other conventions.
-- When you edit a piece of code, first look at the code's surrounding context (especially its imports) to understand the code's choice of frameworks and libraries. Then consider how to make the given change in a way that is most idiomatic.
-- Always follow security best practices. Never introduce code that exposes or logs secrets and keys. Never commit secrets or keys to the repository.
-
-# Code style
-
-- IMPORTANT: DO NOT ADD **_ANY_** COMMENTS unless asked
-
-- If completing the user's task requires writing or modifying files:
- - Your code and final answer should follow these _CODING GUIDELINES_:
- - Fix the problem at the root cause rather than applying surface-level patches, when possible.
- - Avoid unneeded complexity in your solution.
- - Ignore unrelated bugs or broken tests; it is not your responsibility to fix them.
- - Update documentation as necessary.
- - Keep changes consistent with the style of the existing codebase. Changes should be minimal and focused on the task.
- - Use `git log` and `git blame` to search the history of the codebase if additional context is required.
- - NEVER add copyright or license headers unless specifically requested.
- - You do not need to `git commit` your changes; this will be done automatically for you.
- - If there is a .pre-commit-config.yaml, use `pre-commit run --files ...` to check that your changes pass the pre-commit checks. However, do not fix pre-existing errors on lines you didn't touch.
- - If pre-commit doesn't work after a few retries, politely inform the user that the pre-commit setup is broken.
- - Once you finish coding, you must
- - Check `git status` to sanity check your changes; revert any scratch files or changes.
- - Remove all inline comments you added as much as possible, even if they look normal. Check using `git diff`. Inline comments must be generally avoided, unless active maintainers of the repo, after long careful study of the code and the issue, will still misinterpret the code without the comments.
- - Check if you accidentally add copyright or license headers. If so, remove them.
- - Try to run pre-commit if it is available.
- - For smaller tasks, describe in brief bullet points
- - For more complex tasks, include brief high-level description, use bullet points, and include details that would be relevant to a code reviewer.
-
-# Doing tasks
-
-The user will primarily request you perform software engineering tasks. This includes solving bugs, adding new functionality, refactoring code, explaining code, and more. For these tasks the following steps are recommended:
-
-1. Use the available search tools to understand the codebase and the user's query.
-2. Implement the solution using all tools available to you
-3. Verify the solution if possible with tests. NEVER assume specific test framework or test script. Check the README or search codebase to determine the testing approach.
-4. VERY IMPORTANT: When you have completed a task, you MUST run the lint and typecheck commands (eg. npm run lint, npm run typecheck, ruff, etc.) if they were provided to you to ensure your code is correct. If you are unable to find the correct command, ask the user for the command to run and if they supply it, proactively suggest writing it to CRUSH.md so that you will know to run it next time.
-
-NEVER commit changes unless the user explicitly asks you to. It is VERY IMPORTANT to only commit when explicitly asked, otherwise the user will feel that you are being too proactive.
-
-# Tool usage policy
-
-- When doing file search, prefer to use the Agent tool in order to reduce context usage.
-- IMPORTANT: All tools are executed in parallel when multiple tool calls are sent in a single message. Only send multiple tool calls when they are safe to run in parallel (no dependencies between them).
-- IMPORTANT: The user does not see the full output of the tool responses, so if you need the output of the tool for the response make sure to summarize it for the user.
-
-# Proactiveness
-
-You are allowed to be proactive, but only when the user asks you to do something. You should strive to strike a balance between:
-
-1. Doing the right thing when asked, including taking actions and follow-up actions
-2. Not surprising the user with actions you take without asking
- For example, if the user asks you how to approach something, you should do your best to answer their question first, and not immediately jump into taking actions.
-3. Do not add additional code explanation summary unless requested by the user. After working on a file, just stop, rather than providing an explanation of what you did.
-
-- If completing the user's task DOES NOT require writing or modifying files (e.g., the user asks a question about the code base):
- - Respond in a friendly tone as a remote teammate, who is knowledgeable, capable and eager to help with coding.
-- When your task involves writing or modifying files:
- - Do NOT tell the user to "save the file" or "copy the code into a file" if you already created or modified the file using `edit`. Instead, reference the file as already saved.
- - Do NOT show the full contents of large files you have already written, unless the user explicitly asks for them.
-- NEVER use emojis in your responses
From f9829651d66b8839e966a911c26ae4f79262689c Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Fri, 19 Sep 2025 10:31:54 -0300
Subject: [PATCH 114/236] chore(legal): @msteinert has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index de147d1ee126d921e62b00e40cad4f36e7afd04b..1a11c99843401cf443f4dfb72ba9ca732af62fd1 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -639,6 +639,14 @@
"created_at": "2025-09-17T05:32:53Z",
"repoId": 987670088,
"pullRequestNo": 1065
+ },
+ {
+ "name": "msteinert",
+ "id": 202852,
+ "comment_id": 3312218015,
+ "created_at": "2025-09-19T13:31:42Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1084
}
]
}
\ No newline at end of file
From 00072cd29c0d789e146996027902afb4ca861f41 Mon Sep 17 00:00:00 2001
From: Mike Steinert
Date: Fri, 19 Sep 2025 09:15:10 -0500
Subject: [PATCH 115/236] fix: Fix a typo in README.md (#1084)
The environment variable documented for configuring the Azure AI endpoint
did not match the value in [Catwalk][].
[Catwalk]: https://github.com/charmbracelet/catwalk/blob/9d9a86e3a4aec20b8874175c115a3533d5083b54/internal/providers/configs/azure.json#L6
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 299ce72010540c9b6bd7b9730e05c161bc6e523b..616b6191f4364325bcea8d985dff9594173765de 100644
--- a/README.md
+++ b/README.md
@@ -132,7 +132,7 @@ That said, you can also set environment variables for preferred providers.
| `AWS_ACCESS_KEY_ID` | AWS Bedrock (Claude) |
| `AWS_SECRET_ACCESS_KEY` | AWS Bedrock (Claude) |
| `AWS_REGION` | AWS Bedrock (Claude) |
-| `AZURE_OPENAI_ENDPOINT` | Azure OpenAI models |
+| `AZURE_OPENAI_API_ENDPOINT`| Azure OpenAI models |
| `AZURE_OPENAI_API_KEY` | Azure OpenAI models (optional when using Entra ID) |
| `AZURE_OPENAI_API_VERSION` | Azure OpenAI models |
From 831a95abf071c5837c5ba135c0545da4dc483b35 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Fri, 19 Sep 2025 12:40:02 -0300
Subject: [PATCH 116/236] fix(deps): update powernap for zig
pulls in improved Zig support from https://github.com/charmbracelet/x/pull/567
Signed-off-by: Carlos Alexandro Becker
---
go.mod | 2 +-
go.sum | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/go.mod b/go.mod
index 7f888b04a86aa7bb0ae4631fdef410b88633fe90..8e8301c2856df9e7e4fc7f65ada99bbf06c8c2b2 100644
--- a/go.mod
+++ b/go.mod
@@ -76,7 +76,7 @@ require (
github.com/charmbracelet/ultraviolet v0.0.0-20250912143111-9785ff826cbf
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a // indirect
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d
- github.com/charmbracelet/x/powernap v0.0.0-20250911135559-c589b77c25e6
+ github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4
github.com/charmbracelet/x/term v0.2.1
github.com/charmbracelet/x/termios v0.1.1 // indirect
github.com/charmbracelet/x/windows v0.2.2 // indirect
diff --git a/go.sum b/go.sum
index a937b62c5f9ee12e2ef41a12cb8be352f112442d..f1358f93209b6dd575763818ee71d8f4d9c01c53 100644
--- a/go.sum
+++ b/go.sum
@@ -104,8 +104,8 @@ github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a h1:FsHE
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d h1:H2oh4WlSsXy8qwLd7I3eAvPd/X3S40aM9l+h47WF1eA=
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d/go.mod h1:vI5nDVMWi6veaYH+0Fmvpbe/+cv/iJfMntdh+N0+Tms=
-github.com/charmbracelet/x/powernap v0.0.0-20250911135559-c589b77c25e6 h1:8XaGEZ453uu9IUBlEWu1I9U+Z7GmdwIzFFzBasRzDEk=
-github.com/charmbracelet/x/powernap v0.0.0-20250911135559-c589b77c25e6/go.mod h1:cmdl5zlP5mR8TF2Y68UKc7hdGUDiSJ2+4hk0h04Hsx4=
+github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4 h1:ZhDGU688EHQXslD9KphRpXwK0pKP03egUoZAATUDlV0=
+github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4/go.mod h1:cmdl5zlP5mR8TF2Y68UKc7hdGUDiSJ2+4hk0h04Hsx4=
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
github.com/charmbracelet/x/termios v0.1.1 h1:o3Q2bT8eqzGnGPOYheoYS8eEleT5ZVNYNy8JawjaNZY=
From 47a6d0285724a7e078b4134a538cf90ebe765cac Mon Sep 17 00:00:00 2001
From: Benjamin Oldenburg
Date: Sat, 20 Sep 2025 01:27:58 +0700
Subject: [PATCH 117/236] fix: session summarization dialog hanging
indefinitely (#528)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
The summarization dialog was getting stuck in "Generating..." state due to two issues:
- Agent was formatting nil error values instead of actual response errors
- Dialog only handled summarize events but not error events, leaving it in loading state
💖 Generated with Crush
Co-authored-by: Crush
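
The shape of the fix, as a standalone sketch with simplified, hypothetical types (the actual change is in `agent.go` and `compact.go` below): the dialog must branch on the event type and treat error events as terminal, otherwise it never leaves the loading state.

```go
package main

import "fmt"

type eventType int

const (
	eventSummarize eventType = iota
	eventError
)

type agentEvent struct {
	Type     eventType
	Err      error
	Done     bool
	Progress string
}

// dialog is a stand-in for the compact dialog's state machine.
type dialog struct {
	state    string // "loading", "error", or "done"
	progress string
}

// handle mirrors the fixed switch: summarize events update progress or finish,
// while error events now move the dialog out of the loading state instead of
// being silently dropped.
func (d *dialog) handle(ev agentEvent) {
	switch ev.Type {
	case eventSummarize:
		switch {
		case ev.Err != nil:
			d.state, d.progress = "error", "Error: "+ev.Err.Error()
		case ev.Done:
			d.state = "done"
		default:
			d.progress = ev.Progress
		}
	case eventError:
		d.state = "error"
		if ev.Err != nil {
			d.progress = "Error: " + ev.Err.Error()
		} else {
			d.progress = "An unknown error occurred"
		}
	}
}

func main() {
	d := &dialog{state: "loading"}
	d.handle(agentEvent{Type: eventError, Err: fmt.Errorf("summarize failed")})
	fmt.Println(d.state, d.progress) // error Error: summarize failed
}
```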
---
internal/llm/agent/agent.go | 2 +-
.../tui/components/dialogs/compact/compact.go | 15 +++++++++++----
2 files changed, 12 insertions(+), 5 deletions(-)
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index 864188113168948c2e59a221c62c6cdad99f75ce..799cec2e67cc1cdc433903081da86db68ae31e99 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -832,7 +832,7 @@ func (a *agent) Summarize(ctx context.Context, sessionID string) error {
if r.Error != nil {
event = AgentEvent{
Type: AgentEventTypeError,
- Error: fmt.Errorf("failed to summarize: %w", err),
+ Error: fmt.Errorf("failed to summarize: %w", r.Error),
Done: true,
}
a.Publish(pubsub.CreatedEvent, event)
diff --git a/internal/tui/components/dialogs/compact/compact.go b/internal/tui/components/dialogs/compact/compact.go
index 86455e3139b4d0eb43baaf509b0fa0e039dd4939..ecde402fd8dfe1f31791834cd4e4bae13ec45e00 100644
--- a/internal/tui/components/dialogs/compact/compact.go
+++ b/internal/tui/components/dialogs/compact/compact.go
@@ -104,17 +104,24 @@ func (c *compactDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}
case agent.AgentEvent:
- if msg.Type == agent.AgentEventTypeSummarize {
+ switch msg.Type {
+ case agent.AgentEventTypeSummarize:
if msg.Error != nil {
c.state = stateError
c.progress = "Error: " + msg.Error.Error()
} else if msg.Done {
- return c, util.CmdHandler(
- dialogs.CloseDialogMsg{},
- )
+ return c, util.CmdHandler(dialogs.CloseDialogMsg{})
} else {
c.progress = msg.Progress
}
+ case agent.AgentEventTypeError:
+ // Handle errors that occur during summarization but are sent as separate error events.
+ c.state = stateError
+ if msg.Error != nil {
+ c.progress = "Error: " + msg.Error.Error()
+ } else {
+ c.progress = "An unknown error occurred"
+ }
}
return c, nil
}
From d55f807f120bb7954e67fe9f39fb13c91464e5df Mon Sep 17 00:00:00 2001
From: Kieran Klukas
Date: Fri, 19 Sep 2025 14:32:43 -0400
Subject: [PATCH 118/236] docs: add bit about nixos module (#606)
---
README.md | 56 +++++++++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 56 insertions(+)
diff --git a/README.md b/README.md
index 616b6191f4364325bcea8d985dff9594173765de..aedd503e42ed3564627a891d41d8874740c1ea3c 100644
--- a/README.md
+++ b/README.md
@@ -64,8 +64,64 @@ nix-channel --update
nix-shell -p '(import { pkgs = import {}; }).repos.charmbracelet.crush'
```
+### NixOS & Home Manager Module Usage via NUR
+
+Crush provides NixOS and Home Manager modules via NUR.
+You can use these modules directly in your flake by importing them from NUR. Since it auto-detects whether it's a Home Manager or NixOS context, you can use the same import either way :)
+
+```nix
+{
+ inputs = {
+ nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
+ nur.url = "github:nix-community/NUR";
+ };
+
+ outputs = { self, nixpkgs, nur, ... }: {
+ nixosConfigurations.your-hostname = nixpkgs.lib.nixosSystem {
+ system = "x86_64-linux";
+ modules = [
+ nur.modules.nixos.default
+ nur.repos.charmbracelet.modules.crush
+ {
+ programs.crush = {
+ enable = true;
+ settings = {
+ providers = {
+ openai = {
+ id = "openai";
+ name = "OpenAI";
+ base_url = "https://api.openai.com/v1";
+ type = "openai";
+ api_key = "sk-fake123456789abcdef...";
+ models = [
+ {
+ id = "gpt-4";
+ name = "GPT-4";
+ }
+ ];
+ };
+ };
+ lsp = {
+ go = { command = "gopls"; enabled = true; };
+ nix = { command = "nil"; enabled = true; };
+ };
+ options = {
+ context_paths = [ "/etc/nixos/configuration.nix" ];
+ tui = { compact_mode = true; };
+ debug = false;
+ };
+ };
+ };
+ }
+ ];
+ };
+ };
+}
+```
+
+
Debian/Ubuntu
From 38bde491ad1c74e34ce0e4d8f7eba82c61436680 Mon Sep 17 00:00:00 2001
From: Ayman Bagabas
Date: Fri, 19 Sep 2025 14:45:28 -0400
Subject: [PATCH 119/236] fix(tui): yes/no init selection (#1074)
This commit ensures that the "Yes" and "No" options in the init screen
get properly selected when navigating with the keyboard.
Related: https://github.com/charmbracelet/crush/issues/1048
---
internal/tui/components/chat/splash/splash.go | 1 +
1 file changed, 1 insertion(+)
diff --git a/internal/tui/components/chat/splash/splash.go b/internal/tui/components/chat/splash/splash.go
index b49bd862876f6b3eb880bfe732b956026421aabe..187fc35e6ec47a858b99f35e135a8cef3500fbf1 100644
--- a/internal/tui/components/chat/splash/splash.go
+++ b/internal/tui/components/chat/splash/splash.go
@@ -253,6 +253,7 @@ func (s *splashCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return s, cmd
}
if s.needsProjectInit {
+ s.selectedNo = false
return s, s.initializeProject()
}
case key.Matches(msg, s.keyMap.No):
From 976425c7b17bdd3a02ae64e9251a5c046bde7014 Mon Sep 17 00:00:00 2001
From: Christian Rocha
Date: Mon, 8 Sep 2025 09:23:52 -0400
Subject: [PATCH 120/236] docs(readme): add mini Chinese description (thanks
@ohjia)
---
README.md | 36 ++++++++++++++++++------------------
1 file changed, 18 insertions(+), 18 deletions(-)
diff --git a/README.md b/README.md
index aedd503e42ed3564627a891d41d8874740c1ea3c..e89c651de360e0bcfa0336f4a726b60ea9635d31 100644
--- a/README.md
+++ b/README.md
@@ -7,6 +7,7 @@
Your new coding bestie, now available in your favourite terminal.
Your tools, your code, and your workflows, wired into your LLM of choice.
+你的新编程伙伴,现在就在你最爱的终端中。
你的工具、代码和工作流,都与您选择的 LLM 模型紧密相连。

@@ -66,7 +67,7 @@ nix-shell -p '(import { pkgs = import {}; }).repos.charmbracelet
### NixOS & Home Manager Module Usage via NUR
-Crush provides NixOS and Home Manager modules via NUR.
+Crush provides NixOS and Home Manager modules via NUR.
 You can use these modules directly in your flake by importing them from NUR. Since it auto-detects whether it's a Home Manager or NixOS context, you can use the same import either way :)
```nix
@@ -121,7 +122,6 @@ You can use these modules directly in your flake by importing them from NUR. Sin
-
Debian/Ubuntu
@@ -175,22 +175,22 @@ Crush. You'll be prompted to enter your API key.
That said, you can also set environment variables for preferred providers.
-| Environment Variable | Provider |
-| -------------------------- | -------------------------------------------------- |
-| `ANTHROPIC_API_KEY` | Anthropic |
-| `OPENAI_API_KEY` | OpenAI |
-| `OPENROUTER_API_KEY` | OpenRouter |
-| `CEREBRAS_API_KEY` | Cerebras |
-| `GEMINI_API_KEY` | Google Gemini |
-| `VERTEXAI_PROJECT` | Google Cloud VertexAI (Gemini) |
-| `VERTEXAI_LOCATION` | Google Cloud VertexAI (Gemini) |
-| `GROQ_API_KEY` | Groq |
-| `AWS_ACCESS_KEY_ID` | AWS Bedrock (Claude) |
-| `AWS_SECRET_ACCESS_KEY` | AWS Bedrock (Claude) |
-| `AWS_REGION` | AWS Bedrock (Claude) |
-| `AZURE_OPENAI_API_ENDPOINT`| Azure OpenAI models |
-| `AZURE_OPENAI_API_KEY` | Azure OpenAI models (optional when using Entra ID) |
-| `AZURE_OPENAI_API_VERSION` | Azure OpenAI models |
+| Environment Variable | Provider |
+| --------------------------- | -------------------------------------------------- |
+| `ANTHROPIC_API_KEY` | Anthropic |
+| `OPENAI_API_KEY` | OpenAI |
+| `OPENROUTER_API_KEY` | OpenRouter |
+| `CEREBRAS_API_KEY` | Cerebras |
+| `GEMINI_API_KEY` | Google Gemini |
+| `VERTEXAI_PROJECT` | Google Cloud VertexAI (Gemini) |
+| `VERTEXAI_LOCATION` | Google Cloud VertexAI (Gemini) |
+| `GROQ_API_KEY` | Groq |
+| `AWS_ACCESS_KEY_ID` | AWS Bedrock (Claude) |
+| `AWS_SECRET_ACCESS_KEY` | AWS Bedrock (Claude) |
+| `AWS_REGION` | AWS Bedrock (Claude) |
+| `AZURE_OPENAI_API_ENDPOINT` | Azure OpenAI models |
+| `AZURE_OPENAI_API_KEY` | Azure OpenAI models (optional when using Entra ID) |
+| `AZURE_OPENAI_API_VERSION` | Azure OpenAI models |
### By the Way
From 3071026ec7f6a306db73d18ff10f5d4cc3c01566 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Fri, 19 Sep 2025 10:53:13 -0300
Subject: [PATCH 121/236] chore(deps): update dependencies
Signed-off-by: Carlos Alexandro Becker
---
go.mod | 28 ++++++++++++++--------------
go.sum | 57 ++++++++++++++++++++++++++++-----------------------------
2 files changed, 42 insertions(+), 43 deletions(-)
diff --git a/go.mod b/go.mod
index 8e8301c2856df9e7e4fc7f65ada99bbf06c8c2b2..b59f4d649ad9c635d5dc8d583e993208b06547a6 100644
--- a/go.mod
+++ b/go.mod
@@ -7,14 +7,14 @@ require (
github.com/MakeNowJust/heredoc v1.0.0
github.com/PuerkitoBio/goquery v1.10.3
github.com/alecthomas/chroma/v2 v2.20.0
- github.com/anthropics/anthropic-sdk-go v1.9.1
+ github.com/anthropics/anthropic-sdk-go v1.12.0
github.com/atotto/clipboard v0.1.4
github.com/aymanbagabas/go-udiff v0.3.1
github.com/bmatcuk/doublestar/v4 v4.9.1
- github.com/charlievieth/fastwalk v1.0.12
+ github.com/charlievieth/fastwalk v1.0.14
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e
- github.com/charmbracelet/catwalk v0.5.3
+ github.com/charmbracelet/catwalk v0.5.8
github.com/charmbracelet/fang v0.4.1
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0
@@ -26,20 +26,20 @@ require (
github.com/google/uuid v1.6.0
github.com/invopop/jsonschema v0.13.0
github.com/joho/godotenv v1.5.1
- github.com/mark3labs/mcp-go v0.38.0
+ github.com/mark3labs/mcp-go v0.40.0
github.com/muesli/termenv v0.16.0
- github.com/ncruces/go-sqlite3 v0.28.0
+ github.com/ncruces/go-sqlite3 v0.29.0
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646
github.com/nxadm/tail v1.4.11
github.com/openai/openai-go v1.12.0
github.com/pressly/goose/v3 v3.25.0
- github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c
+ github.com/qjebbs/go-jsons v1.0.0-alpha.4
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06
github.com/sahilm/fuzzy v0.1.1
- github.com/spf13/cobra v1.9.1
+ github.com/spf13/cobra v1.10.1
github.com/srwiley/oksvg v0.0.0-20221011165216-be6e8873101c
github.com/srwiley/rasterx v0.0.0-20220730225603-2ab79fcdd4ef
- github.com/stretchr/testify v1.11.0
+ github.com/stretchr/testify v1.11.1
github.com/tidwall/sjson v1.2.5
github.com/zeebo/xxh3 v1.0.2
gopkg.in/natefinch/lumberjack.v2 v2.2.1
@@ -99,7 +99,7 @@ require (
github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/cpuid/v2 v2.0.9 // indirect
github.com/klauspost/pgzip v1.2.6 // indirect
- github.com/lucasb-eyer/go-colorful v1.2.0
+ github.com/lucasb-eyer/go-colorful v1.3.0
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
@@ -119,7 +119,7 @@ require (
github.com/sethvargo/go-retry v0.3.0 // indirect
github.com/sourcegraph/jsonrpc2 v0.2.1 // indirect
github.com/spf13/cast v1.7.1 // indirect
- github.com/spf13/pflag v1.0.7 // indirect
+ github.com/spf13/pflag v1.0.9 // indirect
github.com/tetratelabs/wazero v1.9.0 // indirect
github.com/tidwall/gjson v1.18.0 // indirect
github.com/tidwall/match v1.1.1 // indirect
@@ -141,18 +141,18 @@ require (
golang.org/x/crypto v0.41.0 // indirect
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect
golang.org/x/image v0.26.0 // indirect
- golang.org/x/net v0.42.0 // indirect
+ golang.org/x/net v0.43.0 // indirect
golang.org/x/oauth2 v0.30.0 // indirect
golang.org/x/sync v0.17.0 // indirect
golang.org/x/sys v0.36.0 // indirect
golang.org/x/term v0.34.0 // indirect
- golang.org/x/text v0.28.0
+ golang.org/x/text v0.29.0
golang.org/x/time v0.8.0 // indirect
google.golang.org/api v0.211.0 // indirect
- google.golang.org/genai v1.21.0
+ google.golang.org/genai v1.25.0
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 // indirect
google.golang.org/grpc v1.71.0 // indirect
- google.golang.org/protobuf v1.36.6 // indirect
+ google.golang.org/protobuf v1.36.8 // indirect
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
mvdan.cc/sh/moreinterp v0.0.0-20250902163504-3cf4fd5717a5
diff --git a/go.sum b/go.sum
index f1358f93209b6dd575763818ee71d8f4d9c01c53..a921201c472e338f3c068503d9404d68a7bcba12 100644
--- a/go.sum
+++ b/go.sum
@@ -30,8 +30,8 @@ github.com/alecthomas/repr v0.5.1/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW5
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
-github.com/anthropics/anthropic-sdk-go v1.9.1 h1:raRhZKmayVSVZtLpLDd6IsMXvxLeeSU03/2IBTerWlg=
-github.com/anthropics/anthropic-sdk-go v1.9.1/go.mod h1:WTz31rIUHUHqai2UslPpw5CwXrQP3geYBioRV4WOLvE=
+github.com/anthropics/anthropic-sdk-go v1.12.0 h1:xPqlGnq7rWrTiHazIvCiumA0u7mGQnwDQtvA1M82h9U=
+github.com/anthropics/anthropic-sdk-go v1.12.0/go.mod h1:WTz31rIUHUHqai2UslPpw5CwXrQP3geYBioRV4WOLvE=
github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
github.com/aws/aws-sdk-go-v2 v1.30.3 h1:jUeBtG0Ih+ZIFH0F4UkmL9w3cSpaMv9tYYDbzILP8dY=
@@ -74,14 +74,14 @@ github.com/bmatcuk/doublestar/v4 v4.9.1 h1:X8jg9rRZmJd4yRy7ZeNDRnM+T3ZfHv15JiBJ/
github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs=
github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0=
-github.com/charlievieth/fastwalk v1.0.12 h1:pwfxe1LajixViQqo7EFLXU2+mQxb6OaO0CeNdVwRKTg=
-github.com/charlievieth/fastwalk v1.0.12/go.mod h1:yGy1zbxog41ZVMcKA/i8ojXLFsuayX5VvwhQVoj9PBI=
+github.com/charlievieth/fastwalk v1.0.14 h1:3Eh5uaFGwHZd8EGwTjJnSpBkfwfsak9h6ICgnWlhAyg=
+github.com/charlievieth/fastwalk v1.0.14/go.mod h1:diVcUreiU1aQ4/Wu3NbxxH4/KYdKpLDojrQ1Bb2KgNY=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2 h1:973OHYuq2Jx9deyuPwe/6lsuQrDCatOsjP8uCd02URE=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e h1:4BBnKWFwJ5FLyhw/ijFxKE04i9rubr8WIPR1kjO57iA=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e/go.mod h1:F7AfLKYQqpM3NNBVs7ctW417tavhvoh9SBjsgtwpzbY=
-github.com/charmbracelet/catwalk v0.5.3 h1:Hw9DlX8u79K9iLQJB4Bti9/rTzMvEpBjE/GyniWxHNY=
-github.com/charmbracelet/catwalk v0.5.3/go.mod h1:WnKgNPmQHuMyk7GtwAQwl+ezHusfH40IvzML2qwUGwc=
+github.com/charmbracelet/catwalk v0.5.8 h1:Crs1bCgFtFlPAeoa6NQ1RUG8RmFCxFRSi+ccnAg7reE=
+github.com/charmbracelet/catwalk v0.5.8/go.mod h1:ReU4SdrLfe63jkEjWMdX2wlZMV3k9r11oQAmzN0m+KY=
github.com/charmbracelet/colorprofile v0.3.2 h1:9J27WdztfJQVAQKX2WOlSSRB+5gaKqqITmrvb1uTIiI=
github.com/charmbracelet/colorprofile v0.3.2/go.mod h1:mTD5XzNeWHj8oqHb+S1bssQb7vIHbepiebQ2kPKVKbI=
github.com/charmbracelet/fang v0.4.1 h1:NC0Y4oqg7YuZcBg/KKsHy8DSow0ZDjF4UJL7LwtA0dE=
@@ -186,12 +186,12 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
-github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
-github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
+github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQwVHXptag=
+github.com/lucasb-eyer/go-colorful v1.3.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
-github.com/mark3labs/mcp-go v0.38.0 h1:E5tmJiIXkhwlV0pLAwAT0O5ZjUZSISE/2Jxg+6vpq4I=
-github.com/mark3labs/mcp-go v0.38.0/go.mod h1:T7tUa2jO6MavG+3P25Oy/jR7iCeJPHImCZHRymCn39g=
+github.com/mark3labs/mcp-go v0.40.0 h1:M0oqK412OHBKut9JwXSsj4KanSmEKpzoW8TcxoPOkAU=
+github.com/mark3labs/mcp-go v0.40.0/go.mod h1:T7tUa2jO6MavG+3P25Oy/jR7iCeJPHImCZHRymCn39g=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
@@ -214,8 +214,8 @@ github.com/muesli/roff v0.1.0 h1:YD0lalCotmYuF5HhZliKWlIx7IEhiXeSfq7hNjFqGF8=
github.com/muesli/roff v0.1.0/go.mod h1:pjAHQM9hdUUwm/krAfrLGgJkXJ+YuhtsfZ42kieB2Ig=
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
-github.com/ncruces/go-sqlite3 v0.28.0 h1:AQVTUPgfamONl09LS+4rGFbHmLKM8/QrJJJi1UukjEQ=
-github.com/ncruces/go-sqlite3 v0.28.0/go.mod h1:WqvLhYwtEiZzg1H8BIeahUv/DxbmR+3xG5jDHDiBAGk=
+github.com/ncruces/go-sqlite3 v0.29.0 h1:1tsLiagCoqZEfcHDeKsNSv5jvrY/Iu393pAnw2wLNJU=
+github.com/ncruces/go-sqlite3 v0.29.0/go.mod h1:r1hSvYKPNJ+OlUA1O3r8o9LAawzPAlqeZiIdxTBBBJ0=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/ncruces/julianday v1.0.0 h1:fH0OKwa7NWvniGQtxdJRxAgkBMolni2BjDHaWTxqt7M=
@@ -235,8 +235,8 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pressly/goose/v3 v3.25.0 h1:6WeYhMWGRCzpyd89SpODFnCBCKz41KrVbRT58nVjGng=
github.com/pressly/goose/v3 v3.25.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY=
-github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c h1:kmzxiX+OB0knCo1V0dkEkdPelzCdAzCURCfmFArn2/A=
-github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c/go.mod h1:wNJrtinHyC3YSf6giEh4FJN8+yZV7nXBjvmfjhBIcw4=
+github.com/qjebbs/go-jsons v1.0.0-alpha.4 h1:Qsb4ohRUHQODIUAsJKdKJ/SIDbsO7oGOzsfy+h1yQZs=
+github.com/qjebbs/go-jsons v1.0.0-alpha.4/go.mod h1:wNJrtinHyC3YSf6giEh4FJN8+yZV7nXBjvmfjhBIcw4=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
@@ -261,11 +261,10 @@ github.com/sourcegraph/jsonrpc2 v0.2.1 h1:2GtljixMQYUYCmIg7W9aF2dFmniq/mOr2T9tFR
github.com/sourcegraph/jsonrpc2 v0.2.1/go.mod h1:ZafdZgk/axhT1cvZAPOhw+95nz2I/Ra5qMlU4gTRwIo=
github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y=
github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
-github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
-github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
-github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
-github.com/spf13/pflag v1.0.7 h1:vN6T9TfwStFPFM5XzjsvmzZkLuaLX+HS+0SeFLRgU6M=
-github.com/spf13/pflag v1.0.7/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
+github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
+github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY=
+github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/srwiley/oksvg v0.0.0-20221011165216-be6e8873101c h1:km8GpoQut05eY3GiYWEedbTT0qnSxrCjsVbb7yKY1KE=
github.com/srwiley/oksvg v0.0.0-20221011165216-be6e8873101c/go.mod h1:cNQ3dwVJtS5Hmnjxy6AgTPd0Inb3pW05ftPSX7NZO7Q=
github.com/srwiley/rasterx v0.0.0-20220730225603-2ab79fcdd4ef h1:Ch6Q+AZUxDBCVqdkI8FSpFyZDtCVBc2VmejdNrm5rRQ=
@@ -274,8 +273,8 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
-github.com/stretchr/testify v1.11.0 h1:ib4sjIrwZKxE5u/Japgo/7SJV3PvgjGiRNAvTVGqQl8=
-github.com/stretchr/testify v1.11.0/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
+github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
+github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/tetratelabs/wazero v1.9.0 h1:IcZ56OuxrtaEz8UYNRHBrUa9bYeX9oVY93KspZZBf/I=
github.com/tetratelabs/wazero v1.9.0/go.mod h1:TSbcXCfFP0L2FGkRPxHphadXPjo1T6W+CseNNY7EkjM=
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
@@ -355,8 +354,8 @@ golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
-golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs=
-golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8=
+golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
+golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -407,8 +406,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
-golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
-golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
+golang.org/x/text v0.29.0 h1:1neNs90w9YzJ9BocxfsQNHKuAT4pkghyXc4nhZ6sJvk=
+golang.org/x/text v0.29.0/go.mod h1:7MhJOA9CD2qZyOKYazxdYMF85OwPdEr9jTtBpO7ydH4=
golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg=
golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@@ -420,14 +419,14 @@ golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxb
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.211.0 h1:IUpLjq09jxBSV1lACO33CGY3jsRcbctfGzhj+ZSE/Bg=
google.golang.org/api v0.211.0/go.mod h1:XOloB4MXFH4UTlQSGuNUxw0UT74qdENK8d6JNsXKLi0=
-google.golang.org/genai v1.21.0 h1:0olX8oJPFn0iXNV4cNwgdvc4NHGTZpUbhGhu6Y/zh7U=
-google.golang.org/genai v1.21.0/go.mod h1:QPj5NGJw+3wEOHg+PrsWwJKvG6UC84ex5FR7qAYsN/M=
+google.golang.org/genai v1.25.0 h1:Cpyh2nmEoOS1eM3mT9XKuA/qWTEDoktfP2gsN3EduPE=
+google.golang.org/genai v1.25.0/go.mod h1:OClfdf+r5aaD+sCd4aUSkPzJItmg2wD/WON9lQnRPaY=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 h1:e0AIkUUhxyBKh6ssZNrAMeqhA7RKUj42346d1y02i2g=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg=
google.golang.org/grpc v1.71.0/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec=
-google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
-google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
+google.golang.org/protobuf v1.36.8 h1:xHScyCOEuuwZEc6UtSOvPbAT4zRh0xcNRYekJwfqyMc=
+google.golang.org/protobuf v1.36.8/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
From 5637bcd23290cf37860533c762c69193a2d11db0 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Fri, 19 Sep 2025 11:11:29 -0300
Subject: [PATCH 122/236] test: fix
Signed-off-by: Carlos Alexandro Becker
---
internal/config/merge_test.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/internal/config/merge_test.go b/internal/config/merge_test.go
index a00eb992a3edf97beb534353b4f0768c2b53a6d8..1b721bf2e8e4b4596025c2c773bec0093778f430 100644
--- a/internal/config/merge_test.go
+++ b/internal/config/merge_test.go
@@ -15,7 +15,7 @@ func TestMerge(t *testing.T) {
t.Fatalf("expected no error, got %v", err)
}
- expected := `{"baz":"qux","foo":"bar"}`
+ expected := `{"foo":"bar","baz":"qux"}`
got, err := io.ReadAll(merged)
if err != nil {
t.Fatalf("expected no error reading merged data, got %v", err)
From 36b053b867a6b6395e5235a1841dae8101612e35 Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Sat, 20 Sep 2025 09:37:50 -0300
Subject: [PATCH 123/236] chore(legal): @zoete has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index 1a11c99843401cf443f4dfb72ba9ca732af62fd1..a62172eb28a153115f31a83dd1be2c88193b20df 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -647,6 +647,14 @@
"created_at": "2025-09-19T13:31:42Z",
"repoId": 987670088,
"pullRequestNo": 1084
+ },
+ {
+ "name": "zoete",
+ "id": 33318916,
+ "comment_id": 3314945939,
+ "created_at": "2025-09-20T12:37:42Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1095
}
]
}
\ No newline at end of file
From 36947685bd6c205bf1c95cc68f1ee0c392b44a24 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Fri, 19 Sep 2025 15:07:17 -0300
Subject: [PATCH 124/236] fix: remove from title
Signed-off-by: Carlos Alexandro Becker
---
internal/llm/agent/agent.go | 8 +++++++-
1 file changed, 7 insertions(+), 1 deletion(-)
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index 799cec2e67cc1cdc433903081da86db68ae31e99..ec48fc2956ac5ed3baa031ba2ed4b2f905b65ae0 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -326,7 +326,13 @@ func (a *agent) generateTitle(ctx context.Context, sessionID string, content str
return fmt.Errorf("no response received from title provider")
}
- title := strings.TrimSpace(strings.ReplaceAll(finalResponse.Content, "\n", " "))
+ title := strings.ReplaceAll(finalResponse.Content, "\n", " ")
+
+ if idx := strings.Index(title, ""); idx > 0 {
+ title = title[idx+len(""):]
+ }
+
+ title = strings.TrimSpace(title)
if title == "" {
return nil
}
From feb184d8023eb6a9a2868d0b603a45cc72c13d3d Mon Sep 17 00:00:00 2001
From: Amolith
Date: Mon, 22 Sep 2025 06:29:01 -0600
Subject: [PATCH 125/236] chore: task fmt (#1098)
---
internal/config/load.go | 14 +++++++-------
internal/llm/provider/anthropic.go | 2 +-
internal/llm/provider/openai.go | 2 +-
internal/llm/provider/provider.go | 2 +-
internal/shell/shell.go | 2 +-
internal/tui/exp/diffview/diffview.go | 6 +++---
internal/tui/exp/diffview/split.go | 2 +-
internal/tui/exp/list/list.go | 2 +-
8 files changed, 16 insertions(+), 16 deletions(-)
diff --git a/internal/config/load.go b/internal/config/load.go
index bdfd6328c8cabb1ea57afcd1f6e7e94645fd93e5..9ac5411f0b1697ce96453c72a01defe219c19a37 100644
--- a/internal/config/load.go
+++ b/internal/config/load.go
@@ -381,7 +381,7 @@ func (c *Config) applyLSPDefaults() {
func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (largeModel SelectedModel, smallModel SelectedModel, err error) {
if len(knownProviders) == 0 && c.Providers.Len() == 0 {
err = fmt.Errorf("no providers configured, please configure at least one provider")
- return
+ return largeModel, smallModel, err
}
// Use the first provider enabled based on the known providers order
@@ -394,7 +394,7 @@ func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (large
defaultLargeModel := c.GetModel(string(p.ID), p.DefaultLargeModelID)
if defaultLargeModel == nil {
err = fmt.Errorf("default large model %s not found for provider %s", p.DefaultLargeModelID, p.ID)
- return
+ return largeModel, smallModel, err
}
largeModel = SelectedModel{
Provider: string(p.ID),
@@ -406,7 +406,7 @@ func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (large
defaultSmallModel := c.GetModel(string(p.ID), p.DefaultSmallModelID)
if defaultSmallModel == nil {
err = fmt.Errorf("default small model %s not found for provider %s", p.DefaultSmallModelID, p.ID)
- return
+ return largeModel, smallModel, err
}
smallModel = SelectedModel{
Provider: string(p.ID),
@@ -414,7 +414,7 @@ func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (large
MaxTokens: defaultSmallModel.DefaultMaxTokens,
ReasoningEffort: defaultSmallModel.DefaultReasoningEffort,
}
- return
+ return largeModel, smallModel, err
}
enabledProviders := c.EnabledProviders()
@@ -424,13 +424,13 @@ func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (large
if len(enabledProviders) == 0 {
err = fmt.Errorf("no providers configured, please configure at least one provider")
- return
+ return largeModel, smallModel, err
}
providerConfig := enabledProviders[0]
if len(providerConfig.Models) == 0 {
err = fmt.Errorf("provider %s has no models configured", providerConfig.ID)
- return
+ return largeModel, smallModel, err
}
defaultLargeModel := c.GetModel(providerConfig.ID, providerConfig.Models[0].ID)
largeModel = SelectedModel{
@@ -444,7 +444,7 @@ func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (large
Model: defaultSmallModel.ID,
MaxTokens: defaultSmallModel.DefaultMaxTokens,
}
- return
+ return largeModel, smallModel, err
}
func (c *Config) configureSelectedModels(knownProviders []catwalk.Provider) error {
diff --git a/internal/llm/provider/anthropic.go b/internal/llm/provider/anthropic.go
index 3fffd6b35fe6ee8b6a765e2e5b815ad36a5b6a55..a5355b09e235d791d178a445ba98095974acbef4 100644
--- a/internal/llm/provider/anthropic.go
+++ b/internal/llm/provider/anthropic.go
@@ -175,7 +175,7 @@ func (a *anthropicClient) convertMessages(messages []message.Message) (anthropic
anthropicMessages = append(anthropicMessages, anthropic.NewUserMessage(results...))
}
}
- return
+ return anthropicMessages
}
func (a *anthropicClient) convertTools(tools []tools.BaseTool) []anthropic.ToolUnionParam {
diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index 8d49062044af2c3c2e8663d8d281d0e27ac1a1b1..8df3989abbacbb7e46c59a0c750df8a7879789c1 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -178,7 +178,7 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
}
}
- return
+ return openaiMessages
}
func (o *openaiClient) convertTools(tools []tools.BaseTool) []openai.ChatCompletionToolParam {
diff --git a/internal/llm/provider/provider.go b/internal/llm/provider/provider.go
index 28562f2f484a75c445d9eaa21ce90af4ef5ca613..3705645517cd10803ede285f8d2935f43575b746 100644
--- a/internal/llm/provider/provider.go
+++ b/internal/llm/provider/provider.go
@@ -98,7 +98,7 @@ func (p *baseProvider[C]) cleanMessages(messages []message.Message) (cleaned []m
}
cleaned = append(cleaned, msg)
}
- return
+ return cleaned
}
func (p *baseProvider[C]) SendMessages(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (*ProviderResponse, error) {
diff --git a/internal/shell/shell.go b/internal/shell/shell.go
index ef3abf8d30d37490e452478abe38ef39efd8a7fa..5a10be9537714162e4d5ed25360b42690395793f 100644
--- a/internal/shell/shell.go
+++ b/internal/shell/shell.go
@@ -207,7 +207,7 @@ func splitArgsFlags(parts []string) (args []string, flags []string) {
args = append(args, part)
}
}
- return
+ return args, flags
}
func (s *Shell) blockHandler() func(next interp.ExecHandlerFunc) interp.ExecHandlerFunc {
diff --git a/internal/tui/exp/diffview/diffview.go b/internal/tui/exp/diffview/diffview.go
index eaea2837fcaa7522294143f0385bcbb0879316bd..cda4b74b28843beda36bda17c4fbf29137017422 100644
--- a/internal/tui/exp/diffview/diffview.go
+++ b/internal/tui/exp/diffview/diffview.go
@@ -408,7 +408,7 @@ func (dv *DiffView) renderUnified() string {
content = ansi.GraphemeWidth.Cut(content, dv.xOffset, len(content))
content = ansi.Truncate(content, dv.codeWidth, "…")
leadingEllipsis = dv.xOffset > 0 && strings.TrimSpace(content) != ""
- return
+ return content, leadingEllipsis
}
outer:
@@ -531,7 +531,7 @@ func (dv *DiffView) renderSplit() string {
content = ansi.GraphemeWidth.Cut(content, dv.xOffset, len(content))
content = ansi.Truncate(content, dv.codeWidth, "…")
leadingEllipsis = dv.xOffset > 0 && strings.TrimSpace(content) != ""
- return
+ return content, leadingEllipsis
}
outer:
@@ -716,7 +716,7 @@ func (dv *DiffView) hunkShownLines(h *udiff.Hunk) (before, after int) {
before++
}
}
- return
+ return before, after
}
func (dv *DiffView) lineStyleForType(t udiff.OpKind) LineStyle {
diff --git a/internal/tui/exp/diffview/split.go b/internal/tui/exp/diffview/split.go
index ed4fedb543e7ea34f72f20e3c255a85706a4abcf..5a0f13116160b8c7178293944e3a45e4fcb7d8d3 100644
--- a/internal/tui/exp/diffview/split.go
+++ b/internal/tui/exp/diffview/split.go
@@ -69,5 +69,5 @@ func hunkToSplit(h *udiff.Hunk) (sh splitHunk) {
sh.lines = append(sh.lines, &sl)
}
- return
+ return sh
}
diff --git a/internal/tui/exp/list/list.go b/internal/tui/exp/list/list.go
index 8995e0360a6a72868d0819214a410257d1c8fa2b..fd789f90b89b016abb9b9fb5c79227da7ef30fd9 100644
--- a/internal/tui/exp/list/list.go
+++ b/internal/tui/exp/list/list.go
@@ -1372,7 +1372,7 @@ func (l *list[T]) findWordBoundaries(col, line int) (startCol, endCol int) {
if startCol == -1 {
return 0, 0
}
- return
+ return startCol, endCol
}
func (l *list[T]) findParagraphBoundaries(line int) (startLine, endLine int, found bool) {
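
For reference, the refactor above replaces Go's naked returns (a bare `return` paired with named result parameters) with explicit returns; the compiled behavior is identical, but the returned values stay visible at the return site. A tiny illustration using a hypothetical function, not code from the repo:

```go
package main

import "fmt"

// splitSign uses named results; a bare `return` would implicitly return
// (pos, neg), which is easy to misread in longer functions.
func splitSign(nums []int) (pos, neg []int) {
	for _, n := range nums {
		if n >= 0 {
			pos = append(pos, n)
		} else {
			neg = append(neg, n)
		}
	}
	return pos, neg // explicit return, as in the refactor above
}

func main() {
	fmt.Println(splitSign([]int{3, -1, 4, -1, 5}))
}
```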
From f77371bfd3827fda9706ae803c327ea22db2fa79 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Mon, 22 Sep 2025 15:09:22 -0300
Subject: [PATCH 126/236] ci: -trimpath, remove broken target
- windows/arm/v7 was marked broken by the Go team
- add `-trimpath` to trim local filesystem prefixes (e.g. `/home/...`) from logs
Signed-off-by: Carlos Alexandro Becker
---
.goreleaser.yml | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/.goreleaser.yml b/.goreleaser.yml
index 7c477b587c324bbcf66883baf385e7caf984c872..c0da1c50aec71d899b0cffe09be64e3756e92f51 100644
--- a/.goreleaser.yml
+++ b/.goreleaser.yml
@@ -67,8 +67,12 @@ builds:
goarch: arm
- goos: android
goarch: "386"
+ - goos: windows
+ goarch: arm
ldflags:
- -s -w -X github.com/charmbracelet/crush/internal/version.Version={{.Version}}
+ flags:
+ - -trimpath
archives:
- name_template: >-
From 701e550fbb7f94ef29002cd92b6c18d8ad5a883d Mon Sep 17 00:00:00 2001
From: bbrodriges
Date: Mon, 22 Sep 2025 22:45:39 +0300
Subject: [PATCH 127/236] feat(config): search `crush.json` recursively up from
the working directory (#898)
---
internal/config/load.go | 33 ++-
internal/fsext/lookup.go | 141 ++++++++++
internal/fsext/lookup_test.go | 483 ++++++++++++++++++++++++++++++++
internal/fsext/owner_windows.go | 6 +
internal/fsext/parent.go | 60 ----
5 files changed, 655 insertions(+), 68 deletions(-)
create mode 100644 internal/fsext/lookup.go
create mode 100644 internal/fsext/lookup_test.go
delete mode 100644 internal/fsext/parent.go
diff --git a/internal/config/load.go b/internal/config/load.go
index 9ac5411f0b1697ce96453c72a01defe219c19a37..06ece6467177a443e7b6af20754976e901f642e9 100644
--- a/internal/config/load.go
+++ b/internal/config/load.go
@@ -41,13 +41,8 @@ func LoadReader(fd io.Reader) (*Config, error) {
// Load loads the configuration from the default paths.
func Load(workingDir, dataDir string, debug bool) (*Config, error) {
- // uses default config paths
- configPaths := []string{
- globalConfig(),
- GlobalConfigData(),
- filepath.Join(workingDir, fmt.Sprintf("%s.json", appName)),
- filepath.Join(workingDir, fmt.Sprintf(".%s.json", appName)),
- }
+ configPaths := lookupConfigs(workingDir)
+
cfg, err := loadFromConfigPaths(configPaths)
if err != nil {
return nil, fmt.Errorf("failed to load config from paths %v: %w", configPaths, err)
@@ -316,7 +311,7 @@ func (c *Config) setDefaults(workingDir, dataDir string) {
if dataDir != "" {
c.Options.DataDirectory = dataDir
} else if c.Options.DataDirectory == "" {
- if path, ok := fsext.SearchParent(workingDir, defaultDataDirectory); ok {
+ if path, ok := fsext.LookupClosest(workingDir, defaultDataDirectory); ok {
c.Options.DataDirectory = path
} else {
c.Options.DataDirectory = filepath.Join(workingDir, defaultDataDirectory)
@@ -514,6 +509,28 @@ func (c *Config) configureSelectedModels(knownProviders []catwalk.Provider) erro
return nil
}
+// lookupConfigs searches config files recursively from CWD up to FS root
+func lookupConfigs(cwd string) []string {
+ // prepend default config paths
+ configPaths := []string{
+ globalConfig(),
+ GlobalConfigData(),
+ }
+
+ configNames := []string{appName + ".json", "." + appName + ".json"}
+
+ foundConfigs, err := fsext.Lookup(cwd, configNames...)
+ if err != nil {
+ // returns at least default configs
+ return configPaths
+ }
+
+ // reverse order so last config has more priority
+ slices.Reverse(foundConfigs)
+
+ return append(configPaths, foundConfigs...)
+}
+
func loadFromConfigPaths(configPaths []string) (*Config, error) {
var configs []io.Reader
diff --git a/internal/fsext/lookup.go b/internal/fsext/lookup.go
new file mode 100644
index 0000000000000000000000000000000000000000..098426571c69521a5978a2c2e0a4178b51b0aae6
--- /dev/null
+++ b/internal/fsext/lookup.go
@@ -0,0 +1,141 @@
+package fsext
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+
+ "github.com/charmbracelet/crush/internal/home"
+)
+
+// Lookup searches for target files or directories starting from dir
+// and walking up the directory tree until filesystem root is reached.
+// It also checks the ownership of files to ensure that the search does
+// not cross ownership boundaries. It skips ownership mismatches without
+// errors.
+// Returns full paths to found targets.
+// The search includes the starting directory itself.
+func Lookup(dir string, targets ...string) ([]string, error) {
+ if len(targets) == 0 {
+ return nil, nil
+ }
+
+ var found []string
+
+ err := traverseUp(dir, func(cwd string, owner int) error {
+ for _, target := range targets {
+ fpath := filepath.Join(cwd, target)
+ err := probeEnt(fpath, owner)
+
+			// skip to the next target if it does not exist or on permission denied
+ if errors.Is(err, os.ErrNotExist) ||
+ errors.Is(err, os.ErrPermission) {
+ continue
+ }
+
+ if err != nil {
+ return fmt.Errorf("error probing file %s: %w", fpath, err)
+ }
+
+ found = append(found, fpath)
+ }
+
+ return nil
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ return found, nil
+}
+
+// LookupClosest searches for a target file or directory starting from dir
+// and walking up the directory tree until found or root or home is reached.
+// It also checks the ownership of files to ensure that the search does
+// not cross ownership boundaries.
+// Returns the full path to the target if found, empty string and false otherwise.
+// The search includes the starting directory itself.
+func LookupClosest(dir, target string) (string, bool) {
+ var found string
+
+ err := traverseUp(dir, func(cwd string, owner int) error {
+ fpath := filepath.Join(cwd, target)
+
+ err := probeEnt(fpath, owner)
+ if errors.Is(err, os.ErrNotExist) {
+ return nil
+ }
+
+ if err != nil {
+ return fmt.Errorf("error probing file %s: %w", fpath, err)
+ }
+
+ if cwd == home.Dir() {
+ return filepath.SkipAll
+ }
+
+ found = fpath
+ return filepath.SkipAll
+ })
+
+ return found, err == nil && found != ""
+}
+
+// traverseUp walks up from the given directory until the filesystem root is reached.
+// It passes the absolute path of the current directory and the starting directory's
+// owner ID to the callback function. It is up to the caller to check ownership.
+func traverseUp(dir string, walkFn func(dir string, owner int) error) error {
+ cwd, err := filepath.Abs(dir)
+ if err != nil {
+ return fmt.Errorf("cannot convert CWD to absolute path: %w", err)
+ }
+
+ owner, err := Owner(dir)
+ if err != nil {
+ return fmt.Errorf("cannot get ownership: %w", err)
+ }
+
+ for {
+ err := walkFn(cwd, owner)
+ if err == nil || errors.Is(err, filepath.SkipDir) {
+ parent := filepath.Dir(cwd)
+ if parent == cwd {
+ return nil
+ }
+
+ cwd = parent
+ continue
+ }
+
+ if errors.Is(err, filepath.SkipAll) {
+ return nil
+ }
+
+ return err
+ }
+}
+
+// probeEnt checks whether the entity at the given path exists and belongs to the given owner
+func probeEnt(fspath string, owner int) error {
+ _, err := os.Stat(fspath)
+ if err != nil {
+ return fmt.Errorf("cannot stat %s: %w", fspath, err)
+ }
+
+ // special case for ownership check bypass
+ if owner == -1 {
+ return nil
+ }
+
+ fowner, err := Owner(fspath)
+ if err != nil {
+ return fmt.Errorf("cannot get ownership for %s: %w", fspath, err)
+ }
+
+ if fowner != owner {
+ return os.ErrPermission
+ }
+
+ return nil
+}
diff --git a/internal/fsext/lookup_test.go b/internal/fsext/lookup_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..b7604331673aad0d65d34e046901bc9eae722195
--- /dev/null
+++ b/internal/fsext/lookup_test.go
@@ -0,0 +1,483 @@
+package fsext
+
+import (
+ "errors"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/charmbracelet/crush/internal/home"
+ "github.com/stretchr/testify/require"
+)
+
+func TestLookupClosest(t *testing.T) {
+ tempDir := t.TempDir()
+
+ // Change to temp directory
+ oldWd, _ := os.Getwd()
+ err := os.Chdir(tempDir)
+ require.NoError(t, err)
+
+ t.Cleanup(func() {
+ os.Chdir(oldWd)
+ })
+
+ t.Run("target found in starting directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create target file in current directory
+ targetFile := filepath.Join(testDir, "target.txt")
+ err := os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ foundPath, found := LookupClosest(testDir, "target.txt")
+ require.True(t, found)
+ require.Equal(t, targetFile, foundPath)
+ })
+
+ t.Run("target found in parent directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create subdirectory
+ subDir := filepath.Join(testDir, "subdir")
+ err := os.Mkdir(subDir, 0o755)
+ require.NoError(t, err)
+
+ // Create target file in parent directory
+ targetFile := filepath.Join(testDir, "target.txt")
+ err = os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ foundPath, found := LookupClosest(subDir, "target.txt")
+ require.True(t, found)
+ require.Equal(t, targetFile, foundPath)
+ })
+
+ t.Run("target found in grandparent directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create nested subdirectories
+ subDir := filepath.Join(testDir, "subdir")
+ err := os.Mkdir(subDir, 0o755)
+ require.NoError(t, err)
+
+ subSubDir := filepath.Join(subDir, "subsubdir")
+ err = os.Mkdir(subSubDir, 0o755)
+ require.NoError(t, err)
+
+ // Create target file in grandparent directory
+ targetFile := filepath.Join(testDir, "target.txt")
+ err = os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ foundPath, found := LookupClosest(subSubDir, "target.txt")
+ require.True(t, found)
+ require.Equal(t, targetFile, foundPath)
+ })
+
+ t.Run("target not found", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ foundPath, found := LookupClosest(testDir, "nonexistent.txt")
+ require.False(t, found)
+ require.Empty(t, foundPath)
+ })
+
+ t.Run("target directory found", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create target directory in current directory
+ targetDir := filepath.Join(testDir, "targetdir")
+ err := os.Mkdir(targetDir, 0o755)
+ require.NoError(t, err)
+
+ foundPath, found := LookupClosest(testDir, "targetdir")
+ require.True(t, found)
+ require.Equal(t, targetDir, foundPath)
+ })
+
+ t.Run("stops at home directory", func(t *testing.T) {
+ // This test is limited as we can't easily create files above home directory
+ // but we can test the behavior by searching from home directory itself
+ homeDir := home.Dir()
+
+ // Search for a file that doesn't exist from home directory
+ foundPath, found := LookupClosest(homeDir, "nonexistent_file_12345.txt")
+ require.False(t, found)
+ require.Empty(t, foundPath)
+ })
+
+ t.Run("invalid starting directory", func(t *testing.T) {
+ foundPath, found := LookupClosest("/invalid/path/that/does/not/exist", "target.txt")
+ require.False(t, found)
+ require.Empty(t, foundPath)
+ })
+
+ t.Run("relative path handling", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Change to test directory
+ oldWd, _ := os.Getwd()
+ err := os.Chdir(testDir)
+ require.NoError(t, err)
+ defer os.Chdir(oldWd)
+
+ // Create target file in current directory
+ err = os.WriteFile("target.txt", []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ // Search using relative path
+ foundPath, found := LookupClosest(".", "target.txt")
+ require.True(t, found)
+
+ // Resolve symlinks to handle macOS /private/var vs /var discrepancy
+ expectedPath, err := filepath.EvalSymlinks(filepath.Join(testDir, "target.txt"))
+ require.NoError(t, err)
+ actualPath, err := filepath.EvalSymlinks(foundPath)
+ require.NoError(t, err)
+ require.Equal(t, expectedPath, actualPath)
+ })
+}
+
+func TestLookupClosestWithOwnership(t *testing.T) {
+ // Note: Testing ownership boundaries is difficult in a cross-platform way
+ // without creating complex directory structures with different owners.
+ // This test focuses on the basic functionality when ownership checks pass.
+
+ tempDir := t.TempDir()
+
+ // Change to temp directory
+ oldWd, _ := os.Getwd()
+ err := os.Chdir(tempDir)
+ require.NoError(t, err)
+
+ t.Cleanup(func() {
+ os.Chdir(oldWd)
+ })
+
+ t.Run("search respects same ownership", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create subdirectory structure
+ subDir := filepath.Join(testDir, "subdir")
+ err := os.Mkdir(subDir, 0o755)
+ require.NoError(t, err)
+
+ // Create target file in parent directory
+ targetFile := filepath.Join(testDir, "target.txt")
+ err = os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ // Search should find the target assuming same ownership
+ foundPath, found := LookupClosest(subDir, "target.txt")
+ require.True(t, found)
+ require.Equal(t, targetFile, foundPath)
+ })
+}
+
+func TestLookup(t *testing.T) {
+ tempDir := t.TempDir()
+
+ // Change to temp directory
+ oldWd, _ := os.Getwd()
+ err := os.Chdir(tempDir)
+ require.NoError(t, err)
+
+ t.Cleanup(func() {
+ os.Chdir(oldWd)
+ })
+
+ t.Run("no targets returns empty slice", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ found, err := Lookup(testDir)
+ require.NoError(t, err)
+ require.Empty(t, found)
+ })
+
+ t.Run("single target found in starting directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create target file in current directory
+ targetFile := filepath.Join(testDir, "target.txt")
+ err := os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ found, err := Lookup(testDir, "target.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 1)
+ require.Equal(t, targetFile, found[0])
+ })
+
+ t.Run("multiple targets found in starting directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create multiple target files in current directory
+ targetFile1 := filepath.Join(testDir, "target1.txt")
+ targetFile2 := filepath.Join(testDir, "target2.txt")
+ targetFile3 := filepath.Join(testDir, "target3.txt")
+
+ err := os.WriteFile(targetFile1, []byte("test1"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile2, []byte("test2"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile3, []byte("test3"), 0o644)
+ require.NoError(t, err)
+
+ found, err := Lookup(testDir, "target1.txt", "target2.txt", "target3.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 3)
+ require.Contains(t, found, targetFile1)
+ require.Contains(t, found, targetFile2)
+ require.Contains(t, found, targetFile3)
+ })
+
+ t.Run("targets found in parent directories", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create subdirectory
+ subDir := filepath.Join(testDir, "subdir")
+ err := os.Mkdir(subDir, 0o755)
+ require.NoError(t, err)
+
+ // Create target files in parent directory
+ targetFile1 := filepath.Join(testDir, "target1.txt")
+ targetFile2 := filepath.Join(testDir, "target2.txt")
+ err = os.WriteFile(targetFile1, []byte("test1"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile2, []byte("test2"), 0o644)
+ require.NoError(t, err)
+
+ found, err := Lookup(subDir, "target1.txt", "target2.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 2)
+ require.Contains(t, found, targetFile1)
+ require.Contains(t, found, targetFile2)
+ })
+
+ t.Run("targets found across multiple directory levels", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create nested subdirectories
+ subDir := filepath.Join(testDir, "subdir")
+ err := os.Mkdir(subDir, 0o755)
+ require.NoError(t, err)
+
+ subSubDir := filepath.Join(subDir, "subsubdir")
+ err = os.Mkdir(subSubDir, 0o755)
+ require.NoError(t, err)
+
+ // Create target files at different levels
+ targetFile1 := filepath.Join(testDir, "target1.txt")
+ targetFile2 := filepath.Join(subDir, "target2.txt")
+ targetFile3 := filepath.Join(subSubDir, "target3.txt")
+
+ err = os.WriteFile(targetFile1, []byte("test1"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile2, []byte("test2"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile3, []byte("test3"), 0o644)
+ require.NoError(t, err)
+
+ found, err := Lookup(subSubDir, "target1.txt", "target2.txt", "target3.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 3)
+ require.Contains(t, found, targetFile1)
+ require.Contains(t, found, targetFile2)
+ require.Contains(t, found, targetFile3)
+ })
+
+ t.Run("some targets not found", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create only some target files
+ targetFile1 := filepath.Join(testDir, "target1.txt")
+ targetFile2 := filepath.Join(testDir, "target2.txt")
+
+ err := os.WriteFile(targetFile1, []byte("test1"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile2, []byte("test2"), 0o644)
+ require.NoError(t, err)
+
+ // Search for existing and non-existing targets
+ found, err := Lookup(testDir, "target1.txt", "nonexistent.txt", "target2.txt", "another_nonexistent.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 2)
+ require.Contains(t, found, targetFile1)
+ require.Contains(t, found, targetFile2)
+ })
+
+ t.Run("no targets found", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ found, err := Lookup(testDir, "nonexistent1.txt", "nonexistent2.txt", "nonexistent3.txt")
+ require.NoError(t, err)
+ require.Empty(t, found)
+ })
+
+ t.Run("target directories found", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create target directories
+ targetDir1 := filepath.Join(testDir, "targetdir1")
+ targetDir2 := filepath.Join(testDir, "targetdir2")
+ err := os.Mkdir(targetDir1, 0o755)
+ require.NoError(t, err)
+ err = os.Mkdir(targetDir2, 0o755)
+ require.NoError(t, err)
+
+ found, err := Lookup(testDir, "targetdir1", "targetdir2")
+ require.NoError(t, err)
+ require.Len(t, found, 2)
+ require.Contains(t, found, targetDir1)
+ require.Contains(t, found, targetDir2)
+ })
+
+ t.Run("mixed files and directories", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create target files and directories
+ targetFile := filepath.Join(testDir, "target.txt")
+ targetDir := filepath.Join(testDir, "targetdir")
+ err := os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+ err = os.Mkdir(targetDir, 0o755)
+ require.NoError(t, err)
+
+ found, err := Lookup(testDir, "target.txt", "targetdir")
+ require.NoError(t, err)
+ require.Len(t, found, 2)
+ require.Contains(t, found, targetFile)
+ require.Contains(t, found, targetDir)
+ })
+
+ t.Run("invalid starting directory", func(t *testing.T) {
+ found, err := Lookup("/invalid/path/that/does/not/exist", "target.txt")
+ require.Error(t, err)
+ require.Empty(t, found)
+ })
+
+ t.Run("relative path handling", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Change to test directory
+ oldWd, _ := os.Getwd()
+ err := os.Chdir(testDir)
+ require.NoError(t, err)
+
+ t.Cleanup(func() {
+ os.Chdir(oldWd)
+ })
+
+ // Create target files in current directory
+ err = os.WriteFile("target1.txt", []byte("test1"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile("target2.txt", []byte("test2"), 0o644)
+ require.NoError(t, err)
+
+ // Search using relative path
+ found, err := Lookup(".", "target1.txt", "target2.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 2)
+
+ // Resolve symlinks to handle macOS /private/var vs /var discrepancy
+ expectedPath1, err := filepath.EvalSymlinks(filepath.Join(testDir, "target1.txt"))
+ require.NoError(t, err)
+ expectedPath2, err := filepath.EvalSymlinks(filepath.Join(testDir, "target2.txt"))
+ require.NoError(t, err)
+
+ // Check that found paths match expected paths (order may vary)
+ foundEvalSymlinks := make([]string, len(found))
+ for i, path := range found {
+ evalPath, err := filepath.EvalSymlinks(path)
+ require.NoError(t, err)
+ foundEvalSymlinks[i] = evalPath
+ }
+
+ require.Contains(t, foundEvalSymlinks, expectedPath1)
+ require.Contains(t, foundEvalSymlinks, expectedPath2)
+ })
+}
+
+func TestProbeEnt(t *testing.T) {
+ t.Run("existing file with correct owner", func(t *testing.T) {
+ tempDir := t.TempDir()
+
+ // Create test file
+ testFile := filepath.Join(tempDir, "test.txt")
+ err := os.WriteFile(testFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ // Get owner of temp directory
+ owner, err := Owner(tempDir)
+ require.NoError(t, err)
+
+ // Test probeEnt with correct owner
+ err = probeEnt(testFile, owner)
+ require.NoError(t, err)
+ })
+
+ t.Run("existing directory with correct owner", func(t *testing.T) {
+ tempDir := t.TempDir()
+
+ // Create test directory
+ testDir := filepath.Join(tempDir, "testdir")
+ err := os.Mkdir(testDir, 0o755)
+ require.NoError(t, err)
+
+ // Get owner of temp directory
+ owner, err := Owner(tempDir)
+ require.NoError(t, err)
+
+ // Test probeEnt with correct owner
+ err = probeEnt(testDir, owner)
+ require.NoError(t, err)
+ })
+
+ t.Run("nonexistent file", func(t *testing.T) {
+ tempDir := t.TempDir()
+
+ nonexistentFile := filepath.Join(tempDir, "nonexistent.txt")
+ owner, err := Owner(tempDir)
+ require.NoError(t, err)
+
+ err = probeEnt(nonexistentFile, owner)
+ require.Error(t, err)
+ require.True(t, errors.Is(err, os.ErrNotExist))
+ })
+
+ t.Run("nonexistent file in nonexistent directory", func(t *testing.T) {
+ nonexistentFile := "/this/directory/does/not/exists/nonexistent.txt"
+
+ err := probeEnt(nonexistentFile, -1)
+ require.Error(t, err)
+ require.True(t, errors.Is(err, os.ErrNotExist))
+ })
+
+ t.Run("ownership bypass with -1", func(t *testing.T) {
+ tempDir := t.TempDir()
+
+ // Create test file
+ testFile := filepath.Join(tempDir, "test.txt")
+ err := os.WriteFile(testFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ // Test probeEnt with -1 (bypass ownership check)
+ err = probeEnt(testFile, -1)
+ require.NoError(t, err)
+ })
+
+ t.Run("ownership mismatch returns permission error", func(t *testing.T) {
+ tempDir := t.TempDir()
+
+ // Create test file
+ testFile := filepath.Join(tempDir, "test.txt")
+ err := os.WriteFile(testFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ // Test probeEnt with different owner (use 9999 which is unlikely to be the actual owner)
+ err = probeEnt(testFile, 9999)
+ require.Error(t, err)
+ require.True(t, errors.Is(err, os.ErrPermission))
+ })
+}
diff --git a/internal/fsext/owner_windows.go b/internal/fsext/owner_windows.go
index 107cda009b5fc152cba3200271c7145ff3227a39..41f9091c3e75e8f187984a8e1ddb7a7aa72c9dab 100644
--- a/internal/fsext/owner_windows.go
+++ b/internal/fsext/owner_windows.go
@@ -2,8 +2,14 @@
package fsext
+import "os"
+
// Owner retrieves the user ID of the owner of the file or directory at the
// specified path.
func Owner(path string) (int, error) {
+ _, err := os.Stat(path)
+ if err != nil {
+ return 0, err
+ }
return -1, nil
}
diff --git a/internal/fsext/parent.go b/internal/fsext/parent.go
deleted file mode 100644
index bd3193610a79cbc80b5bb2c1d75be32a819f34f5..0000000000000000000000000000000000000000
--- a/internal/fsext/parent.go
+++ /dev/null
@@ -1,60 +0,0 @@
-package fsext
-
-import (
- "errors"
- "os"
- "path/filepath"
-
- "github.com/charmbracelet/crush/internal/home"
-)
-
-// SearchParent searches for a target file or directory starting from dir
-// and walking up the directory tree until found or root or home is reached.
-// It also checks the ownership of directories to ensure that the search does
-// not cross ownership boundaries.
-// Returns the full path to the target if found, empty string and false otherwise.
-// The search includes the starting directory itself.
-func SearchParent(dir, target string) (string, bool) {
- absDir, err := filepath.Abs(dir)
- if err != nil {
- return "", false
- }
-
- path := filepath.Join(absDir, target)
- if _, err := os.Stat(path); err == nil {
- return path, true
- } else if !errors.Is(err, os.ErrNotExist) {
- return "", false
- }
-
- previousParent := absDir
- previousOwner, err := Owner(previousParent)
- if err != nil {
- return "", false
- }
-
- for {
- parent := filepath.Dir(previousParent)
- if parent == previousParent || parent == home.Dir() {
- return "", false
- }
-
- parentOwner, err := Owner(parent)
- if err != nil {
- return "", false
- }
- if parentOwner != previousOwner {
- return "", false
- }
-
- path := filepath.Join(parent, target)
- if _, err := os.Stat(path); err == nil {
- return path, true
- } else if !errors.Is(err, os.ErrNotExist) {
- return "", false
- }
-
- previousParent = parent
- previousOwner = parentOwner
- }
-}
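For reference, a minimal sketch of how the new `fsext.Lookup` and `fsext.LookupClosest` helpers introduced by this patch might be called; the target file names and the relative starting directory are illustrative only:

```go
package main

import (
	"fmt"

	"github.com/charmbracelet/crush/internal/fsext"
)

func main() {
	// Collect every crush.json / .crush.json from the starting directory up
	// to the filesystem root. Ownership mismatches are skipped silently.
	found, err := fsext.Lookup(".", "crush.json", ".crush.json")
	if err != nil {
		fmt.Println("lookup failed:", err)
		return
	}
	// Matches are appended in walk order: starting directory first, then
	// each parent directory in turn.
	for _, path := range found {
		fmt.Println(path)
	}

	// Closest single match, stopping once the home directory is reached.
	if path, ok := fsext.LookupClosest(".", ".crush.json"); ok {
		fmt.Println("closest:", path)
	}
}
```

`Lookup` gathers every match on the way up, which is why `lookupConfigs` reverses the slice before appending it to the global defaults, while `LookupClosest` stops at the first hit.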
From 1f35d54ac294f61297be4584f4ccb387207028f2 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Tue, 23 Sep 2025 09:07:46 -0300
Subject: [PATCH 128/236] fix: lsp/mcp command expand ~ (#1105)
* fix(lsp): powernap load by name
Signed-off-by: Carlos Alexandro Becker
* fix(mcp): expand command
Signed-off-by: Carlos Alexandro Becker
* fix(lsp): expand command
Signed-off-by: Carlos Alexandro Becker
---------
Signed-off-by: Carlos Alexandro Becker
---
internal/config/load.go | 2 +-
internal/llm/agent/mcp-tools.go | 3 ++-
internal/lsp/client.go | 3 ++-
3 files changed, 5 insertions(+), 3 deletions(-)
diff --git a/internal/config/load.go b/internal/config/load.go
index 06ece6467177a443e7b6af20754976e901f642e9..500fc236834ae6bfd17811ad64692d2341b9f8e1 100644
--- a/internal/config/load.go
+++ b/internal/config/load.go
@@ -352,7 +352,7 @@ func (c *Config) applyLSPDefaults() {
// Apply defaults to each LSP configuration
for name, cfg := range c.LSP {
// Try to get defaults from powernap based on command name
- base, ok := configManager.GetServer(cfg.Command)
+ base, ok := configManager.GetServer(name)
if !ok {
continue
}
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index 90011c43a0fce476c119c7a981ea6760c294b806..67d46b54b637af8d3dacef8149d34202157a9565 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -15,6 +15,7 @@ import (
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/csync"
+ "github.com/charmbracelet/crush/internal/home"
"github.com/charmbracelet/crush/internal/llm/tools"
"github.com/charmbracelet/crush/internal/permission"
"github.com/charmbracelet/crush/internal/pubsub"
@@ -360,7 +361,7 @@ func createMcpClient(name string, m config.MCPConfig) (*client.Client, error) {
return nil, fmt.Errorf("mcp stdio config requires a non-empty 'command' field")
}
return client.NewStdioMCPClientWithOptions(
- m.Command,
+ home.Long(m.Command),
m.ResolvedEnv(),
m.Args,
transport.WithCommandLogger(mcpLogger{name: name}),
diff --git a/internal/lsp/client.go b/internal/lsp/client.go
index 70146d3ad181459db3d2193383373159f72b2022..aedf2476918fd5394c4a876bf7cd5ec177348905 100644
--- a/internal/lsp/client.go
+++ b/internal/lsp/client.go
@@ -15,6 +15,7 @@ import (
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/fsext"
+ "github.com/charmbracelet/crush/internal/home"
powernap "github.com/charmbracelet/x/powernap/pkg/lsp"
"github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
"github.com/charmbracelet/x/powernap/pkg/transport"
@@ -55,7 +56,7 @@ func New(ctx context.Context, name string, config config.LSPConfig) (*Client, er
// Create powernap client config
clientConfig := powernap.ClientConfig{
- Command: config.Command,
+ Command: home.Long(config.Command),
Args: config.Args,
RootURI: rootURI,
Environment: func() map[string]string {
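The implementation of `home.Long` isn't shown in this patch, but the call sites make the intent clear: a configured command such as `~/bin/gopls` should have its leading `~` expanded before the process is started. A hypothetical, self-contained sketch of that kind of expansion (not the actual `home.Long` code):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// expandTilde is an illustrative stand-in for the expansion the patch applies
// via home.Long: a leading "~" is replaced with the user's home directory.
func expandTilde(command string) string {
	if command == "~" || strings.HasPrefix(command, "~/") {
		if home, err := os.UserHomeDir(); err == nil {
			return filepath.Join(home, strings.TrimPrefix(command, "~"))
		}
	}
	return command
}

func main() {
	fmt.Println(expandTilde("~/bin/gopls")) // e.g. /home/user/bin/gopls
	fmt.Println(expandTilde("gopls"))       // non-tilde commands pass through unchanged
}
```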
From 3ba15bfda8f8ce6181a5e96c77a02b914164e521 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Tue, 23 Sep 2025 09:08:19 -0300
Subject: [PATCH 129/236] docs: aws profile/region envs (#1104)
closes #382
Signed-off-by: Carlos Alexandro Becker
---
README.md | 2 ++
1 file changed, 2 insertions(+)
diff --git a/README.md b/README.md
index e89c651de360e0bcfa0336f4a726b60ea9635d31..89b1386c193c1fad67336f0e06f5da3c598f9a8c 100644
--- a/README.md
+++ b/README.md
@@ -188,6 +188,8 @@ That said, you can also set environment variables for preferred providers.
| `AWS_ACCESS_KEY_ID` | AWS Bedrock (Claude) |
| `AWS_SECRET_ACCESS_KEY` | AWS Bedrock (Claude) |
| `AWS_REGION` | AWS Bedrock (Claude) |
+| `AWS_PROFILE` | Custom AWS Profile |
+| `AWS_REGION` | AWS Region |
| `AZURE_OPENAI_API_ENDPOINT` | Azure OpenAI models |
| `AZURE_OPENAI_API_KEY` | Azure OpenAI models (optional when using Entra ID) |
| `AZURE_OPENAI_API_VERSION` | Azure OpenAI models |
From ee860b9ace8136ce46338073ee710822fa1fec56 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Tue, 23 Sep 2025 09:26:29 -0300
Subject: [PATCH 130/236] docs: document more mcp options
refs #970
Signed-off-by: Carlos Alexandro Becker
---
README.md | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/README.md b/README.md
index 89b1386c193c1fad67336f0e06f5da3c598f9a8c..0b3c72a538e2548e724d2b5bd4fce83edae59e14 100644
--- a/README.md
+++ b/README.md
@@ -272,6 +272,8 @@ using `$(echo $VAR)` syntax.
"type": "stdio",
"command": "node",
"args": ["/path/to/mcp-server.js"],
+ "timeout": 120,
+ "disabled": false,
"env": {
"NODE_ENV": "production"
}
@@ -279,6 +281,8 @@ using `$(echo $VAR)` syntax.
"github": {
"type": "http",
"url": "https://example.com/mcp/",
+ "timeout": 120,
+ "disabled": false,
"headers": {
"Authorization": "$(echo Bearer $EXAMPLE_MCP_TOKEN)"
}
@@ -286,6 +290,8 @@ using `$(echo $VAR)` syntax.
"streaming-service": {
"type": "sse",
"url": "https://example.com/mcp/sse",
+ "timeout": 120,
+ "disabled": false,
"headers": {
"API-Key": "$(echo $API_KEY)"
}
From d0edf44c4fe4ad5af3f76e3c7fde694abef9fe6b Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Tue, 23 Sep 2025 09:26:47 -0300
Subject: [PATCH 131/236] chore: home pkg godoc
Signed-off-by: Carlos Alexandro Becker
---
internal/home/home.go | 1 +
1 file changed, 1 insertion(+)
diff --git a/internal/home/home.go b/internal/home/home.go
index f2a9b73b922abd8f027ba68655afc68f42a58b09..74ab5594bf19377a86e6e96cae298a91b4858cff 100644
--- a/internal/home/home.go
+++ b/internal/home/home.go
@@ -1,3 +1,4 @@
+// Package home provides utilities for dealing with the user's home directory.
package home
import (
From 56d6486d73897bc291a1e482686ffecd5d80be28 Mon Sep 17 00:00:00 2001
From: Adam Stringer
Date: Tue, 23 Sep 2025 14:29:12 +0100
Subject: [PATCH 132/236] test: verify tools are taken from agent when disabled
(#1103)
---
internal/config/load_test.go | 23 +++++++++++++++++++++++
1 file changed, 23 insertions(+)
diff --git a/internal/config/load_test.go b/internal/config/load_test.go
index e0ce94f3995fb64cc8f66348723a4e6c62a0ea2b..90276c96ad113f453ed699c8deeb30b4f5fef9d5 100644
--- a/internal/config/load_test.go
+++ b/internal/config/load_test.go
@@ -492,6 +492,29 @@ func TestConfig_setupAgentsWithDisabledTools(t *testing.T) {
assert.Equal(t, []string{"glob", "ls", "sourcegraph", "view"}, taskAgent.AllowedTools)
}
+func TestConfig_setupAgentsWithEveryReadOnlyToolDisabled(t *testing.T) {
+ cfg := &Config{
+ Options: &Options{
+ DisabledTools: []string{
+ "glob",
+ "grep",
+ "ls",
+ "sourcegraph",
+ "view",
+ },
+ },
+ }
+
+ cfg.SetupAgents()
+ coderAgent, ok := cfg.Agents["coder"]
+ require.True(t, ok)
+ assert.Equal(t, []string{"bash", "download", "edit", "multiedit", "fetch", "write"}, coderAgent.AllowedTools)
+
+ taskAgent, ok := cfg.Agents["task"]
+ require.True(t, ok)
+ assert.Equal(t, []string{}, taskAgent.AllowedTools)
+}
+
func TestConfig_configureProvidersWithDisabledProvider(t *testing.T) {
knownProviders := []catwalk.Provider{
{
From c2d85d15612d2cb89c50341dd38f3fc1a0085fd7 Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Tue, 23 Sep 2025 15:54:38 +0200
Subject: [PATCH 133/236] docs: add huggingface inference
---
README.md | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 0b3c72a538e2548e724d2b5bd4fce83edae59e14..a2b07093c1e45162018614411ba7a4300f9ef680 100644
--- a/README.md
+++ b/README.md
@@ -180,8 +180,9 @@ That said, you can also set environment variables for preferred providers.
| `ANTHROPIC_API_KEY` | Anthropic |
| `OPENAI_API_KEY` | OpenAI |
| `OPENROUTER_API_KEY` | OpenRouter |
-| `CEREBRAS_API_KEY` | Cerebras |
| `GEMINI_API_KEY` | Google Gemini |
+| `CEREBRAS_API_KEY` | Cerebras |
+| `HF_TOKEN` | Huggingface Inference |
| `VERTEXAI_PROJECT` | Google Cloud VertexAI (Gemini) |
| `VERTEXAI_LOCATION` | Google Cloud VertexAI (Gemini) |
| `GROQ_API_KEY` | Groq |
From 1f46a69b546885ac95b93d8c4d5b06b97cf84977 Mon Sep 17 00:00:00 2001
From: Rory de Zoete <33318916+zoete@users.noreply.github.com>
Date: Tue, 23 Sep 2025 16:39:47 +0200
Subject: [PATCH 134/236] fix(grep): resolve Windows path parsing with null
separation (#1095)
---
internal/llm/tools/grep.go | 37 +++++++++++++++++++++++++++++++------
internal/llm/tools/rg.go | 4 ++--
2 files changed, 33 insertions(+), 8 deletions(-)
diff --git a/internal/llm/tools/grep.go b/internal/llm/tools/grep.go
index 30d6e0b16a06c28aa33783f76fcdaa5ccb800915..cbf50360b9355c05797690678a99d1310b19556f 100644
--- a/internal/llm/tools/grep.go
+++ b/internal/llm/tools/grep.go
@@ -259,18 +259,16 @@ func searchWithRipgrep(ctx context.Context, pattern, path, include string) ([]gr
continue
}
- // Parse ripgrep output format: file:line:content
- parts := strings.SplitN(line, ":", 3)
- if len(parts) < 3 {
+ // Parse ripgrep output using null separation
+ filePath, lineNumStr, lineText, ok := parseRipgrepLine(line)
+ if !ok {
continue
}
- filePath := parts[0]
- lineNum, err := strconv.Atoi(parts[1])
+ lineNum, err := strconv.Atoi(lineNumStr)
if err != nil {
continue
}
- lineText := parts[2]
fileInfo, err := os.Stat(filePath)
if err != nil {
@@ -288,6 +286,33 @@ func searchWithRipgrep(ctx context.Context, pattern, path, include string) ([]gr
return matches, nil
}
+// parseRipgrepLine parses ripgrep output with null separation to handle Windows paths
+func parseRipgrepLine(line string) (filePath, lineNum, lineText string, ok bool) {
+ // Split on null byte first to separate filename from rest
+ parts := strings.SplitN(line, "\x00", 2)
+ if len(parts) != 2 {
+ return "", "", "", false
+ }
+
+ filePath = parts[0]
+ remainder := parts[1]
+
+ // Now split the remainder on first colon: "linenum:content"
+ colonIndex := strings.Index(remainder, ":")
+ if colonIndex == -1 {
+ return "", "", "", false
+ }
+
+ lineNumStr := remainder[:colonIndex]
+ lineText = remainder[colonIndex+1:]
+
+ if _, err := strconv.Atoi(lineNumStr); err != nil {
+ return "", "", "", false
+ }
+
+ return filePath, lineNumStr, lineText, true
+}
+
func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error) {
matches := []grepMatch{}
diff --git a/internal/llm/tools/rg.go b/internal/llm/tools/rg.go
index 40ab7f2f520697659e3ef092a7ff3e96b2c3c47c..8809b57c8db30b4ac1ed6c070df5a7218c59e233 100644
--- a/internal/llm/tools/rg.go
+++ b/internal/llm/tools/rg.go
@@ -42,8 +42,8 @@ func getRgSearchCmd(ctx context.Context, pattern, path, include string) *exec.Cm
if name == "" {
return nil
}
- // Use -n to show line numbers and include the matched line
- args := []string{"-H", "-n", pattern}
+ // Use -n to show line numbers, -0 for null separation to handle Windows paths
+ args := []string{"-H", "-n", "-0", pattern}
if include != "" {
args = append(args, "--glob", include)
}
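A small, self-contained sketch of what the null-separated `rg -H -n -0` output looks like and how the parsing above copes with a Windows-style path; the sample line is invented for illustration:

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseLine mirrors parseRipgrepLine from the patch: with -0, the file path is
// terminated by a NUL byte, so drive-letter colons in "C:\..." paths no longer
// collide with the "file:line:content" separators.
func parseLine(line string) (file string, lineNum int, text string, ok bool) {
	parts := strings.SplitN(line, "\x00", 2)
	if len(parts) != 2 {
		return "", 0, "", false
	}
	rest := parts[1]
	i := strings.Index(rest, ":")
	if i == -1 {
		return "", 0, "", false
	}
	n, err := strconv.Atoi(rest[:i])
	if err != nil {
		return "", 0, "", false
	}
	return parts[0], n, rest[i+1:], true
}

func main() {
	// Hypothetical match line as emitted by `rg -H -n -0 "err != nil"`.
	line := "C:\\repo\\cmd\\main.go\x0042:\tif err != nil {"
	if file, num, text, ok := parseLine(line); ok {
		fmt.Printf("%s line %d: %s\n", file, num, text)
	}
}
```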
From 0da5e6b17d7c3d379784fd4b43ac9d8a790a8ae4 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Tue, 23 Sep 2025 12:12:07 -0300
Subject: [PATCH 135/236] feat(lsp): load defaults by either name or command
(#1109)
* fix(lsp): load defaults by name or command name
Signed-off-by: Carlos Alexandro Becker
* fix(lsp): simplify
Signed-off-by: Carlos Alexandro Becker
---------
Signed-off-by: Carlos Alexandro Becker
---
crush.json | 4 +---
internal/config/config.go | 2 +-
internal/config/load.go | 13 +++++++++++--
3 files changed, 13 insertions(+), 6 deletions(-)
diff --git a/crush.json b/crush.json
index ba4dc18bc63381ad4bdbca5470a1527986c74205..f5daef89add28ad4924c2bb87ca70020af005d67 100644
--- a/crush.json
+++ b/crush.json
@@ -1,8 +1,6 @@
{
"$schema": "https://charm.land/crush.json",
"lsp": {
- "Go": {
- "command": "gopls"
- }
+ "gopls": {}
}
}
diff --git a/internal/config/config.go b/internal/config/config.go
index 67378e9ff00356358bfedd403aacd655b763cfc6..8e4b8e5437e31af351b14b7330ab1bf4326b4863 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -118,7 +118,7 @@ type MCPConfig struct {
type LSPConfig struct {
Disabled bool `json:"disabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
- Command string `json:"command" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
+ Command string `json:"command,omitempty" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
Args []string `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
Env map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set to the LSP server command"`
FileTypes []string `json:"filetypes,omitempty" jsonschema:"description=File types this LSP server handles,example=go,example=mod,example=rs,example=c,example=js,example=ts"`
diff --git a/internal/config/load.go b/internal/config/load.go
index 500fc236834ae6bfd17811ad64692d2341b9f8e1..59cef29e24d94d4d74d6be39953133b2e91efdf0 100644
--- a/internal/config/load.go
+++ b/internal/config/load.go
@@ -351,10 +351,13 @@ func (c *Config) applyLSPDefaults() {
// Apply defaults to each LSP configuration
for name, cfg := range c.LSP {
- // Try to get defaults from powernap based on command name
+ // Try to get defaults from powernap based on name or command name.
base, ok := configManager.GetServer(name)
if !ok {
- continue
+ base, ok = configManager.GetServer(cfg.Command)
+ if !ok {
+ continue
+ }
}
if cfg.Options == nil {
cfg.Options = base.Settings
@@ -368,6 +371,12 @@ func (c *Config) applyLSPDefaults() {
if len(cfg.RootMarkers) == 0 {
cfg.RootMarkers = base.RootMarkers
}
+ if len(cfg.Args) == 0 {
+ cfg.Args = base.Args
+ }
+ if len(cfg.Env) == 0 {
+ cfg.Env = base.Environment
+ }
// Update the config in the map
c.LSP[name] = cfg
}
From 2fd3b4360e011a2c5c8d81181317b85d43a21b40 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Tue, 23 Sep 2025 12:12:24 -0300
Subject: [PATCH 136/236] fix(mcp): improve timeout errors (#1108)
* fix(mcp): improve timeout errors
refs #970
Signed-off-by: Carlos Alexandro Becker
* fix: improve
Signed-off-by: Carlos Alexandro Becker
---------
Signed-off-by: Carlos Alexandro Becker
---
internal/llm/agent/mcp-tools.go | 25 +++++++++++++++++++------
1 file changed, 19 insertions(+), 6 deletions(-)
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index 67d46b54b637af8d3dacef8149d34202157a9565..1043ca3b9820e72096a0aafe7cdb7868c8d29720 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -152,13 +152,14 @@ func getOrRenewClient(ctx context.Context, name string) (*client.Client, error)
m := config.Get().MCP[name]
state, _ := mcpStates.Get(name)
- pingCtx, cancel := context.WithTimeout(ctx, mcpTimeout(m))
+ timeout := mcpTimeout(m)
+ pingCtx, cancel := context.WithTimeout(ctx, timeout)
defer cancel()
err := c.Ping(pingCtx)
if err == nil {
return c, nil
}
- updateMCPState(name, MCPStateError, err, nil, state.ToolCount)
+ updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, state.ToolCount)
c, err = createAndInitializeClient(ctx, name, m)
if err != nil {
@@ -334,17 +335,22 @@ func createAndInitializeClient(ctx context.Context, name string, m config.MCPCon
slog.Error("error creating mcp client", "error", err, "name", name)
return nil, err
}
+
+ timeout := mcpTimeout(m)
+ initCtx, cancel := context.WithTimeout(ctx, timeout)
+ defer cancel()
+
// Only call Start() for non-stdio clients, as stdio clients auto-start
if m.Type != config.MCPStdio {
- if err := c.Start(ctx); err != nil {
- updateMCPState(name, MCPStateError, err, nil, 0)
+ if err := c.Start(initCtx); err != nil {
+ updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
slog.Error("error starting mcp client", "error", err, "name", name)
_ = c.Close()
return nil, err
}
}
- if _, err := c.Initialize(ctx, mcpInitRequest); err != nil {
- updateMCPState(name, MCPStateError, err, nil, 0)
+ if _, err := c.Initialize(initCtx, mcpInitRequest); err != nil {
+ updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
slog.Error("error initializing mcp client", "error", err, "name", name)
_ = c.Close()
return nil, err
@@ -354,6 +360,13 @@ func createAndInitializeClient(ctx context.Context, name string, m config.MCPCon
return c, nil
}
+func maybeTimeoutErr(err error, timeout time.Duration) error {
+ if errors.Is(err, context.DeadlineExceeded) {
+ return fmt.Errorf("timed out after %s", timeout)
+ }
+ return err
+}
+
func createMcpClient(name string, m config.MCPConfig) (*client.Client, error) {
switch m.Type {
case config.MCPStdio:
From 4926722bff84205322d2a27663b0da45c2462794 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Tue, 23 Sep 2025 13:56:17 -0300
Subject: [PATCH 137/236] fix(lsp): command
Signed-off-by: Carlos Alexandro Becker
---
internal/config/load.go | 3 +++
1 file changed, 3 insertions(+)
diff --git a/internal/config/load.go b/internal/config/load.go
index 59cef29e24d94d4d74d6be39953133b2e91efdf0..16cb531ddfa9d452ed55dd82914c6d77f7650f0d 100644
--- a/internal/config/load.go
+++ b/internal/config/load.go
@@ -371,6 +371,9 @@ func (c *Config) applyLSPDefaults() {
if len(cfg.RootMarkers) == 0 {
cfg.RootMarkers = base.RootMarkers
}
+ if cfg.Command == "" {
+ cfg.Command = base.Command
+ }
if len(cfg.Args) == 0 {
cfg.Args = base.Args
}
From 4dd2b17f1116ed0481444bff894f63e80676e973 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Tue, 23 Sep 2025 14:00:26 -0300
Subject: [PATCH 138/236] fix(lsp): improve error messages
Signed-off-by: Carlos Alexandro Becker
---
internal/lsp/client.go | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/internal/lsp/client.go b/internal/lsp/client.go
index aedf2476918fd5394c4a876bf7cd5ec177348905..226d6c6f3896e29dcbc75c04bee23a34bdc85952 100644
--- a/internal/lsp/client.go
+++ b/internal/lsp/client.go
@@ -77,7 +77,7 @@ func New(ctx context.Context, name string, config config.LSPConfig) (*Client, er
// Create the powernap client
powernapClient, err := powernap.NewClient(clientConfig)
if err != nil {
- return nil, fmt.Errorf("failed to create powernap client: %w", err)
+ return nil, fmt.Errorf("failed to create lsp client: %w", err)
}
client := &Client{
@@ -98,7 +98,7 @@ func New(ctx context.Context, name string, config config.LSPConfig) (*Client, er
// Initialize initializes the LSP client and returns the server capabilities.
func (c *Client) Initialize(ctx context.Context, workspaceDir string) (*protocol.InitializeResult, error) {
if err := c.client.Initialize(ctx, false); err != nil {
- return nil, fmt.Errorf("failed to initialize powernap client: %w", err)
+ return nil, fmt.Errorf("failed to initialize the lsp client: %w", err)
}
// Convert powernap capabilities to protocol capabilities
From ab96589e7ed71411582ae2b970ddadd1bb9d3a83 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Tue, 23 Sep 2025 16:11:38 -0300
Subject: [PATCH 139/236] fix: disable providers (#1087)
* fix: disable providers
if we remove them from the list, they'll still show up because they
won't get merged with catwalk providers later on.
closes #1037
Signed-off-by: Carlos Alexandro Becker
---
internal/config/load.go | 5 -----
internal/config/load_test.go | 8 ++++----
2 files changed, 4 insertions(+), 9 deletions(-)
diff --git a/internal/config/load.go b/internal/config/load.go
index 16cb531ddfa9d452ed55dd82914c6d77f7650f0d..ad2b75b75df8c6f8d7a5cd2e62df1a831157b9e1 100644
--- a/internal/config/load.go
+++ b/internal/config/load.go
@@ -126,11 +126,6 @@ func (c *Config) configureProviders(env env.Env, resolver VariableResolver, know
config, configExists := c.Providers.Get(string(p.ID))
// if the user configured a known provider we need to allow it to override a couple of parameters
if configExists {
- if config.Disable {
- slog.Debug("Skipping provider due to disable flag", "provider", p.ID)
- c.Providers.Del(string(p.ID))
- continue
- }
if config.BaseURL != "" {
p.APIEndpoint = config.BaseURL
}
diff --git a/internal/config/load_test.go b/internal/config/load_test.go
index 90276c96ad113f453ed699c8deeb30b4f5fef9d5..756f849db426e226c197879740f0dc47d3048dd9 100644
--- a/internal/config/load_test.go
+++ b/internal/config/load_test.go
@@ -543,10 +543,10 @@ func TestConfig_configureProvidersWithDisabledProvider(t *testing.T) {
err := cfg.configureProviders(env, resolver, knownProviders)
require.NoError(t, err)
- // Provider should be removed from config when disabled
- require.Equal(t, cfg.Providers.Len(), 0)
- _, exists := cfg.Providers.Get("openai")
- require.False(t, exists)
+ require.Equal(t, cfg.Providers.Len(), 1)
+ prov, exists := cfg.Providers.Get("openai")
+ require.True(t, exists)
+ require.True(t, prov.Disable)
}
func TestConfig_configureProvidersCustomProviderValidation(t *testing.T) {
From 3b6a37597433a631bcbed50c11e102b6dfeffa96 Mon Sep 17 00:00:00 2001
From: Max Justus Spransy
Date: Wed, 27 Aug 2025 12:26:19 -0700
Subject: [PATCH 140/236] feat: add alt/option+esc binding to current esc key
behavior
This mimics the behavior of Claude Code and allows folks who use
Crush from within a terminal emulator that captures the escape key
(like Nvim's built-in terminal emulator) to use it.
---
internal/tui/components/chat/editor/editor.go | 2 +-
internal/tui/components/chat/editor/keys.go | 2 +-
internal/tui/components/chat/messages/messages.go | 2 +-
internal/tui/components/chat/splash/keys.go | 2 +-
internal/tui/components/completions/keys.go | 2 +-
internal/tui/components/dialogs/commands/keys.go | 2 +-
internal/tui/components/dialogs/compact/keys.go | 2 +-
internal/tui/components/dialogs/filepicker/keys.go | 2 +-
internal/tui/components/dialogs/keys.go | 2 +-
internal/tui/components/dialogs/models/keys.go | 2 +-
internal/tui/components/dialogs/quit/keys.go | 2 +-
internal/tui/components/dialogs/sessions/keys.go | 2 +-
internal/tui/page/chat/chat.go | 14 +++++++-------
internal/tui/page/chat/keys.go | 2 +-
14 files changed, 20 insertions(+), 20 deletions(-)
diff --git a/internal/tui/components/chat/editor/editor.go b/internal/tui/components/chat/editor/editor.go
index 04fb5ed1976c7cf7ba4af372dd16ecef48ceb82f..86390611f6115fc14def1e8a7713b252b0d6a59d 100644
--- a/internal/tui/components/chat/editor/editor.go
+++ b/internal/tui/components/chat/editor/editor.go
@@ -75,7 +75,7 @@ var DeleteKeyMaps = DeleteAttachmentKeyMaps{
key.WithHelp("ctrl+r+{i}", "delete attachment at index i"),
),
Escape: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel delete mode"),
),
DeleteAllAttachments: key.NewBinding(
diff --git a/internal/tui/components/chat/editor/keys.go b/internal/tui/components/chat/editor/keys.go
index 9d2274753b4667031bb43a76f54fce18c1decf51..8bc8b2354dfb72120d9e6173256635e903d012fd 100644
--- a/internal/tui/components/chat/editor/keys.go
+++ b/internal/tui/components/chat/editor/keys.go
@@ -61,7 +61,7 @@ var AttachmentsKeyMaps = DeleteAttachmentKeyMaps{
key.WithHelp("ctrl+r+{i}", "delete attachment at index i"),
),
Escape: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel delete mode"),
),
DeleteAllAttachments: key.NewBinding(
diff --git a/internal/tui/components/chat/messages/messages.go b/internal/tui/components/chat/messages/messages.go
index 5cc15d0303fb152f299aef9a2cdc596b9ffb57d4..296b02478a7d0738fef2f60ae6b2211d44424a2f 100644
--- a/internal/tui/components/chat/messages/messages.go
+++ b/internal/tui/components/chat/messages/messages.go
@@ -29,7 +29,7 @@ import (
var CopyKey = key.NewBinding(key.WithKeys("c", "y", "C", "Y"), key.WithHelp("c/y", "copy"))
// ClearSelectionKey is the key binding for clearing the current selection in the chat interface.
-var ClearSelectionKey = key.NewBinding(key.WithKeys("esc"), key.WithHelp("esc", "clear selection"))
+var ClearSelectionKey = key.NewBinding(key.WithKeys("esc", "alt+esc"), key.WithHelp("esc", "clear selection"))
// MessageCmp defines the interface for message components in the chat interface.
// It combines standard UI model interfaces with message-specific functionality.
diff --git a/internal/tui/components/chat/splash/keys.go b/internal/tui/components/chat/splash/keys.go
index 675c608a94af4aa72b701376f3983506166ac7d7..d36c8d8e7ee2231ef8bc27eb053a5745a0bd3885 100644
--- a/internal/tui/components/chat/splash/keys.go
+++ b/internal/tui/components/chat/splash/keys.go
@@ -46,7 +46,7 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("←/→", "switch"),
),
Back: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "back"),
),
}
diff --git a/internal/tui/components/completions/keys.go b/internal/tui/components/completions/keys.go
index 82372358028aec2b1384f1b4b6bff90be4a05eb8..dec1059f8cde34b7a65faad279ebe551a2108a3a 100644
--- a/internal/tui/components/completions/keys.go
+++ b/internal/tui/components/completions/keys.go
@@ -28,7 +28,7 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("enter", "select"),
),
Cancel: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
DownInsert: key.NewBinding(
diff --git a/internal/tui/components/dialogs/commands/keys.go b/internal/tui/components/dialogs/commands/keys.go
index 9685216817c02cdfaab682f94e0f89aa64af365f..7b79a29c28a024154a3b4d8c763969585409fd00 100644
--- a/internal/tui/components/dialogs/commands/keys.go
+++ b/internal/tui/components/dialogs/commands/keys.go
@@ -31,7 +31,7 @@ func DefaultCommandsDialogKeyMap() CommandsDialogKeyMap {
key.WithHelp("tab", "switch selection"),
),
Close: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
}
diff --git a/internal/tui/components/dialogs/compact/keys.go b/internal/tui/components/dialogs/compact/keys.go
index c3dd98e13035085b7d46e7a2e94450b25a7f0d59..cec1486491e342c28f148a50d37f1129944c002e 100644
--- a/internal/tui/components/dialogs/compact/keys.go
+++ b/internal/tui/components/dialogs/compact/keys.go
@@ -33,7 +33,7 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("n", "no"),
),
Close: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
}
diff --git a/internal/tui/components/dialogs/filepicker/keys.go b/internal/tui/components/dialogs/filepicker/keys.go
index 9f3b706e3cf677b66cbc3136a7b98a466470d949..72e32f2ab9dd07d8b7165aee74744e8be5fd78e8 100644
--- a/internal/tui/components/dialogs/filepicker/keys.go
+++ b/internal/tui/components/dialogs/filepicker/keys.go
@@ -38,7 +38,7 @@ func DefaultKeyMap() KeyMap {
),
Close: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "close/exit"),
),
}
diff --git a/internal/tui/components/dialogs/keys.go b/internal/tui/components/dialogs/keys.go
index c382b7e09e15de04efb5b2520bc490ef9d57b985..264ce3d42f6a99f441f961128f109e6baebf4c1b 100644
--- a/internal/tui/components/dialogs/keys.go
+++ b/internal/tui/components/dialogs/keys.go
@@ -12,7 +12,7 @@ type KeyMap struct {
func DefaultKeyMap() KeyMap {
return KeyMap{
Close: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
),
}
}
diff --git a/internal/tui/components/dialogs/models/keys.go b/internal/tui/components/dialogs/models/keys.go
index df546863d87d3a68777e51938f58eee28a5c6473..ef4a6228b839c43a3862e251999dadf81dd6403f 100644
--- a/internal/tui/components/dialogs/models/keys.go
+++ b/internal/tui/components/dialogs/models/keys.go
@@ -34,7 +34,7 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("tab", "toggle type"),
),
Close: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
}
diff --git a/internal/tui/components/dialogs/quit/keys.go b/internal/tui/components/dialogs/quit/keys.go
index 3268749b20c703ae1faf7640e253ce557f051c65..2e8dbc199264eb9221544319f81ef859d71e58b5 100644
--- a/internal/tui/components/dialogs/quit/keys.go
+++ b/internal/tui/components/dialogs/quit/keys.go
@@ -37,7 +37,7 @@ func DefaultKeymap() KeyMap {
key.WithHelp("tab", "switch options"),
),
Close: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
}
diff --git a/internal/tui/components/dialogs/sessions/keys.go b/internal/tui/components/dialogs/sessions/keys.go
index a3ca4b31f0c04c491fa7990f7e69ac546f608a7d..bc7ec1ba9f83915caee9189504abf0b07bd4a24b 100644
--- a/internal/tui/components/dialogs/sessions/keys.go
+++ b/internal/tui/components/dialogs/sessions/keys.go
@@ -26,7 +26,7 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("↑", "previous item"),
),
Close: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
}
diff --git a/internal/tui/page/chat/chat.go b/internal/tui/page/chat/chat.go
index 88523388e31824a65d7e9922b89a1886a5fbcc0d..2918925068cb2f012bead47bbf44260c6255288c 100644
--- a/internal/tui/page/chat/chat.go
+++ b/internal/tui/page/chat/chat.go
@@ -766,7 +766,7 @@ func (p *chatPage) Bindings() []key.Binding {
cancelBinding := p.keyMap.Cancel
if p.isCanceling {
cancelBinding = key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "press again to cancel"),
)
}
@@ -835,7 +835,7 @@ func (p *chatPage) Help() help.KeyMap {
shortList = append(shortList,
// Go back
key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "back"),
),
)
@@ -870,7 +870,7 @@ func (p *chatPage) Help() help.KeyMap {
key.WithHelp("tab/enter", "complete"),
),
key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
key.NewBinding(
@@ -885,18 +885,18 @@ func (p *chatPage) Help() help.KeyMap {
}
if p.app.CoderAgent != nil && p.app.CoderAgent.IsBusy() {
cancelBinding := key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
)
if p.isCanceling {
cancelBinding = key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "press again to cancel"),
)
}
if p.app.CoderAgent != nil && p.app.CoderAgent.QueuedPrompts(p.session.ID) > 0 {
cancelBinding = key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "clear queue"),
)
}
@@ -1042,7 +1042,7 @@ func (p *chatPage) Help() help.KeyMap {
key.WithHelp("ctrl+r+r", "delete all attachments"),
),
key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel delete mode"),
),
})
diff --git a/internal/tui/page/chat/keys.go b/internal/tui/page/chat/keys.go
index ef896aaab10fe36ee8ce88d3f70a3f03e3c61d3e..679a97c69522c0e831e59bddc7b0c1ddcc55fbb9 100644
--- a/internal/tui/page/chat/keys.go
+++ b/internal/tui/page/chat/keys.go
@@ -23,7 +23,7 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("ctrl+f", "add attachment"),
),
Cancel: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
Tab: key.NewBinding(
From 6b161d29e9832fe1abafea231d7947e03d76f574 Mon Sep 17 00:00:00 2001
From: kslamph <15257433+kslamph@users.noreply.github.com>
Date: Wed, 24 Sep 2025 21:12:37 +0800
Subject: [PATCH 141/236] feat(config): allow custom providers of type gemini
(#585)
This change extends the provider configuration to allow users to define custom providers with `type: "gemini"`.
This enables connecting to any Gemini-compatible API by specifying its `base_url` and `api_key` within the `providers` section of `crush.json`. It supports complex setups, such as using a local proxy or a model-balancing service.
---
internal/config/load.go | 2 +-
internal/llm/provider/gemini.go | 3 +++
2 files changed, 4 insertions(+), 1 deletion(-)
diff --git a/internal/config/load.go b/internal/config/load.go
index ad2b75b75df8c6f8d7a5cd2e62df1a831157b9e1..e39074f78bdb8df0ddc98bfbc7322541175b71d6 100644
--- a/internal/config/load.go
+++ b/internal/config/load.go
@@ -270,7 +270,7 @@ func (c *Config) configureProviders(env env.Env, resolver VariableResolver, know
c.Providers.Del(id)
continue
}
- if providerConfig.Type != catwalk.TypeOpenAI && providerConfig.Type != catwalk.TypeAnthropic {
+ if providerConfig.Type != catwalk.TypeOpenAI && providerConfig.Type != catwalk.TypeAnthropic && providerConfig.Type != catwalk.TypeGemini {
slog.Warn("Skipping custom provider because the provider type is not supported", "provider", id, "type", providerConfig.Type)
c.Providers.Del(id)
continue
diff --git a/internal/llm/provider/gemini.go b/internal/llm/provider/gemini.go
index 256e21bf7d59216a41be4603c1475dc9e24bdeea..c1db9561e7db5fd3ae8da1ae1c9ea143f5ea20ec 100644
--- a/internal/llm/provider/gemini.go
+++ b/internal/llm/provider/gemini.go
@@ -43,6 +43,9 @@ func createGeminiClient(opts providerClientOptions) (*genai.Client, error) {
cc := &genai.ClientConfig{
APIKey: opts.apiKey,
Backend: genai.BackendGeminiAPI,
+ HTTPOptions: genai.HTTPOptions{
+ BaseURL: opts.baseURL,
+ },
}
if config.Get().Options.Debug {
cc.HTTPClient = log.NewHTTPClient()
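A minimal sketch of the client configuration this change enables, assuming the `google.golang.org/genai` SDK used above; the endpoint and key below are placeholders, not real values:

```go
package main

import (
	"context"
	"log"

	"google.golang.org/genai"
)

func main() {
	ctx := context.Background()

	// With this patch, a custom provider of type "gemini" in crush.json feeds
	// its base_url and api_key into a client config roughly like this one.
	cc := &genai.ClientConfig{
		APIKey:  "example-api-key",
		Backend: genai.BackendGeminiAPI,
		HTTPOptions: genai.HTTPOptions{
			BaseURL: "https://gemini-proxy.internal.example.com",
		},
	}

	client, err := genai.NewClient(ctx, cc)
	if err != nil {
		log.Fatal(err)
	}
	_ = client // use the client exactly as with the default endpoint
}
```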
From 9654218e61ff58326e4524f34086822e6d7c2864 Mon Sep 17 00:00:00 2001
From: Amolith
Date: Sun, 21 Sep 2025 16:17:50 -0600
Subject: [PATCH 142/236] feat(permissions): pretty-print MCP JSON
Add pretty-printed JSON parameters to MCP tool permission dialogs so
users can read more of the content.
Co-Authored-By: Crush
---
internal/llm/agent/mcp-tools.go | 2 +-
.../dialogs/permissions/permissions.go | 30 +++++++++++++++++++
2 files changed, 31 insertions(+), 1 deletion(-)
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index 1043ca3b9820e72096a0aafe7cdb7868c8d29720..4a4435dccbdb48ea6d2d64bf7af9f257e8e3730b 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -176,7 +176,7 @@ func (b *McpTool) Run(ctx context.Context, params tools.ToolCall) (tools.ToolRes
if sessionID == "" || messageID == "" {
return tools.ToolResponse{}, fmt.Errorf("session ID and message ID are required for creating a new file")
}
- permissionDescription := fmt.Sprintf("execute %s with the following parameters: %s", b.Info().Name, params.Input)
+ permissionDescription := fmt.Sprintf("execute %s with the following parameters:", b.Info().Name)
p := b.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
diff --git a/internal/tui/components/dialogs/permissions/permissions.go b/internal/tui/components/dialogs/permissions/permissions.go
index 2633c0a2f1a50f78adf010214680c157f302073b..9e0a6b05d7385c354f8faba3110b1c0951f9a97d 100644
--- a/internal/tui/components/dialogs/permissions/permissions.go
+++ b/internal/tui/components/dialogs/permissions/permissions.go
@@ -1,6 +1,7 @@
package permissions
import (
+ "encoding/json"
"fmt"
"strings"
@@ -614,6 +615,35 @@ func (p *permissionDialogCmp) generateDefaultContent() string {
content := p.permission.Description
+ // Add pretty-printed JSON parameters for MCP tools
+ if p.permission.Params != nil {
+ var paramStr string
+
+ // Ensure params is a string
+ if str, ok := p.permission.Params.(string); ok {
+ paramStr = str
+ } else {
+ paramStr = fmt.Sprintf("%v", p.permission.Params)
+ }
+
+ // Try to parse as JSON for pretty printing
+ var parsed any
+ if err := json.Unmarshal([]byte(paramStr), &parsed); err == nil {
+ if b, err := json.MarshalIndent(parsed, "", " "); err == nil {
+ if content != "" {
+ content += "\n\n"
+ }
+ content += string(b)
+ }
+ } else {
+ // Not JSON, show as-is
+ if content != "" {
+ content += "\n\n"
+ }
+ content += paramStr
+ }
+ }
+
content = strings.TrimSpace(content)
content = "\n" + content + "\n"
lines := strings.Split(content, "\n")
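A self-contained sketch of the same pretty-printing fallback, pulled out into a standalone helper for illustration (the helper name is hypothetical):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// prettyParams mirrors the logic added to the permission dialog: parameters
// that parse as JSON are re-indented for readability; anything else is
// returned unchanged.
func prettyParams(params any) string {
	s, ok := params.(string)
	if !ok {
		s = fmt.Sprintf("%v", params)
	}
	var parsed any
	if err := json.Unmarshal([]byte(s), &parsed); err != nil {
		return s // not JSON, show as-is
	}
	b, err := json.MarshalIndent(parsed, "", "  ")
	if err != nil {
		return s
	}
	return string(b)
}

func main() {
	fmt.Println(prettyParams(`{"path":"/tmp/demo","recursive":true}`))
}
```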
From ef6a32453abdf3b943ef9f56800a7ec9c0135c46 Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Tue, 23 Sep 2025 11:23:57 -0300
Subject: [PATCH 143/236] chore: add metrics and error tracking
---
README.md | 34 +++++-
go.mod | 3 +
go.sum | 6 +
internal/cmd/root.go | 25 ++++
internal/config/config.go | 1 +
internal/event/all.go | 59 +++++++++
internal/event/event.go | 114 ++++++++++++++++++
internal/event/logger.go | 27 +++++
internal/llm/agent/agent.go | 40 +++---
internal/llm/agent/errors.go | 15 +++
internal/llm/agent/event.go | 53 ++++++++
internal/log/log.go | 3 +
internal/session/session.go | 3 +
.../components/dialogs/sessions/sessions.go | 2 +
internal/tui/tui.go | 4 +
main.go | 2 +
schema.json | 5 +
17 files changed, 379 insertions(+), 17 deletions(-)
create mode 100644 internal/event/all.go
create mode 100644 internal/event/event.go
create mode 100644 internal/event/logger.go
create mode 100644 internal/llm/agent/errors.go
create mode 100644 internal/llm/agent/event.go
diff --git a/README.md b/README.md
index a2b07093c1e45162018614411ba7a4300f9ef680..7f28c5c049cdb6c45bc83ec59f94f4310c13b7c5 100644
--- a/README.md
+++ b/README.md
@@ -545,7 +545,7 @@ config:
}
```
-## Disabling Provider Auto-Updates
+## Provider Auto-Updates
By default, Crush automatically checks for the latest and greatest list of
providers and models from [Catwalk](https://github.com/charmbracelet/catwalk),
@@ -553,6 +553,8 @@ the open source Crush provider database. This means that when new providers and
models are available, or when model metadata changes, Crush automatically
updates your local configuration.
+### Disabling automatic provider updates
+
For those with restricted internet access, or those who prefer to work in
air-gapped environments, this might not be what you want, and this feature can
be disabled.
@@ -597,6 +599,36 @@ crush update-providers embedded
crush update-providers --help
```
+## Metrics
+
+Crush records pseudonymous usage metrics (tied to a device-specific hash),
+which maintainers rely on to inform development and support priorities. The
+metrics include solely usage metadata; prompts and responses are NEVER
+collected.
+
+Details on exactly what’s collected are in the source code ([here](https://github.com/charmbracelet/crush/tree/main/internal/event)
+and [here](https://github.com/charmbracelet/crush/blob/main/internal/llm/agent/event.go)).
+
+You can opt out of metrics collection at any time by setting the following
+environment variable:
+
+```bash
+export CRUSH_DISABLE_METRICS=1
+```
+
+Or by setting the following in your config:
+
+```json
+{
+ "options": {
+ "disable_metrics": true
+ }
+}
+```
+
+Crush also respects the [`DO_NOT_TRACK`](https://consoledonottrack.com)
+convention which can be enabled via `export DO_NOT_TRACK=1`.
+
## A Note on Claude Max and GitHub Copilot
Crush only supports model providers through official, compliant APIs. We do not
diff --git a/go.mod b/go.mod
index b59f4d649ad9c635d5dc8d583e993208b06547a6..ea62993931c7532b55127f54b35bab0be2eb23a7 100644
--- a/go.mod
+++ b/go.mod
@@ -81,6 +81,7 @@ require (
github.com/charmbracelet/x/termios v0.1.1 // indirect
github.com/charmbracelet/x/windows v0.2.2 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
+ github.com/denisbrodbeck/machineid v1.0.1
github.com/disintegration/gift v1.1.2 // indirect
github.com/dlclark/regexp2 v1.11.5 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
@@ -95,6 +96,7 @@ require (
github.com/googleapis/gax-go/v2 v2.14.1 // indirect
github.com/gorilla/css v1.0.1 // indirect
github.com/gorilla/websocket v1.5.3 // indirect
+ github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/cpuid/v2 v2.0.9 // indirect
@@ -114,6 +116,7 @@ require (
github.com/ncruces/julianday v1.0.0 // indirect
github.com/pierrec/lz4/v4 v4.1.22 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
+ github.com/posthog/posthog-go v1.6.10
github.com/rivo/uniseg v0.4.7
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect
diff --git a/go.sum b/go.sum
index a921201c472e338f3c068503d9404d68a7bcba12..fedfeb243142a92fe79d7d6b474ca921dbc952bd 100644
--- a/go.sum
+++ b/go.sum
@@ -118,6 +118,8 @@ github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfv
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/denisbrodbeck/machineid v1.0.1 h1:geKr9qtkB876mXguW2X6TU4ZynleN6ezuMSRhl4D7AQ=
+github.com/denisbrodbeck/machineid v1.0.1/go.mod h1:dJUwb7PTidGDeYyUBmXZ2GphQBbjJCrnectwCyxcUSI=
github.com/disintegration/gift v1.1.2 h1:9ZyHJr+kPamiH10FX3Pynt1AxFUob812bU9Wt4GMzhs=
github.com/disintegration/gift v1.1.2/go.mod h1:Jh2i7f7Q2BM7Ezno3PhfezbR1xpUg9dUg3/RlKGr4HI=
github.com/disintegration/imageorient v0.0.0-20180920195336-8147d86e83ec h1:YrB6aVr9touOt75I9O1SiancmR2GMg45U9UYf0gtgWg=
@@ -162,6 +164,8 @@ github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A
github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
+github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
+github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
@@ -233,6 +237,8 @@ github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjL
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/posthog/posthog-go v1.6.10 h1:OA6bkiUg89rI7f5cSXbcrH5+wLinyS6hHplnD92Pu/M=
+github.com/posthog/posthog-go v1.6.10/go.mod h1:LcC1Nu4AgvV22EndTtrMXTy+7RGVC0MhChSw7Qk5XkY=
github.com/pressly/goose/v3 v3.25.0 h1:6WeYhMWGRCzpyd89SpODFnCBCKz41KrVbRT58nVjGng=
github.com/pressly/goose/v3 v3.25.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY=
github.com/qjebbs/go-jsons v1.0.0-alpha.4 h1:Qsb4ohRUHQODIUAsJKdKJ/SIDbsO7oGOzsfy+h1yQZs=
diff --git a/internal/cmd/root.go b/internal/cmd/root.go
index 3ecb23e5acd68c1666cf9798b17bcc408b9290e1..ea9c218b67c65815b6bcc2c8b1cb17fd02390b39 100644
--- a/internal/cmd/root.go
+++ b/internal/cmd/root.go
@@ -7,11 +7,13 @@ import (
"log/slog"
"os"
"path/filepath"
+ "strconv"
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/crush/internal/app"
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/db"
+ "github.com/charmbracelet/crush/internal/event"
"github.com/charmbracelet/crush/internal/tui"
"github.com/charmbracelet/crush/internal/version"
"github.com/charmbracelet/fang"
@@ -66,6 +68,8 @@ crush -y
}
defer app.Shutdown()
+ event.AppInitialized()
+
// Set up the TUI.
program := tea.NewProgram(
tui.New(app),
@@ -78,11 +82,15 @@ crush -y
go app.Subscribe(program)
if _, err := program.Run(); err != nil {
+ event.Error(err)
slog.Error("TUI run error", "error", err)
return fmt.Errorf("TUI error: %v", err)
}
return nil
},
+ PostRun: func(cmd *cobra.Command, args []string) {
+ event.AppExited()
+ },
}
func Execute() {
@@ -135,9 +143,26 @@ func setupApp(cmd *cobra.Command) (*app.App, error) {
return nil, err
}
+ if shouldEnableMetrics() {
+ event.Init()
+ }
+
return appInstance, nil
}
+func shouldEnableMetrics() bool {
+ if v, _ := strconv.ParseBool(os.Getenv("CRUSH_DISABLE_METRICS")); v {
+ return false
+ }
+ if v, _ := strconv.ParseBool(os.Getenv("DO_NOT_TRACK")); v {
+ return false
+ }
+ if config.Get().Options.DisableMetrics {
+ return false
+ }
+ return true
+}
+
func MaybePrependStdin(prompt string) (string, error) {
if term.IsTerminal(os.Stdin.Fd()) {
return prompt, nil
diff --git a/internal/config/config.go b/internal/config/config.go
index 8e4b8e5437e31af351b14b7330ab1bf4326b4863..3578850d228b78503e67e630a9b688c575403b9c 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -153,6 +153,7 @@ type Options struct {
DisabledTools []string `json:"disabled_tools" jsonschema:"description=Tools to disable"`
DisableProviderAutoUpdate bool `json:"disable_provider_auto_update,omitempty" jsonschema:"description=Disable providers auto-update,default=false"`
Attribution *Attribution `json:"attribution,omitempty" jsonschema:"description=Attribution settings for generated content"`
+ DisableMetrics bool `json:"disable_metrics,omitempty" jsonschema:"description=Disable sending metrics,default=false"`
}
type MCPs map[string]MCPConfig
diff --git a/internal/event/all.go b/internal/event/all.go
new file mode 100644
index 0000000000000000000000000000000000000000..8caf98e62ff3f39b291e341959ebc943361eec05
--- /dev/null
+++ b/internal/event/all.go
@@ -0,0 +1,59 @@
+package event
+
+import (
+ "time"
+)
+
+var appStartTime time.Time
+
+func AppInitialized() {
+ appStartTime = time.Now()
+ send("app initialized")
+}
+
+func AppExited() {
+ duration := time.Since(appStartTime).Truncate(time.Second)
+ send(
+ "app exited",
+ "app duration pretty", duration.String(),
+ "app duration in seconds", int64(duration.Seconds()),
+ )
+ Flush()
+}
+
+func SessionCreated() {
+ send("session created")
+}
+
+func SessionDeleted() {
+ send("session deleted")
+}
+
+func SessionSwitched() {
+ send("session switched")
+}
+
+func FilePickerOpened() {
+ send("filepicker opened")
+}
+
+func PromptSent(props ...any) {
+ send(
+ "prompt sent",
+ props...,
+ )
+}
+
+func PromptResponded(props ...any) {
+ send(
+ "prompt responded",
+ props...,
+ )
+}
+
+func TokensUsed(props ...any) {
+ send(
+ "tokens used",
+ props...,
+ )
+}
diff --git a/internal/event/event.go b/internal/event/event.go
new file mode 100644
index 0000000000000000000000000000000000000000..89a02411eefbbdefc94e784abbca7e3cd638027d
--- /dev/null
+++ b/internal/event/event.go
@@ -0,0 +1,114 @@
+package event
+
+import (
+ "fmt"
+ "log/slog"
+ "os"
+ "reflect"
+ "runtime"
+
+ "github.com/charmbracelet/crush/internal/version"
+ "github.com/denisbrodbeck/machineid"
+ "github.com/posthog/posthog-go"
+)
+
+const (
+ endpoint = "https://data.charm.land"
+ key = "phc_4zt4VgDWLqbYnJYEwLRxFoaTL2noNrQij0C6E8k3I0V"
+)
+
+var (
+ client posthog.Client
+
+ baseProps = posthog.NewProperties().
+ Set("GOOS", runtime.GOOS).
+ Set("GOARCH", runtime.GOARCH).
+ Set("TERM", os.Getenv("TERM")).
+ Set("SHELL", os.Getenv("SHELL")).
+ Set("Version", version.Version).
+ Set("GoVersion", runtime.Version())
+)
+
+func Init() {
+ c, err := posthog.NewWithConfig(key, posthog.Config{
+ Endpoint: endpoint,
+ Logger: logger{},
+ })
+ if err != nil {
+ slog.Error("Failed to initialize PostHog client", "error", err)
+ }
+ client = c
+}
+
+// send logs an event to PostHog with the given event name and properties.
+func send(event string, props ...any) {
+ if client == nil {
+ return
+ }
+ err := client.Enqueue(posthog.Capture{
+ DistinctId: distinctId(),
+ Event: event,
+ Properties: pairsToProps(props...).Merge(baseProps),
+ })
+ if err != nil {
+ slog.Error("Failed to enqueue PostHog event", "event", event, "props", props, "error", err)
+ return
+ }
+}
+
+// Error logs an error event to PostHog with the error type and message.
+func Error(err any, props ...any) {
+ if client == nil {
+ return
+ }
+ // The PostHog Go client does not yet support sending exceptions.
+ // We're mimicking the behavior by sending the minimal info required
+ // for PostHog to recognize this as an exception event.
+ props = append(
+ []any{
+ "$exception_list",
+ []map[string]string{
+ {"type": reflect.TypeOf(err).String(), "value": fmt.Sprintf("%v", err)},
+ },
+ },
+ props...,
+ )
+ send("$exception", props...)
+}
+
+func Flush() {
+ if client == nil {
+ return
+ }
+ if err := client.Close(); err != nil {
+ slog.Error("Failed to flush PostHog events", "error", err)
+ }
+}
+
+func pairsToProps(props ...any) posthog.Properties {
+ p := posthog.NewProperties()
+
+ if !isEven(len(props)) {
+ slog.Error("Event properties must be provided as key-value pairs", "props", props)
+ return p
+ }
+
+ for i := 0; i < len(props); i += 2 {
+ key := props[i].(string)
+ value := props[i+1]
+ p = p.Set(key, value)
+ }
+ return p
+}
+
+func isEven(n int) bool {
+ return n%2 == 0
+}
+
+func distinctId() string {
+ id, err := machineid.ProtectedID("charm")
+ if err != nil {
+ return "crush-cli"
+ }
+ return id
+}
diff --git a/internal/event/logger.go b/internal/event/logger.go
new file mode 100644
index 0000000000000000000000000000000000000000..7648ae2c2cca91ed20535c0d65a677cd4db84500
--- /dev/null
+++ b/internal/event/logger.go
@@ -0,0 +1,27 @@
+package event
+
+import (
+ "log/slog"
+
+ "github.com/posthog/posthog-go"
+)
+
+var _ posthog.Logger = logger{}
+
+type logger struct{}
+
+func (logger) Debugf(format string, args ...any) {
+ slog.Debug(format, args...)
+}
+
+func (logger) Logf(format string, args ...any) {
+ slog.Info(format, args...)
+}
+
+func (logger) Warnf(format string, args ...any) {
+ slog.Warn(format, args...)
+}
+
+func (logger) Errorf(format string, args ...any) {
+ slog.Error(format, args...)
+}
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index ec48fc2956ac5ed3baa031ba2ed4b2f905b65ae0..74b1cb74659238de917c823872698f1b2ed31332 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -12,6 +12,7 @@ import (
"github.com/charmbracelet/catwalk/pkg/catwalk"
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/csync"
+ "github.com/charmbracelet/crush/internal/event"
"github.com/charmbracelet/crush/internal/history"
"github.com/charmbracelet/crush/internal/llm/prompt"
"github.com/charmbracelet/crush/internal/llm/provider"
@@ -25,12 +26,6 @@ import (
"github.com/charmbracelet/crush/internal/shell"
)
-// Common errors
-var (
- ErrRequestCancelled = errors.New("request canceled by user")
- ErrSessionBusy = errors.New("session is currently processing another request")
-)
-
type AgentEventType string
const (
@@ -66,10 +61,11 @@ type Service interface {
type agent struct {
*pubsub.Broker[AgentEvent]
- agentCfg config.Agent
- sessions session.Service
- messages message.Service
- mcpTools []McpTool
+ agentCfg config.Agent
+ sessions session.Service
+ messages message.Service
+ permissions permission.Service
+ mcpTools []McpTool
tools *csync.LazySlice[tools.BaseTool]
// We need this to be able to update it when model changes
@@ -237,6 +233,7 @@ func NewAgent(
activeRequests: csync.NewMap[string, context.CancelFunc](),
tools: csync.NewLazySlice(toolFn),
promptQueue: csync.NewMap[string, []string](),
+ permissions: permissions,
}, nil
}
@@ -365,8 +362,9 @@ func (a *agent) Run(ctx context.Context, sessionID string, content string, attac
}
genCtx, cancel := context.WithCancel(ctx)
-
a.activeRequests.Set(sessionID, cancel)
+ startTime := time.Now()
+
go func() {
slog.Debug("Request started", "sessionID", sessionID)
defer log.RecoverPanic("agent.Run", func() {
@@ -377,16 +375,24 @@ func (a *agent) Run(ctx context.Context, sessionID string, content string, attac
attachmentParts = append(attachmentParts, message.BinaryContent{Path: attachment.FilePath, MIMEType: attachment.MimeType, Data: attachment.Content})
}
result := a.processGeneration(genCtx, sessionID, content, attachmentParts)
- if result.Error != nil && !errors.Is(result.Error, ErrRequestCancelled) && !errors.Is(result.Error, context.Canceled) {
- slog.Error(result.Error.Error())
+ if result.Error != nil {
+ if isCancelledErr(result.Error) {
+ slog.Error("Request canceled", "sessionID", sessionID)
+ } else {
+ slog.Error("Request errored", "sessionID", sessionID, "error", result.Error.Error())
+ event.Error(result.Error)
+ }
+ } else {
+ slog.Debug("Request completed", "sessionID", sessionID)
}
- slog.Debug("Request completed", "sessionID", sessionID)
+ a.eventPromptResponded(sessionID, time.Since(startTime).Truncate(time.Second))
a.activeRequests.Del(sessionID)
cancel()
a.Publish(pubsub.CreatedEvent, result)
events <- result
close(events)
}()
+ a.eventPromptSent(sessionID)
return events, nil
}
@@ -726,13 +732,13 @@ func (a *agent) processEvent(ctx context.Context, sessionID string, assistantMsg
if err := a.messages.Update(ctx, *assistantMsg); err != nil {
return fmt.Errorf("failed to update message: %w", err)
}
- return a.TrackUsage(ctx, sessionID, a.Model(), event.Response.Usage)
+ return a.trackUsage(ctx, sessionID, a.Model(), event.Response.Usage)
}
return nil
}
-func (a *agent) TrackUsage(ctx context.Context, sessionID string, model catwalk.Model, usage provider.TokenUsage) error {
+func (a *agent) trackUsage(ctx context.Context, sessionID string, model catwalk.Model, usage provider.TokenUsage) error {
sess, err := a.sessions.Get(ctx, sessionID)
if err != nil {
return fmt.Errorf("failed to get session: %w", err)
@@ -743,6 +749,8 @@ func (a *agent) TrackUsage(ctx context.Context, sessionID string, model catwalk.
model.CostPer1MIn/1e6*float64(usage.InputTokens) +
model.CostPer1MOut/1e6*float64(usage.OutputTokens)
+ a.eventTokensUsed(sessionID, usage, cost)
+
sess.Cost += cost
sess.CompletionTokens = usage.OutputTokens + usage.CacheReadTokens
sess.PromptTokens = usage.InputTokens + usage.CacheCreationTokens
diff --git a/internal/llm/agent/errors.go b/internal/llm/agent/errors.go
new file mode 100644
index 0000000000000000000000000000000000000000..0e2f983d64b42b93ad3a51f32ce0335b0374a613
--- /dev/null
+++ b/internal/llm/agent/errors.go
@@ -0,0 +1,15 @@
+package agent
+
+import (
+ "context"
+ "errors"
+)
+
+var (
+ ErrRequestCancelled = errors.New("request canceled by user")
+ ErrSessionBusy = errors.New("session is currently processing another request")
+)
+
+func isCancelledErr(err error) bool {
+ return errors.Is(err, context.Canceled) || errors.Is(err, ErrRequestCancelled)
+}
diff --git a/internal/llm/agent/event.go b/internal/llm/agent/event.go
new file mode 100644
index 0000000000000000000000000000000000000000..8642d9990dc31689292abe9f2b39e685462f158e
--- /dev/null
+++ b/internal/llm/agent/event.go
@@ -0,0 +1,53 @@
+package agent
+
+import (
+ "time"
+
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/event"
+ "github.com/charmbracelet/crush/internal/llm/provider"
+)
+
+func (a *agent) eventPromptSent(sessionID string) {
+ event.PromptSent(
+ a.eventCommon(sessionID)...,
+ )
+}
+
+func (a *agent) eventPromptResponded(sessionID string, duration time.Duration) {
+ event.PromptResponded(
+ append(
+ a.eventCommon(sessionID),
+ "prompt duration pretty", duration.String(),
+ "prompt duration in seconds", int64(duration.Seconds()),
+ )...,
+ )
+}
+
+func (a *agent) eventTokensUsed(sessionID string, usage provider.TokenUsage, cost float64) {
+ event.TokensUsed(
+ append(
+ a.eventCommon(sessionID),
+ "input tokens", usage.InputTokens,
+ "output tokens", usage.OutputTokens,
+ "cache read tokens", usage.CacheReadTokens,
+ "cache creation tokens", usage.CacheCreationTokens,
+ "total tokens", usage.InputTokens+usage.OutputTokens+usage.CacheReadTokens+usage.CacheCreationTokens,
+ "cost", cost,
+ )...,
+ )
+}
+
+func (a *agent) eventCommon(sessionID string) []any {
+ cfg := config.Get()
+ currentModel := cfg.Models[cfg.Agents["coder"].Model]
+
+ return []any{
+ "session id", sessionID,
+ "provider", currentModel.Provider,
+ "model", currentModel.Model,
+ "reasoning effort", currentModel.ReasoningEffort,
+ "thinking mode", currentModel.Think,
+ "yolo mode", a.permissions.SkipRequests(),
+ }
+}
diff --git a/internal/log/log.go b/internal/log/log.go
index bf99fe60fa9a5015029af171adfd6b3f9bf5596b..9463c3bd97956da3ab895b8600f79d1c05790844 100644
--- a/internal/log/log.go
+++ b/internal/log/log.go
@@ -9,6 +9,7 @@ import (
"sync/atomic"
"time"
+ "github.com/charmbracelet/crush/internal/event"
"gopkg.in/natefinch/lumberjack.v2"
)
@@ -48,6 +49,8 @@ func Initialized() bool {
func RecoverPanic(name string, cleanup func()) {
if r := recover(); r != nil {
+ event.Error(r, "panic", true, "name", name)
+
// Create a timestamped panic log file
timestamp := time.Now().Format("20060102-150405")
filename := fmt.Sprintf("crush-panic-%s-%s.log", name, timestamp)
diff --git a/internal/session/session.go b/internal/session/session.go
index d988dac3414fa7dd00d13b375e1309f8d6c515dd..f83f66ffa4d1cfb75c6a0d41f09caebcb1c64cf3 100644
--- a/internal/session/session.go
+++ b/internal/session/session.go
@@ -5,6 +5,7 @@ import (
"database/sql"
"github.com/charmbracelet/crush/internal/db"
+ "github.com/charmbracelet/crush/internal/event"
"github.com/charmbracelet/crush/internal/pubsub"
"github.com/google/uuid"
)
@@ -48,6 +49,7 @@ func (s *service) Create(ctx context.Context, title string) (Session, error) {
}
session := s.fromDBItem(dbSession)
s.Publish(pubsub.CreatedEvent, session)
+ event.SessionCreated()
return session, nil
}
@@ -89,6 +91,7 @@ func (s *service) Delete(ctx context.Context, id string) error {
return err
}
s.Publish(pubsub.DeletedEvent, session)
+ event.SessionDeleted()
return nil
}
diff --git a/internal/tui/components/dialogs/sessions/sessions.go b/internal/tui/components/dialogs/sessions/sessions.go
index 4e5cbdef7fdb42f4c667de7ac5bdd5066e7be4df..037eb5ebb727a24b8ab9bfda2e2c72943120e819 100644
--- a/internal/tui/components/dialogs/sessions/sessions.go
+++ b/internal/tui/components/dialogs/sessions/sessions.go
@@ -4,6 +4,7 @@ import (
"github.com/charmbracelet/bubbles/v2/help"
"github.com/charmbracelet/bubbles/v2/key"
tea "github.com/charmbracelet/bubbletea/v2"
+ "github.com/charmbracelet/crush/internal/event"
"github.com/charmbracelet/crush/internal/session"
"github.com/charmbracelet/crush/internal/tui/components/chat"
"github.com/charmbracelet/crush/internal/tui/components/core"
@@ -99,6 +100,7 @@ func (s *sessionDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
selectedItem := s.sessionsList.SelectedItem()
if selectedItem != nil {
selected := *selectedItem
+ event.SessionSwitched()
return s, tea.Sequence(
util.CmdHandler(dialogs.CloseDialogMsg{}),
util.CmdHandler(
diff --git a/internal/tui/tui.go b/internal/tui/tui.go
index 0986aca31dcd779ca6fe611e1d71eff8ad6908e9..2c935810b833af01c582866ec38d5f7b277bc203 100644
--- a/internal/tui/tui.go
+++ b/internal/tui/tui.go
@@ -10,6 +10,7 @@ import (
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/crush/internal/app"
"github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/event"
"github.com/charmbracelet/crush/internal/llm/agent"
"github.com/charmbracelet/crush/internal/permission"
"github.com/charmbracelet/crush/internal/pubsub"
@@ -196,6 +197,7 @@ func (a *appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
if a.app.CoderAgent.IsBusy() {
return a, util.ReportWarn("Agent is busy, please wait...")
}
+
config.Get().UpdatePreferredModel(msg.ModelType, msg.Model)
// Update the agent with the new model/provider configuration
@@ -211,6 +213,8 @@ func (a *appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
// File Picker
case commands.OpenFilePickerMsg:
+ event.FilePickerOpened()
+
if a.dialog.ActiveDialogID() == filepicker.FilePickerID {
// If the commands dialog is already open, close it
return a, util.CmdHandler(dialogs.CloseDialogMsg{})
diff --git a/main.go b/main.go
index 072e3b35d2a2f408d8ed6a09423712b324df8b96..49dbcd7d3c045ae1510d7ca2055fa480c6fadadf 100644
--- a/main.go
+++ b/main.go
@@ -10,11 +10,13 @@ import (
_ "github.com/joho/godotenv/autoload" // automatically load .env files
"github.com/charmbracelet/crush/internal/cmd"
+ "github.com/charmbracelet/crush/internal/event"
"github.com/charmbracelet/crush/internal/log"
)
func main() {
defer log.RecoverPanic("main", func() {
+ event.Flush()
slog.Error("Application terminated due to unhandled panic")
})
diff --git a/schema.json b/schema.json
index f0cb2053e188d918e4c49168080026de5f0bffe5..deb65846fe30ca689779e36745b9a429082c452b 100644
--- a/schema.json
+++ b/schema.json
@@ -320,6 +320,11 @@
"attribution": {
"$ref": "#/$defs/Attribution",
"description": "Attribution settings for generated content"
+ },
+ "disable_metrics": {
+ "type": "boolean",
+ "description": "Disable sending metrics",
+ "default": false
}
},
"additionalProperties": false,
From 2cc3fe25d95f22bdf336955fca2ec1299465e895 Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Wed, 24 Sep 2025 13:49:42 -0300
Subject: [PATCH 145/236] fix: strip path from `$SHELL` (#1119)
This ensures we'll collect `zsh` instead of `/bin/zsh` or
`/opt/homebrew/bin/zsh`, for example.
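For reference, a minimal standalone sketch of what the change relies on — `filepath.Base` keeps only the final path element (standard library only; the paths below are just examples):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// filepath.Base keeps only the final element of the path, so a full
	// shell path collapses to its bare name before being sent as metadata.
	fmt.Println(filepath.Base("/opt/homebrew/bin/zsh")) // zsh
	fmt.Println(filepath.Base("/bin/zsh"))              // zsh
	fmt.Println(filepath.Base(os.Getenv("SHELL")))      // e.g. zsh, bash, fish
}
```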
---
internal/event/event.go | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/internal/event/event.go b/internal/event/event.go
index 89a02411eefbbdefc94e784abbca7e3cd638027d..42272c7035638fee7167b5c3510c7975cb9c9394 100644
--- a/internal/event/event.go
+++ b/internal/event/event.go
@@ -4,6 +4,7 @@ import (
"fmt"
"log/slog"
"os"
+ "path/filepath"
"reflect"
"runtime"
@@ -24,7 +25,7 @@ var (
Set("GOOS", runtime.GOOS).
Set("GOARCH", runtime.GOARCH).
Set("TERM", os.Getenv("TERM")).
- Set("SHELL", os.Getenv("SHELL")).
+ Set("SHELL", filepath.Base(os.Getenv("SHELL"))).
Set("Version", version.Version).
Set("GoVersion", runtime.Version())
)
From 09d8e75b7be61fb501ecd28682b5a56717db4465 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 24 Sep 2025 14:35:12 -0300
Subject: [PATCH 146/236] fix(mcp/lsp): expand variable in commands (#1116)
closes #806
Signed-off-by: Carlos Alexandro Becker
Co-authored-by: taigr
---
internal/app/lsp.go | 2 +-
internal/llm/agent/mcp-tools.go | 21 +++++++++++++--------
internal/lsp/client.go | 9 +++++++--
internal/lsp/client_test.go | 9 ++++++---
4 files changed, 27 insertions(+), 14 deletions(-)
diff --git a/internal/app/lsp.go b/internal/app/lsp.go
index 057e9ce39363f3fd68c8c980ce22e3e8b0e78154..f4c26af2f4ed369a94c7078600ce9639874dc643 100644
--- a/internal/app/lsp.go
+++ b/internal/app/lsp.go
@@ -36,7 +36,7 @@ func (app *App) createAndStartLSPClient(ctx context.Context, name string, config
updateLSPState(name, lsp.StateStarting, nil, nil, 0)
// Create LSP client.
- lspClient, err := lsp.New(ctx, name, config)
+ lspClient, err := lsp.New(ctx, name, config, app.config.Resolver())
if err != nil {
slog.Error("Failed to create LSP client for", name, err)
updateLSPState(name, lsp.StateError, err, nil, 0)
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index 4a4435dccbdb48ea6d2d64bf7af9f257e8e3730b..d670a5797548cd52bbfd23c8cd16fea96b021e8a 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -149,7 +149,8 @@ func getOrRenewClient(ctx context.Context, name string) (*client.Client, error)
return nil, fmt.Errorf("mcp '%s' not available", name)
}
- m := config.Get().MCP[name]
+ cfg := config.Get()
+ m := cfg.MCP[name]
state, _ := mcpStates.Get(name)
timeout := mcpTimeout(m)
@@ -161,7 +162,7 @@ func getOrRenewClient(ctx context.Context, name string) (*client.Client, error)
}
updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, state.ToolCount)
- c, err = createAndInitializeClient(ctx, name, m)
+ c, err = createAndInitializeClient(ctx, name, m, cfg.Resolver())
if err != nil {
return nil, err
}
@@ -313,7 +314,7 @@ func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *con
ctx, cancel := context.WithTimeout(ctx, mcpTimeout(m))
defer cancel()
- c, err := createAndInitializeClient(ctx, name, m)
+ c, err := createAndInitializeClient(ctx, name, m, cfg.Resolver())
if err != nil {
return
}
@@ -328,8 +329,8 @@ func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *con
return slices.Collect(result.Seq())
}
-func createAndInitializeClient(ctx context.Context, name string, m config.MCPConfig) (*client.Client, error) {
- c, err := createMcpClient(name, m)
+func createAndInitializeClient(ctx context.Context, name string, m config.MCPConfig, resolver config.VariableResolver) (*client.Client, error) {
+ c, err := createMcpClient(name, m, resolver)
if err != nil {
updateMCPState(name, MCPStateError, err, nil, 0)
slog.Error("error creating mcp client", "error", err, "name", name)
@@ -367,14 +368,18 @@ func maybeTimeoutErr(err error, timeout time.Duration) error {
return err
}
-func createMcpClient(name string, m config.MCPConfig) (*client.Client, error) {
+func createMcpClient(name string, m config.MCPConfig, resolver config.VariableResolver) (*client.Client, error) {
switch m.Type {
case config.MCPStdio:
- if strings.TrimSpace(m.Command) == "" {
+ command, err := resolver.ResolveValue(m.Command)
+ if err != nil {
+ return nil, fmt.Errorf("invalid mcp command: %w", err)
+ }
+ if strings.TrimSpace(command) == "" {
return nil, fmt.Errorf("mcp stdio config requires a non-empty 'command' field")
}
return client.NewStdioMCPClientWithOptions(
- home.Long(m.Command),
+ home.Long(command),
m.ResolvedEnv(),
m.Args,
transport.WithCommandLogger(mcpLogger{name: name}),
diff --git a/internal/lsp/client.go b/internal/lsp/client.go
index 226d6c6f3896e29dcbc75c04bee23a34bdc85952..259f6ba8c4876dcbeb441d48839685012c48ac32 100644
--- a/internal/lsp/client.go
+++ b/internal/lsp/client.go
@@ -45,7 +45,7 @@ type Client struct {
}
// New creates a new LSP client using the powernap implementation.
-func New(ctx context.Context, name string, config config.LSPConfig) (*Client, error) {
+func New(ctx context.Context, name string, config config.LSPConfig, resolver config.VariableResolver) (*Client, error) {
// Convert working directory to file URI
workDir, err := os.Getwd()
if err != nil {
@@ -54,9 +54,14 @@ func New(ctx context.Context, name string, config config.LSPConfig) (*Client, er
rootURI := string(protocol.URIFromPath(workDir))
+ command, err := resolver.ResolveValue(config.Command)
+ if err != nil {
+ return nil, fmt.Errorf("invalid lsp command: %w", err)
+ }
+
// Create powernap client config
clientConfig := powernap.ClientConfig{
- Command: home.Long(config.Command),
+ Command: home.Long(command),
Args: config.Args,
RootURI: rootURI,
Environment: func() map[string]string {
diff --git a/internal/lsp/client_test.go b/internal/lsp/client_test.go
index 99ef0ca3143e5b8689ba3b63fd5c172456a46c24..7cc9f2f4ba230a4c6896e7ccef367a450c1c55c7 100644
--- a/internal/lsp/client_test.go
+++ b/internal/lsp/client_test.go
@@ -5,14 +5,15 @@ import (
"testing"
"github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/env"
)
-func TestPowernapClient(t *testing.T) {
+func TestClient(t *testing.T) {
ctx := context.Background()
// Create a simple config for testing
cfg := config.LSPConfig{
- Command: "echo", // Use echo as a dummy command that won't fail
+ Command: "$THE_CMD", // Use echo as a dummy command that won't fail
Args: []string{"hello"},
FileTypes: []string{"go"},
Env: map[string]string{},
@@ -20,7 +21,9 @@ func TestPowernapClient(t *testing.T) {
// Test creating a powernap client - this will likely fail with echo
// but we can still test the basic structure
- client, err := New(ctx, "test", cfg)
+ client, err := New(ctx, "test", cfg, config.NewEnvironmentVariableResolver(env.NewFromMap(map[string]string{
+ "THE_CMD": "echo",
+ })))
if err != nil {
// Expected to fail with echo command, skip the rest
t.Skipf("Powernap client creation failed as expected with dummy command: %v", err)
From b22fd0884367fbd59286c9bd158c4fc615f20f66 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 24 Sep 2025 17:12:45 -0300
Subject: [PATCH 147/236] fix(fsext): panic on fastwalk (#1122)
This seems to fix a panic I've been getting sometimes.
bubbletea eats part of the trace, so I'm not sure it's a race, but
judging by the code, and the fact that fastwalk creates multiple
goroutines, it makes sense.
I've been using this for the past few hours and haven't encountered the
error since.
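For context, a minimal sketch of the pattern the fix moves to: `lockedSlice` below is an illustrative stand-in for the internal `csync` slice, not its actual implementation — the point is that appends from concurrent walk callbacks go through a mutex instead of a plain `[]string`:

```go
package main

import (
	"fmt"
	"sync"
)

// lockedSlice is a simplified stand-in for the concurrency-safe slice used
// in the diff: every mutation takes a mutex, so callbacks running on
// multiple walker goroutines can append without racing on the backing array.
type lockedSlice[T any] struct {
	mu    sync.Mutex
	items []T
}

func (s *lockedSlice[T]) Append(v T) {
	s.mu.Lock()
	defer s.mu.Unlock()
	s.items = append(s.items, v)
}

func (s *lockedSlice[T]) Len() int {
	s.mu.Lock()
	defer s.mu.Unlock()
	return len(s.items)
}

func main() {
	var results lockedSlice[string]
	var wg sync.WaitGroup
	// Simulate concurrent walk callbacks; with a plain []string and
	// `results = append(results, path)` this would be a data race.
	for i := 0; i < 8; i++ {
		wg.Add(1)
		go func(i int) {
			defer wg.Done()
			results.Append(fmt.Sprintf("file-%d", i))
		}(i)
	}
	wg.Wait()
	fmt.Println(results.Len()) // 8
}
```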
Signed-off-by: Carlos Alexandro Becker
---
internal/fsext/ls.go | 11 ++++++-----
1 file changed, 6 insertions(+), 5 deletions(-)
diff --git a/internal/fsext/ls.go b/internal/fsext/ls.go
index 2c46416f28a2777ddc9092883686c8a3461a9f7d..2027f734c4156572b134c012b2e3c143c364bd29 100644
--- a/internal/fsext/ls.go
+++ b/internal/fsext/ls.go
@@ -4,6 +4,7 @@ import (
"log/slog"
"os"
"path/filepath"
+ "slices"
"strings"
"sync"
@@ -200,7 +201,7 @@ func (dl *directoryLister) getIgnore(path string) ignore.IgnoreParser {
// ListDirectory lists files and directories in the specified path,
func ListDirectory(initialPath string, ignorePatterns []string, limit int) ([]string, bool, error) {
- var results []string
+ results := csync.NewSlice[string]()
truncated := false
dl := NewDirectoryLister(initialPath)
@@ -227,19 +228,19 @@ func ListDirectory(initialPath string, ignorePatterns []string, limit int) ([]st
if d.IsDir() {
path = path + string(filepath.Separator)
}
- results = append(results, path)
+ results.Append(path)
}
- if limit > 0 && len(results) >= limit {
+ if limit > 0 && results.Len() >= limit {
truncated = true
return filepath.SkipAll
}
return nil
})
- if err != nil && len(results) == 0 {
+ if err != nil && results.Len() == 0 {
return nil, truncated, err
}
- return results, truncated, nil
+ return slices.Collect(results.Seq()), truncated, nil
}
From 5633f242b26e043db12004959d867406c3b71e8d Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 24 Sep 2025 14:25:33 -0300
Subject: [PATCH 148/236] fix(provider): do not retry auth errors
If auth failed... it's unlikely it'll work next time
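Roughly, the intent is as follows (a hedged sketch — `shouldRetryStatus` is a hypothetical helper, not the providers' actual `shouldRetry` signature): a 401 is terminal, while the usual transient statuses stay retryable.

```go
package main

import (
	"fmt"
	"net/http"
)

// shouldRetryStatus mirrors the intent of this patch in isolation: a 401 is
// not retried (a bad credential won't fix itself on the next attempt), while
// 429 and 500 are still treated as transient.
func shouldRetryStatus(status int) bool {
	switch status {
	case http.StatusUnauthorized:
		return false
	case http.StatusTooManyRequests, http.StatusInternalServerError:
		return true
	default:
		return false
	}
}

func main() {
	fmt.Println(shouldRetryStatus(http.StatusUnauthorized))        // false
	fmt.Println(shouldRetryStatus(http.StatusTooManyRequests))     // true
	fmt.Println(shouldRetryStatus(http.StatusInternalServerError)) // true
}
```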
Signed-off-by: Carlos Alexandro Becker
---
internal/llm/provider/anthropic.go | 7 +------
internal/llm/provider/gemini.go | 10 +---------
internal/llm/provider/openai.go | 7 +------
internal/llm/provider/provider.go | 2 +-
4 files changed, 4 insertions(+), 22 deletions(-)
diff --git a/internal/llm/provider/anthropic.go b/internal/llm/provider/anthropic.go
index a5355b09e235d791d178a445ba98095974acbef4..d07b657e4ca2861bdbf8f82401a9283c82263e57 100644
--- a/internal/llm/provider/anthropic.go
+++ b/internal/llm/provider/anthropic.go
@@ -493,12 +493,7 @@ func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, err
}
if apiErr.StatusCode == 401 {
- a.providerOptions.apiKey, err = config.Get().Resolve(a.providerOptions.config.APIKey)
- if err != nil {
- return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
- }
- a.client = createAnthropicClient(a.providerOptions, a.tp)
- return true, 0, nil
+ return false, 0, err
}
// Handle context limit exceeded error (400 Bad Request)
diff --git a/internal/llm/provider/gemini.go b/internal/llm/provider/gemini.go
index c1db9561e7db5fd3ae8da1ae1c9ea143f5ea20ec..54835596d171ded734b245c27fc3a628ddc8c36a 100644
--- a/internal/llm/provider/gemini.go
+++ b/internal/llm/provider/gemini.go
@@ -436,15 +436,7 @@ func (g *geminiClient) shouldRetry(attempts int, err error) (bool, int64, error)
// Check for token expiration (401 Unauthorized)
if contains(errMsg, "unauthorized", "invalid api key", "api key expired") {
- g.providerOptions.apiKey, err = config.Get().Resolve(g.providerOptions.config.APIKey)
- if err != nil {
- return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
- }
- g.client, err = createGeminiClient(g.providerOptions)
- if err != nil {
- return false, 0, fmt.Errorf("failed to create Gemini client after API key refresh: %w", err)
- }
- return true, 0, nil
+ return false, 0, err
}
// Check for common rate limit error messages
diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index 8df3989abbacbb7e46c59a0c750df8a7879789c1..587f01384a151a940dcbcdcecb71eb5ba27a554b 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -514,12 +514,7 @@ func (o *openaiClient) shouldRetry(attempts int, err error) (bool, int64, error)
if errors.As(err, &apiErr) {
// Check for token expiration (401 Unauthorized)
if apiErr.StatusCode == 401 {
- o.providerOptions.apiKey, err = config.Get().Resolve(o.providerOptions.config.APIKey)
- if err != nil {
- return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
- }
- o.client = createOpenAIClient(o.providerOptions)
- return true, 0, nil
+ return false, 0, err
}
if apiErr.StatusCode != 429 && apiErr.StatusCode != 500 {
diff --git a/internal/llm/provider/provider.go b/internal/llm/provider/provider.go
index 3705645517cd10803ede285f8d2935f43575b746..0dada9d8b1e353801fde43b1d9ebb1fc6eaa0a1e 100644
--- a/internal/llm/provider/provider.go
+++ b/internal/llm/provider/provider.go
@@ -13,7 +13,7 @@ import (
type EventType string
-const maxRetries = 8
+const maxRetries = 3
const (
EventContentStart EventType = "content_start"
From 1be21ca52a7372f2725bd07397bfa5e31f53204c Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 24 Sep 2025 14:28:01 -0300
Subject: [PATCH 149/236] refactor: use http.Status... consts
Signed-off-by: Carlos Alexandro Becker
---
internal/llm/provider/anthropic.go | 5 +++--
internal/llm/provider/openai.go | 5 +++--
2 files changed, 6 insertions(+), 4 deletions(-)
diff --git a/internal/llm/provider/anthropic.go b/internal/llm/provider/anthropic.go
index d07b657e4ca2861bdbf8f82401a9283c82263e57..cfe8f6210fce8ad75eb930374a618e32ac1e2a03 100644
--- a/internal/llm/provider/anthropic.go
+++ b/internal/llm/provider/anthropic.go
@@ -7,6 +7,7 @@ import (
"fmt"
"io"
"log/slog"
+ "net/http"
"regexp"
"strconv"
"strings"
@@ -492,12 +493,12 @@ func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, err
return false, 0, fmt.Errorf("maximum retry attempts reached for rate limit: %d retries", maxRetries)
}
- if apiErr.StatusCode == 401 {
+ if apiErr.StatusCode == http.StatusUnauthorized {
return false, 0, err
}
// Handle context limit exceeded error (400 Bad Request)
- if apiErr.StatusCode == 400 {
+ if apiErr.StatusCode == http.StatusBadRequest {
if adjusted, ok := a.handleContextLimitError(apiErr); ok {
a.adjustedMaxTokens = adjusted
slog.Debug("Adjusted max_tokens due to context limit", "new_max_tokens", adjusted)
diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index 587f01384a151a940dcbcdcecb71eb5ba27a554b..d2563cfec5104145e8571d40540e9d7acd886a1d 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -7,6 +7,7 @@ import (
"fmt"
"io"
"log/slog"
+ "net/http"
"strings"
"time"
@@ -513,11 +514,11 @@ func (o *openaiClient) shouldRetry(attempts int, err error) (bool, int64, error)
retryAfterValues := []string{}
if errors.As(err, &apiErr) {
// Check for token expiration (401 Unauthorized)
- if apiErr.StatusCode == 401 {
+ if apiErr.StatusCode == http.StatusUnauthorized {
return false, 0, err
}
- if apiErr.StatusCode != 429 && apiErr.StatusCode != 500 {
+ if apiErr.StatusCode != http.StatusTooManyRequests && apiErr.StatusCode != http.StatusInternalServerError {
return false, 0, err
}
From 9365343e4f1c16e695029026ff7a97ba6320d73f Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 24 Sep 2025 14:29:08 -0300
Subject: [PATCH 150/236] refactor: use http.Status... consts
Signed-off-by: Carlos Alexandro Becker
---
internal/llm/provider/anthropic.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/internal/llm/provider/anthropic.go b/internal/llm/provider/anthropic.go
index cfe8f6210fce8ad75eb930374a618e32ac1e2a03..11c131c6cbc0919e9847c1820740b81cccf7f781 100644
--- a/internal/llm/provider/anthropic.go
+++ b/internal/llm/provider/anthropic.go
@@ -507,7 +507,7 @@ func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, err
}
isOverloaded := strings.Contains(apiErr.Error(), "overloaded") || strings.Contains(apiErr.Error(), "rate limit exceeded")
- if apiErr.StatusCode != 429 && apiErr.StatusCode != 529 && !isOverloaded {
+ if apiErr.StatusCode != http.StatusTooManyRequests && apiErr.StatusCode != 529 && !isOverloaded {
return false, 0, err
}
From 685c81bd7c1d5d601c4c6d65608caf08e106e51f Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 24 Sep 2025 14:30:43 -0300
Subject: [PATCH 151/236] chore: comment
Signed-off-by: Carlos Alexandro Becker
---
internal/llm/provider/anthropic.go | 1 +
1 file changed, 1 insertion(+)
diff --git a/internal/llm/provider/anthropic.go b/internal/llm/provider/anthropic.go
index 11c131c6cbc0919e9847c1820740b81cccf7f781..636257565caf91fc4f7f81af20a6e742aadbd3ee 100644
--- a/internal/llm/provider/anthropic.go
+++ b/internal/llm/provider/anthropic.go
@@ -507,6 +507,7 @@ func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, err
}
isOverloaded := strings.Contains(apiErr.Error(), "overloaded") || strings.Contains(apiErr.Error(), "rate limit exceeded")
+ // 529 (unofficial): The service is overloaded
if apiErr.StatusCode != http.StatusTooManyRequests && apiErr.StatusCode != 529 && !isOverloaded {
return false, 0, err
}
From fa822909be01eda4de16064eb188160a9e9db77b Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 24 Sep 2025 14:38:56 -0300
Subject: [PATCH 152/236] fix: improve retry
Signed-off-by: Carlos Alexandro Becker
---
internal/llm/provider/anthropic.go | 13 ++++++++++++-
internal/llm/provider/gemini.go | 16 +++++++++++++++-
internal/llm/provider/openai.go | 13 ++++++++++++-
3 files changed, 39 insertions(+), 3 deletions(-)
diff --git a/internal/llm/provider/anthropic.go b/internal/llm/provider/anthropic.go
index 636257565caf91fc4f7f81af20a6e742aadbd3ee..9648bfd282b5b467399cb715389c5c5969fba121 100644
--- a/internal/llm/provider/anthropic.go
+++ b/internal/llm/provider/anthropic.go
@@ -509,7 +509,18 @@ func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, err
isOverloaded := strings.Contains(apiErr.Error(), "overloaded") || strings.Contains(apiErr.Error(), "rate limit exceeded")
// 529 (unofficial): The service is overloaded
if apiErr.StatusCode != http.StatusTooManyRequests && apiErr.StatusCode != 529 && !isOverloaded {
- return false, 0, err
+ prev := a.providerOptions.apiKey
+ // in case the key comes from a script, we try to re-evaluate it.
+ a.providerOptions.apiKey, err = config.Get().Resolve(a.providerOptions.config.APIKey)
+ if err != nil {
+ return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
+ }
+ // if it didn't change, do not retry.
+ if prev == a.providerOptions.apiKey {
+ return false, 0, err
+ }
+ a.client = createAnthropicClient(a.providerOptions, a.tp)
+ return true, 0, nil
}
retryMs := 0
diff --git a/internal/llm/provider/gemini.go b/internal/llm/provider/gemini.go
index 54835596d171ded734b245c27fc3a628ddc8c36a..3987deb7ebcc6330c9d3bcb4a52aeeb292eab43f 100644
--- a/internal/llm/provider/gemini.go
+++ b/internal/llm/provider/gemini.go
@@ -436,7 +436,21 @@ func (g *geminiClient) shouldRetry(attempts int, err error) (bool, int64, error)
// Check for token expiration (401 Unauthorized)
if contains(errMsg, "unauthorized", "invalid api key", "api key expired") {
- return false, 0, err
+ prev := g.providerOptions.apiKey
+ // in case the key comes from a script, we try to re-evaluate it.
+ g.providerOptions.apiKey, err = config.Get().Resolve(g.providerOptions.config.APIKey)
+ if err != nil {
+ return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
+ }
+ // if it didn't change, do not retry.
+ if prev == g.providerOptions.apiKey {
+ return false, 0, err
+ }
+ g.client, err = createGeminiClient(g.providerOptions)
+ if err != nil {
+ return false, 0, fmt.Errorf("failed to create Gemini client after API key refresh: %w", err)
+ }
+ return true, 0, nil
}
// Check for common rate limit error messages
diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index d2563cfec5104145e8571d40540e9d7acd886a1d..8ec366caff4156fbf4baae76fc24ce5c30d4a91d 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -515,7 +515,18 @@ func (o *openaiClient) shouldRetry(attempts int, err error) (bool, int64, error)
if errors.As(err, &apiErr) {
// Check for token expiration (401 Unauthorized)
if apiErr.StatusCode == http.StatusUnauthorized {
- return false, 0, err
+ prev := o.providerOptions.apiKey
+ // in case the key comes from a script, we try to re-evaluate it.
+ o.providerOptions.apiKey, err = config.Get().Resolve(o.providerOptions.config.APIKey)
+ if err != nil {
+ return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
+ }
+ // if it didn't change, do not retry.
+ if prev == o.providerOptions.apiKey {
+ return false, 0, err
+ }
+ o.client = createOpenAIClient(o.providerOptions)
+ return true, 0, nil
}
if apiErr.StatusCode != http.StatusTooManyRequests && apiErr.StatusCode != http.StatusInternalServerError {
From 63eda4deeb95abdd683edb31c187de22d5ac3dc5 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 24 Sep 2025 14:39:58 -0300
Subject: [PATCH 153/236] fix: improve retry
Signed-off-by: Carlos Alexandro Becker
---
internal/llm/provider/anthropic.go | 26 +++++++++++++-------------
1 file changed, 13 insertions(+), 13 deletions(-)
diff --git a/internal/llm/provider/anthropic.go b/internal/llm/provider/anthropic.go
index 9648bfd282b5b467399cb715389c5c5969fba121..981ff4590fd7db92288ff11b3d8f607e594cb0fd 100644
--- a/internal/llm/provider/anthropic.go
+++ b/internal/llm/provider/anthropic.go
@@ -494,7 +494,18 @@ func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, err
}
if apiErr.StatusCode == http.StatusUnauthorized {
- return false, 0, err
+ prev := a.providerOptions.apiKey
+ // in case the key comes from a script, we try to re-evaluate it.
+ a.providerOptions.apiKey, err = config.Get().Resolve(a.providerOptions.config.APIKey)
+ if err != nil {
+ return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
+ }
+ // if it didn't change, do not retry.
+ if prev == a.providerOptions.apiKey {
+ return false, 0, err
+ }
+ a.client = createAnthropicClient(a.providerOptions, a.tp)
+ return true, 0, nil
}
// Handle context limit exceeded error (400 Bad Request)
@@ -509,18 +520,7 @@ func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, err
isOverloaded := strings.Contains(apiErr.Error(), "overloaded") || strings.Contains(apiErr.Error(), "rate limit exceeded")
// 529 (unofficial): The service is overloaded
if apiErr.StatusCode != http.StatusTooManyRequests && apiErr.StatusCode != 529 && !isOverloaded {
- prev := a.providerOptions.apiKey
- // in case the key comes from a script, we try to re-evaluate it.
- a.providerOptions.apiKey, err = config.Get().Resolve(a.providerOptions.config.APIKey)
- if err != nil {
- return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
- }
- // if it didn't change, do not retry.
- if prev == a.providerOptions.apiKey {
- return false, 0, err
- }
- a.client = createAnthropicClient(a.providerOptions, a.tp)
- return true, 0, nil
+ return false, 0, err
}
retryMs := 0
From 51cee9e947104ece95de6553bd06ce4f1d3cc866 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 24 Sep 2025 11:57:14 -0300
Subject: [PATCH 154/236] chore: add task release
you can use it as:
```sh
task release -- -m 'some short description'
```
and it'll:
- figure out the next tag using svu
- check if you're on main
- check branch is clean
- drop the nightly tag (it's recreated, so if you have an old one, `git push --tags` will complain about it)
- `git tag --sign {{.NEXT}}` + any args you pass to release
- `git push --tags`
Signed-off-by: Carlos Alexandro Becker
---
Taskfile.yaml | 17 +++++++++++++++++
1 file changed, 17 insertions(+)
diff --git a/Taskfile.yaml b/Taskfile.yaml
index 443531fa2435d5557536a4d2e6d88014ea4a5677..7f821f584704393dffc750795e0c48ecdf5ea8ab 100644
--- a/Taskfile.yaml
+++ b/Taskfile.yaml
@@ -84,3 +84,20 @@ tasks:
- echo "Generated schema.json"
generates:
- schema.json
+
+ release:
+ desc: Create and push a new tag following semver
+ vars:
+ NEXT:
+ sh: go run github.com/caarlos0/svu@latest next
+ prompt: "This will release {{.NEXT}}. Continue?"
+ preconditions:
+ - sh: '[ $(git symbolic-ref --short HEAD) = "main" ]'
+ msg: Not on main branch
+ - sh: "[ $(git status --porcelain=2 | wc -l) = 0 ]"
+ msg: "Git is dirty"
+ cmds:
+ - git tag -d nightly
+ - git tag --sign {{.NEXT}} {{.CLI_ARGS}}
+ - echo "pushing {{.NEXT}}..."
+ - git push origin --tags
From 5f7c46dd1418f8ce7c7f11aeed7e278a1592f799 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 24 Sep 2025 12:53:14 -0300
Subject: [PATCH 155/236] chore: fix version
Signed-off-by: Carlos Alexandro Becker
---
Taskfile.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Taskfile.yaml b/Taskfile.yaml
index 7f821f584704393dffc750795e0c48ecdf5ea8ab..3388022a61eb8bbaa3410c06e57e4914247a5dbf 100644
--- a/Taskfile.yaml
+++ b/Taskfile.yaml
@@ -89,7 +89,7 @@ tasks:
desc: Create and push a new tag following semver
vars:
NEXT:
- sh: go run github.com/caarlos0/svu@latest next
+ sh: go run github.com/caarlos0/svu/v3@latest next
prompt: "This will release {{.NEXT}}. Continue?"
preconditions:
- sh: '[ $(git symbolic-ref --short HEAD) = "main" ]'
From 4203e52994fc9b8942cbca3bad0a01aa16af9065 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 24 Sep 2025 13:30:40 -0300
Subject: [PATCH 156/236] Update Taskfile.yaml
Co-authored-by: Andrey Nering
---
Taskfile.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Taskfile.yaml b/Taskfile.yaml
index 3388022a61eb8bbaa3410c06e57e4914247a5dbf..80d6bd86d1070e2f4e900660a7cab060ebdfbcea 100644
--- a/Taskfile.yaml
+++ b/Taskfile.yaml
@@ -89,7 +89,7 @@ tasks:
desc: Create and push a new tag following semver
vars:
NEXT:
- sh: go run github.com/caarlos0/svu/v3@latest next
+ sh: go run github.com/caarlos0/svu/v3@latest next --always
prompt: "This will release {{.NEXT}}. Continue?"
preconditions:
- sh: '[ $(git symbolic-ref --short HEAD) = "main" ]'
From a6a4fa7e419fc9c9ba0ed3d530120d25b0e355b2 Mon Sep 17 00:00:00 2001
From: tauraamui
Date: Wed, 24 Sep 2025 12:02:43 +0100
Subject: [PATCH 157/236] chore: add name for helper tool name resolver
---
internal/config/config.go | 1 +
1 file changed, 1 insertion(+)
diff --git a/internal/config/config.go b/internal/config/config.go
index 3578850d228b78503e67e630a9b688c575403b9c..fc5d62ef1c361c4e4aae29a2683ed92c8e76fd9d 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -428,6 +428,7 @@ func (c *Config) SetProviderAPIKey(providerID, apiKey string) error {
func allToolNames() []string {
return []string{
+ "agent",
"bash",
"download",
"edit",
From 925e5faf85622f5a1ea5aeb6758f194ec8673a15 Mon Sep 17 00:00:00 2001
From: tauraamui
Date: Wed, 24 Sep 2025 12:03:19 +0100
Subject: [PATCH 158/236] feat: if agent has been disabled do not set the agent
fn
---
internal/llm/agent/agent.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index 74b1cb74659238de917c823872698f1b2ed31332..44efba31835aa4d68a79538fd637f1eff43cbb3e 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -100,7 +100,7 @@ func NewAgent(
cfg := config.Get()
var agentToolFn func() (tools.BaseTool, error)
- if agentCfg.ID == "coder" {
+ if agentCfg.ID == "coder" && slices.Contains(agentCfg.AllowedTools, AgentToolName) {
agentToolFn = func() (tools.BaseTool, error) {
taskAgentCfg := config.Get().Agents["task"]
if taskAgentCfg.ID == "" {
From 32dac11c423726a485731d8914b083ed8cc3dcc8 Mon Sep 17 00:00:00 2001
From: tauraamui
Date: Wed, 24 Sep 2025 12:03:34 +0100
Subject: [PATCH 159/236] test: ensure agent tool name is on list of tool names
---
internal/config/load_test.go | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/internal/config/load_test.go b/internal/config/load_test.go
index 756f849db426e226c197879740f0dc47d3048dd9..406fe07d523c8b0d5d7f038f8d94cc74a0b58f89 100644
--- a/internal/config/load_test.go
+++ b/internal/config/load_test.go
@@ -485,7 +485,7 @@ func TestConfig_setupAgentsWithDisabledTools(t *testing.T) {
cfg.SetupAgents()
coderAgent, ok := cfg.Agents["coder"]
require.True(t, ok)
- assert.Equal(t, []string{"bash", "multiedit", "fetch", "glob", "ls", "sourcegraph", "view", "write"}, coderAgent.AllowedTools)
+ assert.Equal(t, []string{"agent", "bash", "multiedit", "fetch", "glob", "ls", "sourcegraph", "view", "write"}, coderAgent.AllowedTools)
taskAgent, ok := cfg.Agents["task"]
require.True(t, ok)
@@ -508,7 +508,7 @@ func TestConfig_setupAgentsWithEveryReadOnlyToolDisabled(t *testing.T) {
cfg.SetupAgents()
coderAgent, ok := cfg.Agents["coder"]
require.True(t, ok)
- assert.Equal(t, []string{"bash", "download", "edit", "multiedit", "fetch", "write"}, coderAgent.AllowedTools)
+ assert.Equal(t, []string{"agent", "bash", "download", "edit", "multiedit", "fetch", "write"}, coderAgent.AllowedTools)
taskAgent, ok := cfg.Agents["task"]
require.True(t, ok)
From c1e8b6b44ccd8b81b0899c27625b61b95e950708 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Thu, 25 Sep 2025 10:36:09 -0300
Subject: [PATCH 160/236] chore(deps): update deps, dependabot config (#1125)
Signed-off-by: Carlos Alexandro Becker
---
.github/dependabot.yml | 4 ++++
go.mod | 8 ++++----
go.sum | 16 ++++++++--------
3 files changed, 16 insertions(+), 12 deletions(-)
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 94ff8c80c13621146bf40ccd90325c65b86bbaf3..0172187ca829e659ae9e31e2c58929a259411b0d 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -17,6 +17,10 @@ updates:
all:
patterns:
- "*"
+ ignore:
+ - dependency-name: github.com/charmbracelet/lipgloss/v2
+ versions:
+ - v2.0.0-beta1
- package-ecosystem: "github-actions"
directory: "/"
diff --git a/go.mod b/go.mod
index ea62993931c7532b55127f54b35bab0be2eb23a7..d82c2490d3a7267f7d9be9b7a2715134dfc96bfa 100644
--- a/go.mod
+++ b/go.mod
@@ -14,10 +14,10 @@ require (
github.com/charlievieth/fastwalk v1.0.14
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e
- github.com/charmbracelet/catwalk v0.5.8
- github.com/charmbracelet/fang v0.4.1
+ github.com/charmbracelet/catwalk v0.6.1
+ github.com/charmbracelet/fang v0.4.2
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018
- github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0
+ github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250917201909-41ff0bf215ea
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706
github.com/charmbracelet/x/ansi v0.10.1
github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3
@@ -73,7 +73,7 @@ require (
github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/buger/jsonparser v1.1.1 // indirect
github.com/charmbracelet/colorprofile v0.3.2 // indirect
- github.com/charmbracelet/ultraviolet v0.0.0-20250912143111-9785ff826cbf
+ github.com/charmbracelet/ultraviolet v0.0.0-20250915111650-81d4262876ef
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a // indirect
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d
github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4
diff --git a/go.sum b/go.sum
index fedfeb243142a92fe79d7d6b474ca921dbc952bd..bb80e08e5226e202a096ec7046458d19a0a9bfa3 100644
--- a/go.sum
+++ b/go.sum
@@ -80,20 +80,20 @@ github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e h1:4BBnKWFwJ5FLyhw/ijFxKE04i9rubr8WIPR1kjO57iA=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e/go.mod h1:F7AfLKYQqpM3NNBVs7ctW417tavhvoh9SBjsgtwpzbY=
-github.com/charmbracelet/catwalk v0.5.8 h1:Crs1bCgFtFlPAeoa6NQ1RUG8RmFCxFRSi+ccnAg7reE=
-github.com/charmbracelet/catwalk v0.5.8/go.mod h1:ReU4SdrLfe63jkEjWMdX2wlZMV3k9r11oQAmzN0m+KY=
+github.com/charmbracelet/catwalk v0.6.1 h1:2rRqUlwo+fdyIty8jEvUufRTgqBl0aea21LV6YQPqb0=
+github.com/charmbracelet/catwalk v0.6.1/go.mod h1:ReU4SdrLfe63jkEjWMdX2wlZMV3k9r11oQAmzN0m+KY=
github.com/charmbracelet/colorprofile v0.3.2 h1:9J27WdztfJQVAQKX2WOlSSRB+5gaKqqITmrvb1uTIiI=
github.com/charmbracelet/colorprofile v0.3.2/go.mod h1:mTD5XzNeWHj8oqHb+S1bssQb7vIHbepiebQ2kPKVKbI=
-github.com/charmbracelet/fang v0.4.1 h1:NC0Y4oqg7YuZcBg/KKsHy8DSow0ZDjF4UJL7LwtA0dE=
-github.com/charmbracelet/fang v0.4.1/go.mod h1:9gCUAHmVx5BwSafeyNr3GI0GgvlB1WYjL21SkPp1jyU=
+github.com/charmbracelet/fang v0.4.2 h1:nWr7Tb82/TTNNGMGG35aTZ1X68loAOQmpb0qxkKXjas=
+github.com/charmbracelet/fang v0.4.2/go.mod h1:wHJKQYO5ReYsxx+yZl+skDtrlKO/4LLEQ6EXsdHhRhg=
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018 h1:PU4Zvpagsk5sgaDxn5W4sxHuLp9QRMBZB3bFSk40A4w=
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018/go.mod h1:Z/GLmp9fzaqX4ze3nXG7StgWez5uBM5XtlLHK8V/qSk=
-github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0 h1:sWRGoSw/JsO2S4t2+fmmEkRbkOxphI0AxZkQPQVKWbs=
-github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0/go.mod h1:XIuqKpZTUXtVyeyiN1k9Tc/U7EzfaDnVc34feFHfBws=
+github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250917201909-41ff0bf215ea h1:g1HfUgSMvye8mgecMD1mPscpt+pzJoDEiSA+p2QXzdQ=
+github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250917201909-41ff0bf215ea/go.mod h1:ngHerf1JLJXBrDXdphn5gFrBPriCL437uwukd5c93pM=
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706 h1:WkwO6Ks3mSIGnGuSdKl9qDSyfbYK50z2wc2gGMggegE=
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706/go.mod h1:mjJGp00cxcfvD5xdCa+bso251Jt4owrQvuimJtVmEmM=
-github.com/charmbracelet/ultraviolet v0.0.0-20250912143111-9785ff826cbf h1:2fs3BT8BFjpJ4134Tq4VoBm/fE9FB2f2P/FhmzsWelQ=
-github.com/charmbracelet/ultraviolet v0.0.0-20250912143111-9785ff826cbf/go.mod h1:V21rZtvULxJyG8tUsRC8caTBvKNHOuRJVxH+G6ghH0Y=
+github.com/charmbracelet/ultraviolet v0.0.0-20250915111650-81d4262876ef h1:VrWaUi2LXYLjfjCHowdSOEc6dQ9Ro14KY7Bw4IWd19M=
+github.com/charmbracelet/ultraviolet v0.0.0-20250915111650-81d4262876ef/go.mod h1:AThRsQH1t+dfyOKIwXRoJBniYFQUkUpQq4paheHMc2o=
github.com/charmbracelet/x/ansi v0.10.1 h1:rL3Koar5XvX0pHGfovN03f5cxLbCF2YvLeyz7D2jVDQ=
github.com/charmbracelet/x/ansi v0.10.1/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a h1:zYSNtEJM9jwHbJts2k+Hroj+xQwsW1yxc4Wopdv7KaI=
From afdb81d8b1a7cc5e26e893a43190ca4dcdf0d633 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Fri, 26 Sep 2025 10:03:42 -0300
Subject: [PATCH 161/236] chore(deps): fix dependabot config
Signed-off-by: Carlos Alexandro Becker
---
.github/dependabot.yml | 3 +++
1 file changed, 3 insertions(+)
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 0172187ca829e659ae9e31e2c58929a259411b0d..cf970b5887bc33fd822ab7fc4fe4540df045a6e1 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -18,6 +18,9 @@ updates:
patterns:
- "*"
ignore:
+ - dependency-name: github.com/charmbracelet/bubbletea/v2
+ versions:
+ - v2.0.0-beta1
- dependency-name: github.com/charmbracelet/lipgloss/v2
versions:
- v2.0.0-beta1
From 35b8aada21494ae5e85baf73d40daa46bc552962 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 26 Sep 2025 13:10:52 +0000
Subject: [PATCH 162/236] chore(deps): bump google.golang.org/genai in the all
group (#1134)
Bumps the all group with 1 update: [google.golang.org/genai](https://github.com/googleapis/go-genai).
Updates `google.golang.org/genai` from 1.25.0 to 1.26.0
- [Release notes](https://github.com/googleapis/go-genai/releases)
- [Changelog](https://github.com/googleapis/go-genai/blob/main/CHANGELOG.md)
- [Commits](https://github.com/googleapis/go-genai/compare/v1.25.0...v1.26.0)
---
updated-dependencies:
- dependency-name: google.golang.org/genai
dependency-version: 1.26.0
dependency-type: direct:production
update-type: version-update:semver-minor
dependency-group: all
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
go.mod | 2 +-
go.sum | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/go.mod b/go.mod
index d82c2490d3a7267f7d9be9b7a2715134dfc96bfa..d3e668320cfdec39160d618b189f1470bf07d028 100644
--- a/go.mod
+++ b/go.mod
@@ -152,7 +152,7 @@ require (
golang.org/x/text v0.29.0
golang.org/x/time v0.8.0 // indirect
google.golang.org/api v0.211.0 // indirect
- google.golang.org/genai v1.25.0
+ google.golang.org/genai v1.26.0
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 // indirect
google.golang.org/grpc v1.71.0 // indirect
google.golang.org/protobuf v1.36.8 // indirect
diff --git a/go.sum b/go.sum
index bb80e08e5226e202a096ec7046458d19a0a9bfa3..f69217e9d4e9831abc8e1b47b80e23a19dcfcffa 100644
--- a/go.sum
+++ b/go.sum
@@ -425,8 +425,8 @@ golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxb
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.211.0 h1:IUpLjq09jxBSV1lACO33CGY3jsRcbctfGzhj+ZSE/Bg=
google.golang.org/api v0.211.0/go.mod h1:XOloB4MXFH4UTlQSGuNUxw0UT74qdENK8d6JNsXKLi0=
-google.golang.org/genai v1.25.0 h1:Cpyh2nmEoOS1eM3mT9XKuA/qWTEDoktfP2gsN3EduPE=
-google.golang.org/genai v1.25.0/go.mod h1:OClfdf+r5aaD+sCd4aUSkPzJItmg2wD/WON9lQnRPaY=
+google.golang.org/genai v1.26.0 h1:r4HGL54kFv/WCRMTAbZg05Ct+vXfhAbTRlXhFyBkEQo=
+google.golang.org/genai v1.26.0/go.mod h1:OClfdf+r5aaD+sCd4aUSkPzJItmg2wD/WON9lQnRPaY=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 h1:e0AIkUUhxyBKh6ssZNrAMeqhA7RKUj42346d1y02i2g=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg=
From c8bdb0b659cd98bd4ef537d7a77b3ab66645ef19 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Fri, 26 Sep 2025 10:17:40 -0300
Subject: [PATCH 163/236] fix: improve shutdown (#1133)
Signed-off-by: Carlos Alexandro Becker
---
internal/llm/agent/mcp-tools.go | 4 +--
internal/lsp/client.go | 51 +++++----------------------------
2 files changed, 9 insertions(+), 46 deletions(-)
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index d670a5797548cd52bbfd23c8cd16fea96b021e8a..a2e6b912ab503c61522501ad522a9f0a65fc37b0 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -259,9 +259,9 @@ func updateMCPState(name string, state MCPState, err error, client *client.Clien
// CloseMCPClients closes all MCP clients. This should be called during application shutdown.
func CloseMCPClients() error {
var errs []error
- for c := range mcpClients.Seq() {
+ for name, c := range mcpClients.Seq2() {
if err := c.Close(); err != nil {
- errs = append(errs, err)
+ errs = append(errs, fmt.Errorf("close mcp: %s: %w", name, err))
}
}
mcpBroker.Shutdown()
diff --git a/internal/lsp/client.go b/internal/lsp/client.go
index 259f6ba8c4876dcbeb441d48839685012c48ac32..ff9a3ac9b5249663c151fb2df04a4acb168e4de4 100644
--- a/internal/lsp/client.go
+++ b/internal/lsp/client.go
@@ -319,30 +319,6 @@ func (c *Client) NotifyChange(ctx context.Context, filepath string) error {
return c.client.NotifyDidChangeTextDocument(ctx, uri, int(fileInfo.Version), changes)
}
-// CloseFile closes a file in the LSP server.
-//
-// NOTE: this is only ever called on LSP shutdown.
-func (c *Client) CloseFile(ctx context.Context, filepath string) error {
- cfg := config.Get()
- uri := string(protocol.URIFromPath(filepath))
-
- if _, exists := c.openFiles.Get(uri); !exists {
- return nil // Already closed
- }
-
- if cfg.Options.DebugLSP {
- slog.Debug("Closing file", "file", filepath)
- }
-
- if err := c.client.NotifyDidCloseTextDocument(ctx, uri); err != nil {
- return err
- }
-
- c.openFiles.Del(uri)
-
- return nil
-}
-
// IsFileOpen checks if a file is currently open.
func (c *Client) IsFileOpen(filepath string) bool {
uri := string(protocol.URIFromPath(filepath))
@@ -353,29 +329,16 @@ func (c *Client) IsFileOpen(filepath string) bool {
// CloseAllFiles closes all currently open files.
func (c *Client) CloseAllFiles(ctx context.Context) {
cfg := config.Get()
- filesToClose := make([]string, 0, c.openFiles.Len())
-
- // First collect all URIs that need to be closed
+ debugLSP := cfg != nil && cfg.Options.DebugLSP
for uri := range c.openFiles.Seq2() {
- // Convert URI back to file path using proper URI handling
- filePath, err := protocol.DocumentURI(uri).Path()
- if err != nil {
- slog.Error("Failed to convert URI to path for file closing", "uri", uri, "error", err)
- continue
+ if debugLSP {
+ slog.Debug("Closing file", "file", uri)
}
- filesToClose = append(filesToClose, filePath)
- }
-
- // Then close them all
- for _, filePath := range filesToClose {
- err := c.CloseFile(ctx, filePath)
- if err != nil && cfg != nil && cfg.Options.DebugLSP {
- slog.Warn("Error closing file", "file", filePath, "error", err)
+ if err := c.client.NotifyDidCloseTextDocument(ctx, uri); err != nil {
+ slog.Warn("Error closing rile", "uri", uri, "error", err)
+ continue
}
- }
-
- if cfg != nil && cfg.Options.DebugLSP {
- slog.Debug("Closed all files", "files", filesToClose)
+ c.openFiles.Del(uri)
}
}
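
For reference, a minimal, self-contained sketch of the shutdown pattern used above: iterate the clients, wrap each close failure with the client's name, and aggregate the results with errors.Join. Only the standard library is used; the map of io.Closer values is a stand-in for the real concurrent MCP client map, and all names here are illustrative.

package main

import (
	"errors"
	"fmt"
	"io"
	"strings"
)

// closeAll closes every client and returns a single aggregated error,
// wrapping each failure with the client's name so the caller can tell
// which shutdown step failed.
func closeAll(clients map[string]io.Closer) error {
	var errs []error
	for name, c := range clients {
		if err := c.Close(); err != nil {
			errs = append(errs, fmt.Errorf("close mcp: %s: %w", name, err))
		}
	}
	return errors.Join(errs...) // nil when nothing failed
}

type failingCloser struct{ name string }

func (f failingCloser) Close() error { return fmt.Errorf("%s refused to close", f.name) }

func main() {
	clients := map[string]io.Closer{
		"good": io.NopCloser(strings.NewReader("")),
		"bad":  failingCloser{name: "bad"},
	}
	if err := closeAll(clients); err != nil {
		fmt.Println("shutdown finished with errors:", err)
	}
}
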
From 9fd7f48e07bb6231676c6891879ee510e81cdd0d Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Fri, 26 Sep 2025 10:27:38 -0300
Subject: [PATCH 164/236] chore(metrics): have a better identifier fallback
(#1130)
---
internal/event/event.go | 12 ++-------
internal/event/identifier.go | 49 ++++++++++++++++++++++++++++++++++++
2 files changed, 51 insertions(+), 10 deletions(-)
create mode 100644 internal/event/identifier.go
diff --git a/internal/event/event.go b/internal/event/event.go
index 42272c7035638fee7167b5c3510c7975cb9c9394..ca02c6d89d67be1756b166aea152da165b2712c9 100644
--- a/internal/event/event.go
+++ b/internal/event/event.go
@@ -9,7 +9,6 @@ import (
"runtime"
"github.com/charmbracelet/crush/internal/version"
- "github.com/denisbrodbeck/machineid"
"github.com/posthog/posthog-go"
)
@@ -39,6 +38,7 @@ func Init() {
slog.Error("Failed to initialize PostHog client", "error", err)
}
client = c
+ distinctId = getDistinctId()
}
// send logs an event to PostHog with the given event name and properties.
@@ -47,7 +47,7 @@ func send(event string, props ...any) {
return
}
err := client.Enqueue(posthog.Capture{
- DistinctId: distinctId(),
+ DistinctId: distinctId,
Event: event,
Properties: pairsToProps(props...).Merge(baseProps),
})
@@ -105,11 +105,3 @@ func pairsToProps(props ...any) posthog.Properties {
func isEven(n int) bool {
return n%2 == 0
}
-
-func distinctId() string {
- id, err := machineid.ProtectedID("charm")
- if err != nil {
- return "crush-cli"
- }
- return id
-}
diff --git a/internal/event/identifier.go b/internal/event/identifier.go
new file mode 100644
index 0000000000000000000000000000000000000000..ee05f8f58f6dd9a8f662e94992983ce26a94d9b9
--- /dev/null
+++ b/internal/event/identifier.go
@@ -0,0 +1,49 @@
+package event
+
+import (
+ "crypto/hmac"
+ "crypto/sha256"
+ "encoding/hex"
+ "fmt"
+ "net"
+
+ "github.com/denisbrodbeck/machineid"
+)
+
+var distinctId string
+
+const (
+ hashKey = "charm"
+ fallbackId = "unknown"
+)
+
+func getDistinctId() string {
+ if id, err := machineid.ProtectedID(hashKey); err == nil {
+ return id
+ }
+ if macAddr, err := getMacAddr(); err == nil {
+ return hashString(macAddr)
+ }
+ return fallbackId
+}
+
+func getMacAddr() (string, error) {
+ interfaces, err := net.Interfaces()
+ if err != nil {
+ return "", err
+ }
+ for _, iface := range interfaces {
+ if iface.Flags&net.FlagUp != 0 && iface.Flags&net.FlagLoopback == 0 && len(iface.HardwareAddr) > 0 {
+ if addrs, err := iface.Addrs(); err == nil && len(addrs) > 0 {
+ return iface.HardwareAddr.String(), nil
+ }
+ }
+ }
+ return "", fmt.Errorf("no active interface with mac address found")
+}
+
+func hashString(str string) string {
+ hash := hmac.New(sha256.New, []byte(str))
+ hash.Write([]byte(hashKey))
+ return hex.EncodeToString(hash.Sum(nil))
+}
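
As an aside, the MAC-address fallback can be exercised on its own with just the standard library (machineid is a third-party dependency and is skipped here). This sketch mirrors getMacAddr and the HMAC hashing from the patch, including its key/message ordering; the printed value is illustrative only.

package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"net"
)

// macAddr returns the hardware address of the first interface that is up,
// is not a loopback, and actually has addresses assigned, mirroring the
// fallback used in the patch above.
func macAddr() (string, error) {
	ifaces, err := net.Interfaces()
	if err != nil {
		return "", err
	}
	for _, iface := range ifaces {
		if iface.Flags&net.FlagUp != 0 && iface.Flags&net.FlagLoopback == 0 && len(iface.HardwareAddr) > 0 {
			if addrs, err := iface.Addrs(); err == nil && len(addrs) > 0 {
				return iface.HardwareAddr.String(), nil
			}
		}
	}
	return "", fmt.Errorf("no active interface with mac address found")
}

func main() {
	mac, err := macAddr()
	if err != nil {
		fmt.Println("fallback id: unknown")
		return
	}
	// HMAC-SHA256 keyed on the MAC, matching the argument order in the patch,
	// so the raw address itself never has to be reported.
	h := hmac.New(sha256.New, []byte(mac))
	h.Write([]byte("charm"))
	fmt.Println("fallback id:", hex.EncodeToString(h.Sum(nil)))
}
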
From a116beac6a889309dcce558fd47862c6c29495e8 Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Fri, 26 Sep 2025 10:30:25 -0300
Subject: [PATCH 165/236] chore(legal): @Kaneki-x has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index a62172eb28a153115f31a83dd1be2c88193b20df..136d5857bf881c2fca2a5ada072cde11ac97af3c 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -655,6 +655,14 @@
"created_at": "2025-09-20T12:37:42Z",
"repoId": 987670088,
"pullRequestNo": 1095
+ },
+ {
+ "name": "Kaneki-x",
+ "id": 6857108,
+ "comment_id": 3338743039,
+ "created_at": "2025-09-26T13:30:16Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1135
}
]
}
\ No newline at end of file
From e3240a2209b9b2f99a6fb03283fac7ca8ebe861d Mon Sep 17 00:00:00 2001
From: Amolith
Date: Fri, 26 Sep 2025 09:34:05 -0600
Subject: [PATCH 166/236] fix(lsp): allow directories as root markers (#1117)
* test(fsext): add GlobWithDoubleStar tests
Covers
- Basic file and directory matching
- Nested directory patterns
- Limit parameter behavior
- Modification time sorting
- Empty and non-existent directory handling
- Ignore pattern respect
Co-Authored-By: Crush
* fix: allow directories as root markers
The removed line was preventing users from setting directories
as root markers, which should be allowed according to the
configuration specification.
Co-Authored-By: Crush
* fix: normalize slashes for Windows glob matching
Normalize both paths and patterns to forward slashes for consistent
cross-platform glob matching:
- Normalize relative paths from filepath.Rel before pattern matching
- Normalize input glob patterns on Windows at function entry
- Fixes Windows test failures where backslashes don't match forward
slash patterns
Co-Authored-By: Crush
* test(fsext): remove unnecessary GOOS check
Co-Authored-By: Crush
* test(fsext): more concise error checking
Co-Authored-By: Crush
* test(fsext): simplify assertions
Co-Authored-By: Crush
* test(fsext): use deterministic mod times
Co-Authored-By: Crush
* test(fsext): further simplifications
Co-Authored-By: Crush
* test(fsext): improve readability
and yeet useless LLM comments
---------
Co-authored-by: Crush
---
internal/fsext/fileutil.go | 10 +-
internal/fsext/fileutil_test.go | 273 ++++++++++++++++++++++++++++++++
2 files changed, 281 insertions(+), 2 deletions(-)
create mode 100644 internal/fsext/fileutil_test.go
diff --git a/internal/fsext/fileutil.go b/internal/fsext/fileutil.go
index e83cfc915219320f34cd4f813ac253be6b2c5053..30c552324452cbce4436701506419916c014d7f9 100644
--- a/internal/fsext/fileutil.go
+++ b/internal/fsext/fileutil.go
@@ -75,6 +75,10 @@ func (w *FastGlobWalker) ShouldSkip(path string) bool {
}
func GlobWithDoubleStar(pattern, searchPath string, limit int) ([]string, bool, error) {
+ // Normalize the pattern to forward slashes on Windows so user config can
+ // use backslashes
+ pattern = filepath.ToSlash(pattern)
+
walker := NewFastGlobWalker(searchPath)
var matches []FileInfo
conf := fastwalk.Config{
@@ -92,19 +96,21 @@ func GlobWithDoubleStar(pattern, searchPath string, limit int) ([]string, bool,
if walker.ShouldSkip(path) {
return filepath.SkipDir
}
- return nil
}
if walker.ShouldSkip(path) {
return nil
}
- // Check if path matches the pattern
relPath, err := filepath.Rel(searchPath, path)
if err != nil {
relPath = path
}
+ // Normalize separators to forward slashes
+ relPath = filepath.ToSlash(relPath)
+
+ // Check if path matches the pattern
matched, err := doublestar.Match(pattern, relPath)
if err != nil || !matched {
return nil
diff --git a/internal/fsext/fileutil_test.go b/internal/fsext/fileutil_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..1779bfb9312f7834748badaf72a47563878f21da
--- /dev/null
+++ b/internal/fsext/fileutil_test.go
@@ -0,0 +1,273 @@
+package fsext
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "testing"
+ "testing/synctest"
+ "time"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestGlobWithDoubleStar(t *testing.T) {
+ t.Run("finds files matching pattern", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ mainGo := filepath.Join(testDir, "src", "main.go")
+ utilsGo := filepath.Join(testDir, "src", "utils.go")
+ helperGo := filepath.Join(testDir, "pkg", "helper.go")
+ readmeMd := filepath.Join(testDir, "README.md")
+
+ for _, file := range []string{mainGo, utilsGo, helperGo, readmeMd} {
+ require.NoError(t, os.MkdirAll(filepath.Dir(file), 0o755))
+ require.NoError(t, os.WriteFile(file, []byte("test content"), 0o644))
+ }
+
+ matches, truncated, err := GlobWithDoubleStar("**/main.go", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ require.Equal(t, matches, []string{mainGo})
+ })
+
+ t.Run("finds directories matching pattern", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ srcDir := filepath.Join(testDir, "src")
+ pkgDir := filepath.Join(testDir, "pkg")
+ internalDir := filepath.Join(testDir, "internal")
+ cmdDir := filepath.Join(testDir, "cmd")
+ pkgFile := filepath.Join(testDir, "pkg.txt")
+
+ for _, dir := range []string{srcDir, pkgDir, internalDir, cmdDir} {
+ require.NoError(t, os.MkdirAll(dir, 0o755))
+ }
+
+ require.NoError(t, os.WriteFile(filepath.Join(srcDir, "main.go"), []byte("package main"), 0o644))
+ require.NoError(t, os.WriteFile(pkgFile, []byte("test"), 0o644))
+
+ matches, truncated, err := GlobWithDoubleStar("pkg", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ require.Equal(t, matches, []string{pkgDir})
+ })
+
+ t.Run("finds nested directories with wildcard patterns", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ srcPkgDir := filepath.Join(testDir, "src", "pkg")
+ libPkgDir := filepath.Join(testDir, "lib", "pkg")
+ mainPkgDir := filepath.Join(testDir, "pkg")
+ otherDir := filepath.Join(testDir, "other")
+
+ for _, dir := range []string{srcPkgDir, libPkgDir, mainPkgDir, otherDir} {
+ require.NoError(t, os.MkdirAll(dir, 0o755))
+ }
+
+ matches, truncated, err := GlobWithDoubleStar("**/pkg", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ var relativeMatches []string
+ for _, match := range matches {
+ rel, err := filepath.Rel(testDir, match)
+ require.NoError(t, err)
+ relativeMatches = append(relativeMatches, filepath.ToSlash(rel))
+ }
+
+ require.ElementsMatch(t, relativeMatches, []string{"pkg", "src/pkg", "lib/pkg"})
+ })
+
+ t.Run("finds directory contents with recursive patterns", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ pkgDir := filepath.Join(testDir, "pkg")
+ pkgFile1 := filepath.Join(pkgDir, "main.go")
+ pkgFile2 := filepath.Join(pkgDir, "utils.go")
+ pkgSubdir := filepath.Join(pkgDir, "internal")
+ pkgSubfile := filepath.Join(pkgSubdir, "helper.go")
+
+ require.NoError(t, os.MkdirAll(pkgSubdir, 0o755))
+
+ for _, file := range []string{pkgFile1, pkgFile2, pkgSubfile} {
+ require.NoError(t, os.WriteFile(file, []byte("package main"), 0o644))
+ }
+
+ matches, truncated, err := GlobWithDoubleStar("pkg/**", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ var relativeMatches []string
+ for _, match := range matches {
+ rel, err := filepath.Rel(testDir, match)
+ require.NoError(t, err)
+ relativeMatches = append(relativeMatches, filepath.ToSlash(rel))
+ }
+
+ require.ElementsMatch(t, relativeMatches, []string{
+ "pkg",
+ "pkg/main.go",
+ "pkg/utils.go",
+ "pkg/internal",
+ "pkg/internal/helper.go",
+ })
+ })
+
+ t.Run("respects limit parameter", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ for i := range 10 {
+ file := filepath.Join(testDir, "file", fmt.Sprintf("test%d.txt", i))
+ require.NoError(t, os.MkdirAll(filepath.Dir(file), 0o755))
+ require.NoError(t, os.WriteFile(file, []byte("test"), 0o644))
+ }
+
+ matches, truncated, err := GlobWithDoubleStar("**/*.txt", testDir, 5)
+ require.NoError(t, err)
+ require.True(t, truncated, "Expected truncation with limit")
+ require.Len(t, matches, 5, "Expected exactly 5 matches with limit")
+ })
+
+ t.Run("handles nested directory patterns", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ file1 := filepath.Join(testDir, "a", "b", "c", "file1.txt")
+ file2 := filepath.Join(testDir, "a", "b", "file2.txt")
+ file3 := filepath.Join(testDir, "a", "file3.txt")
+ file4 := filepath.Join(testDir, "file4.txt")
+
+ for _, file := range []string{file1, file2, file3, file4} {
+ require.NoError(t, os.MkdirAll(filepath.Dir(file), 0o755))
+ require.NoError(t, os.WriteFile(file, []byte("test"), 0o644))
+ }
+
+ matches, truncated, err := GlobWithDoubleStar("a/b/c/file1.txt", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ require.Equal(t, matches, []string{file1})
+ })
+
+ t.Run("returns results sorted by modification time (newest first)", func(t *testing.T) {
+ synctest.Test(t, func(t *testing.T) {
+ testDir := t.TempDir()
+
+ file1 := filepath.Join(testDir, "file1.txt")
+ require.NoError(t, os.WriteFile(file1, []byte("first"), 0o644))
+
+ file2 := filepath.Join(testDir, "file2.txt")
+ require.NoError(t, os.WriteFile(file2, []byte("second"), 0o644))
+
+ file3 := filepath.Join(testDir, "file3.txt")
+ require.NoError(t, os.WriteFile(file3, []byte("third"), 0o644))
+
+ base := time.Now()
+ m1 := base
+ m2 := base.Add(1 * time.Millisecond)
+ m3 := base.Add(2 * time.Millisecond)
+
+ require.NoError(t, os.Chtimes(file1, m1, m1))
+ require.NoError(t, os.Chtimes(file2, m2, m2))
+ require.NoError(t, os.Chtimes(file3, m3, m3))
+
+ matches, truncated, err := GlobWithDoubleStar("*.txt", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ require.Equal(t, matches, []string{file3, file2, file1})
+ })
+ })
+
+ t.Run("handles empty directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ matches, truncated, err := GlobWithDoubleStar("**", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+ // Even empty directories should return the directory itself
+ require.Equal(t, matches, []string{testDir})
+ })
+
+ t.Run("handles non-existent search path", func(t *testing.T) {
+ nonExistentDir := filepath.Join(t.TempDir(), "does", "not", "exist")
+
+ matches, truncated, err := GlobWithDoubleStar("**", nonExistentDir, 0)
+ require.Error(t, err, "Should return error for non-existent search path")
+ require.False(t, truncated)
+ require.Empty(t, matches)
+ })
+
+ t.Run("respects basic ignore patterns", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ rootIgnore := filepath.Join(testDir, ".crushignore")
+
+ require.NoError(t, os.WriteFile(rootIgnore, []byte("*.tmp\nbackup/\n"), 0o644))
+
+ goodFile := filepath.Join(testDir, "good.txt")
+ require.NoError(t, os.WriteFile(goodFile, []byte("content"), 0o644))
+
+ badFile := filepath.Join(testDir, "bad.tmp")
+ require.NoError(t, os.WriteFile(badFile, []byte("temp content"), 0o644))
+
+ goodDir := filepath.Join(testDir, "src")
+ require.NoError(t, os.MkdirAll(goodDir, 0o755))
+
+ ignoredDir := filepath.Join(testDir, "backup")
+ require.NoError(t, os.MkdirAll(ignoredDir, 0o755))
+
+ ignoredFileInDir := filepath.Join(testDir, "backup", "old.txt")
+ require.NoError(t, os.WriteFile(ignoredFileInDir, []byte("old content"), 0o644))
+
+ matches, truncated, err := GlobWithDoubleStar("*.tmp", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+ require.Empty(t, matches, "Expected no matches for '*.tmp' pattern (should be ignored)")
+
+ matches, truncated, err = GlobWithDoubleStar("backup", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+ require.Empty(t, matches, "Expected no matches for 'backup' pattern (should be ignored)")
+
+ matches, truncated, err = GlobWithDoubleStar("*.txt", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+ require.Equal(t, matches, []string{goodFile})
+ })
+
+ t.Run("handles mixed file and directory matching with sorting", func(t *testing.T) {
+ synctest.Test(t, func(t *testing.T) {
+ testDir := t.TempDir()
+
+ oldestFile := filepath.Join(testDir, "old.test")
+ require.NoError(t, os.WriteFile(oldestFile, []byte("old"), 0o644))
+
+ middleDir := filepath.Join(testDir, "mid.test")
+ require.NoError(t, os.MkdirAll(middleDir, 0o755))
+
+ newestFile := filepath.Join(testDir, "new.test")
+ require.NoError(t, os.WriteFile(newestFile, []byte("new"), 0o644))
+
+ base := time.Now()
+ tOldest := base
+ tMiddle := base.Add(1 * time.Millisecond)
+ tNewest := base.Add(2 * time.Millisecond)
+
+ // Reverse the expected order
+ require.NoError(t, os.Chtimes(newestFile, tOldest, tOldest))
+ require.NoError(t, os.Chtimes(middleDir, tMiddle, tMiddle))
+ require.NoError(t, os.Chtimes(oldestFile, tNewest, tNewest))
+
+ matches, truncated, err := GlobWithDoubleStar("*.test", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ // Results should be sorted by mod time, but we set the oldestFile
+ // to have the most recent mod time
+ require.Equal(t, matches, []string{oldestFile, middleDir, newestFile})
+ })
+ })
+}
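
A small sketch of the normalization idea, assuming only the standard library: both the pattern and the relative path are converted with filepath.ToSlash before matching, so Windows-style paths behave the same as Unix ones. path.Match stands in for doublestar, which the real code uses for `**` support; names here are illustrative.

package main

import (
	"fmt"
	"path"
	"path/filepath"
)

// matches reports whether rel (a path relative to the search root) matches
// pattern. Both sides are normalized to forward slashes first, so paths and
// patterns written with backslashes on Windows match the same way as on Unix.
func matches(pattern, rel string) (bool, error) {
	pattern = filepath.ToSlash(pattern)
	rel = filepath.ToSlash(rel)
	return path.Match(pattern, rel)
}

func main() {
	rel := filepath.Join("src", "main.go") // "src\main.go" on Windows, "src/main.go" elsewhere
	ok, err := matches("src/*.go", rel)
	fmt.Println(ok, err) // true <nil> on every platform thanks to ToSlash
}
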
From e6a6cbe995507c3b093c21025ff276ca8db2ace9 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Fri, 26 Sep 2025 12:34:28 -0300
Subject: [PATCH 167/236] ci: pin actions (#1132)
Signed-off-by: Carlos Alexandro Becker
---
.github/workflows/cla.yml | 2 +-
.github/workflows/labeler.yml | 2 +-
.github/workflows/nightly.yml | 2 +-
.github/workflows/schema-update.yml | 4 ++--
4 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml
index 0ae65fe25556483b90f71c07a0a336170b66d93c..a7b6485e6b5f89cf5a566cf9f5058dd8e72b0d23 100644
--- a/.github/workflows/cla.yml
+++ b/.github/workflows/cla.yml
@@ -22,7 +22,7 @@ jobs:
github.event.comment.body == 'recheck' ||
github.event.comment.body == 'I have read the Contributor License Agreement (CLA) and hereby sign the CLA.' ||
github.event_name == 'pull_request_target'
- uses: contributor-assistant/github-action@v2.6.1
+ uses: contributor-assistant/github-action@ca4a40a7d1004f18d9960b404b97e5f30a505a08 # v2.6.1
env:
GITHUB_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
with:
diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml
index afc6427ff864eaf4929b831c7df23a2699304528..ff40f751d372d17bf458a33efc7776f258739cf3 100644
--- a/.github/workflows/labeler.yml
+++ b/.github/workflows/labeler.yml
@@ -20,7 +20,7 @@ jobs:
triage:
runs-on: ubuntu-latest
steps:
- - uses: github/issue-labeler@v3.4
+ - uses: github/issue-labeler@c1b0f9f52a63158c4adc09425e858e87b32e9685 # v3.4
with:
configuration-path: .github/labeler.yml
enable-versioned-regex: 0
diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml
index ede4752153124701ff75e3780f12d179f98fe530..1e711cfcaa5beb465f7dabacdb10157f4f35ac68 100644
--- a/.github/workflows/nightly.yml
+++ b/.github/workflows/nightly.yml
@@ -11,7 +11,7 @@ jobs:
outputs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- - uses: actions/checkout@v5
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 1
- id: check
diff --git a/.github/workflows/schema-update.yml b/.github/workflows/schema-update.yml
index bc7e19b67f5021e8d3ff00342a062f2c6c000e86..bc1a69c68273c007a764c268958858be3b62bcd2 100644
--- a/.github/workflows/schema-update.yml
+++ b/.github/workflows/schema-update.yml
@@ -10,10 +10,10 @@ jobs:
update-schema:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v5
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- - uses: actions/setup-go@v6
+ - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
with:
go-version-file: go.mod
- run: go run . schema > ./schema.json
From a48d4e3538ad46db534c20e8f5860789a7cf5966 Mon Sep 17 00:00:00 2001
From: Vadim Inshakov
Date: Sat, 27 Sep 2025 00:54:34 +0500
Subject: [PATCH 168/236] fix(stream): stream hang, add stream timeout (#1070)
Co-authored-by: Carlos Alexandro Becker
---
internal/llm/agent/agent.go | 29 ++++++++++++++++++++---------
1 file changed, 20 insertions(+), 9 deletions(-)
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index 44efba31835aa4d68a79538fd637f1eff43cbb3e..dfc8cbc3f2d0030d0ba0df2f9d33fe75cbb5599c 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -26,6 +26,8 @@ import (
"github.com/charmbracelet/crush/internal/shell"
)
+const streamChunkTimeout = 80 * time.Second
+
type AgentEventType string
const (
@@ -553,16 +555,25 @@ func (a *agent) streamAndHandleEvents(ctx context.Context, sessionID string, msg
ctx = context.WithValue(ctx, tools.MessageIDContextKey, assistantMsg.ID)
// Process each event in the stream.
- for event := range eventChan {
- if processErr := a.processEvent(ctx, sessionID, &assistantMsg, event); processErr != nil {
- if errors.Is(processErr, context.Canceled) {
- a.finishMessage(context.Background(), &assistantMsg, message.FinishReasonCanceled, "Request cancelled", "")
- } else {
- a.finishMessage(ctx, &assistantMsg, message.FinishReasonError, "API Error", processErr.Error())
+loop:
+ for {
+ select {
+ case event, ok := <-eventChan:
+ if !ok {
+ break loop
}
- return assistantMsg, nil, processErr
- }
- if ctx.Err() != nil {
+ if processErr := a.processEvent(ctx, sessionID, &assistantMsg, event); processErr != nil {
+ if errors.Is(processErr, context.Canceled) {
+ a.finishMessage(context.Background(), &assistantMsg, message.FinishReasonCanceled, "Request cancelled", "")
+ } else {
+ a.finishMessage(ctx, &assistantMsg, message.FinishReasonError, "API Error", processErr.Error())
+ }
+ return assistantMsg, nil, processErr
+ }
+ case <-time.After(streamChunkTimeout):
+ a.finishMessage(ctx, &assistantMsg, message.FinishReasonError, "Stream timeout", "No chunk received within timeout")
+ return assistantMsg, nil, fmt.Errorf("stream chunk timeout")
+ case <-ctx.Done():
a.finishMessage(context.Background(), &assistantMsg, message.FinishReasonCanceled, "Request cancelled", "")
return assistantMsg, nil, ctx.Err()
}
From 42a41595d1d9725ad653759694323802e9496f8a Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Sat, 27 Sep 2025 10:09:31 -0300
Subject: [PATCH 169/236] chore(legal): @maxious has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index 136d5857bf881c2fca2a5ada072cde11ac97af3c..e92b62b75054734fd16450e5ec5e3eb56ee3cf57 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -663,6 +663,14 @@
"created_at": "2025-09-26T13:30:16Z",
"repoId": 987670088,
"pullRequestNo": 1135
+ },
+ {
+ "name": "maxious",
+ "id": 81432,
+ "comment_id": 3341700737,
+ "created_at": "2025-09-27T13:09:22Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1141
}
]
}
\ No newline at end of file
From baf053409d1f4945513ab77ff8772c3abe150985 Mon Sep 17 00:00:00 2001
From: daniel <15257433+kslamph@users.noreply.github.com>
Date: Sun, 28 Sep 2025 21:18:02 +0800
Subject: [PATCH 170/236] fix(gemini): add baseURL resolution and conditional
HTTPOptions configuration (#1144)
---
internal/llm/provider/gemini.go | 13 +++++++++----
1 file changed, 9 insertions(+), 4 deletions(-)
diff --git a/internal/llm/provider/gemini.go b/internal/llm/provider/gemini.go
index 3987deb7ebcc6330c9d3bcb4a52aeeb292eab43f..f8d1c6ab4f0577afe2bbbd0b12c8c11a4b203b32 100644
--- a/internal/llm/provider/gemini.go
+++ b/internal/llm/provider/gemini.go
@@ -43,9 +43,14 @@ func createGeminiClient(opts providerClientOptions) (*genai.Client, error) {
cc := &genai.ClientConfig{
APIKey: opts.apiKey,
Backend: genai.BackendGeminiAPI,
- HTTPOptions: genai.HTTPOptions{
- BaseURL: opts.baseURL,
- },
+ }
+ if opts.baseURL != "" {
+ resolvedBaseURL, err := config.Get().Resolve(opts.baseURL)
+ if err == nil && resolvedBaseURL != "" {
+ cc.HTTPOptions = genai.HTTPOptions{
+ BaseURL: resolvedBaseURL,
+ }
+ }
}
if config.Get().Options.Debug {
cc.HTTPClient = log.NewHTTPClient()
@@ -572,4 +577,4 @@ func contains(s string, substrs ...string) bool {
}
}
return false
-}
+}
\ No newline at end of file
From efb77aced1e23c9363958bb04f0a747bb3a58f64 Mon Sep 17 00:00:00 2001
From: daniel <15257433+kslamph@users.noreply.github.com>
Date: Sun, 28 Sep 2025 21:37:21 +0800
Subject: [PATCH 171/236] fix(gemini): add missing newline at end of file
---
internal/llm/provider/gemini.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/internal/llm/provider/gemini.go b/internal/llm/provider/gemini.go
index f8d1c6ab4f0577afe2bbbd0b12c8c11a4b203b32..91733844e0699b6f1ea62d47e0c4f61b2cde6e36 100644
--- a/internal/llm/provider/gemini.go
+++ b/internal/llm/provider/gemini.go
@@ -577,4 +577,4 @@ func contains(s string, substrs ...string) bool {
}
}
return false
-}
\ No newline at end of file
+}
From 2bb58b8042414a100bd488286d6547e4991204dd Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Sun, 28 Sep 2025 22:19:50 -0300
Subject: [PATCH 173/236] chore(legal): @Wangch29 has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index e92b62b75054734fd16450e5ec5e3eb56ee3cf57..4ecfd86887d5d072491a8fa764628e8935e4ebfe 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -671,6 +671,14 @@
"created_at": "2025-09-27T13:09:22Z",
"repoId": 987670088,
"pullRequestNo": 1141
+ },
+ {
+ "name": "Wangch29",
+ "id": 115294077,
+ "comment_id": 3344526018,
+ "created_at": "2025-09-29T01:19:40Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1148
}
]
}
\ No newline at end of file
From e0c1cfcb3783edbbf50a5391c3db299042f89275 Mon Sep 17 00:00:00 2001
From: daniel <15257433+kslamph@users.noreply.github.com>
Date: Mon, 29 Sep 2025 14:00:28 +0800
Subject: [PATCH 174/236] fix(gemini): use full MIME type for binary content in
message conversion (fixes charmbracelet/crush#995)
---
internal/llm/provider/gemini.go | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/internal/llm/provider/gemini.go b/internal/llm/provider/gemini.go
index 91733844e0699b6f1ea62d47e0c4f61b2cde6e36..a846d8d582524bb6bf9c8ed31e3796ec8d94b419 100644
--- a/internal/llm/provider/gemini.go
+++ b/internal/llm/provider/gemini.go
@@ -70,9 +70,8 @@ func (g *geminiClient) convertMessages(messages []message.Message) []*genai.Cont
var parts []*genai.Part
parts = append(parts, &genai.Part{Text: msg.Content().String()})
for _, binaryContent := range msg.BinaryContent() {
- imageFormat := strings.Split(binaryContent.MIMEType, "/")
parts = append(parts, &genai.Part{InlineData: &genai.Blob{
- MIMEType: imageFormat[1],
+ MIMEType: binaryContent.MIMEType,
Data: binaryContent.Data,
}})
}
From 34ff32621ccdeccc72947a9249a253a75f67a79c Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Mon, 29 Sep 2025 08:19:23 +0200
Subject: [PATCH 175/236] fix(agent): timer should reset after each chunk
---
internal/llm/agent/agent.go | 8 +++++++-
1 file changed, 7 insertions(+), 1 deletion(-)
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index dfc8cbc3f2d0030d0ba0df2f9d33fe75cbb5599c..91661dabc7bbe6a8099a67dc24b43de2a9545dbf 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -555,6 +555,9 @@ func (a *agent) streamAndHandleEvents(ctx context.Context, sessionID string, msg
ctx = context.WithValue(ctx, tools.MessageIDContextKey, assistantMsg.ID)
// Process each event in the stream.
+ timer := time.NewTimer(streamChunkTimeout)
+ defer timer.Stop()
+
loop:
for {
select {
@@ -562,6 +565,9 @@ loop:
if !ok {
break loop
}
+ // Reset the timeout timer since we received a chunk
+ timer.Reset(streamChunkTimeout)
+
if processErr := a.processEvent(ctx, sessionID, &assistantMsg, event); processErr != nil {
if errors.Is(processErr, context.Canceled) {
a.finishMessage(context.Background(), &assistantMsg, message.FinishReasonCanceled, "Request cancelled", "")
@@ -570,7 +576,7 @@ loop:
}
return assistantMsg, nil, processErr
}
- case <-time.After(streamChunkTimeout):
+ case <-timer.C:
a.finishMessage(ctx, &assistantMsg, message.FinishReasonError, "Stream timeout", "No chunk received within timeout")
return assistantMsg, nil, fmt.Errorf("stream chunk timeout")
case <-ctx.Done():
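
A hedged sketch of the per-chunk inactivity timeout as refined in this patch: a single timer is created once and re-armed after every received chunk, instead of allocating a fresh timer with time.After on each loop iteration. The channel, timeout value, and event type are illustrative.

package main

import (
	"context"
	"fmt"
	"time"
)

const chunkTimeout = 500 * time.Millisecond

// consume reads events until the channel closes, the context is cancelled,
// or no event arrives within chunkTimeout.
func consume(ctx context.Context, events <-chan string) error {
	timer := time.NewTimer(chunkTimeout)
	defer timer.Stop()
	for {
		select {
		case ev, ok := <-events:
			if !ok {
				return nil // stream finished normally
			}
			// Re-arm the inactivity timeout now that a chunk arrived.
			// (Since Go 1.23, Reset without draining timer.C is safe; on older
			// versions you would Stop and drain the channel first.)
			timer.Reset(chunkTimeout)
			fmt.Println("chunk:", ev)
		case <-timer.C:
			return fmt.Errorf("no chunk received within %s", chunkTimeout)
		case <-ctx.Done():
			return ctx.Err()
		}
	}
}

func main() {
	events := make(chan string)
	go func() {
		events <- "hello"
		// stop sending: consume should report a timeout after chunkTimeout
	}()
	fmt.Println(consume(context.Background(), events))
}
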
From b5a0d14cca0ecd0c271917508c4f02a169bbbbcc Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Mon, 29 Sep 2025 08:22:28 +0200
Subject: [PATCH 176/236] fix(lint): remove empty line
---
internal/llm/agent/agent.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index 91661dabc7bbe6a8099a67dc24b43de2a9545dbf..1f260f7c4134a3561f34aa64aa2096a9cd14641d 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -567,7 +567,7 @@ loop:
}
// Reset the timeout timer since we received a chunk
timer.Reset(streamChunkTimeout)
-
+
if processErr := a.processEvent(ctx, sessionID, &assistantMsg, event); processErr != nil {
if errors.Is(processErr, context.Canceled) {
a.finishMessage(context.Background(), &assistantMsg, message.FinishReasonCanceled, "Request cancelled", "")
From fb5c89219e72c97232f790e71a905b4336933991 Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Mon, 29 Sep 2025 09:04:43 +0200
Subject: [PATCH 177/236] chore: small fixes
---
internal/event/logger.go | 9 +++++----
internal/llm/agent/agent.go | 2 +-
internal/llm/agent/errors.go | 1 +
3 files changed, 7 insertions(+), 5 deletions(-)
diff --git a/internal/event/logger.go b/internal/event/logger.go
index 7648ae2c2cca91ed20535c0d65a677cd4db84500..7581676b018f5ac6001827db851a132792d21985 100644
--- a/internal/event/logger.go
+++ b/internal/event/logger.go
@@ -1,6 +1,7 @@
package event
import (
+ "fmt"
"log/slog"
"github.com/posthog/posthog-go"
@@ -11,17 +12,17 @@ var _ posthog.Logger = logger{}
type logger struct{}
func (logger) Debugf(format string, args ...any) {
- slog.Debug(format, args...)
+ slog.Debug(fmt.Sprintf(format, args...))
}
func (logger) Logf(format string, args ...any) {
- slog.Info(format, args...)
+ slog.Info(fmt.Sprintf(format, args...))
}
func (logger) Warnf(format string, args ...any) {
- slog.Warn(format, args...)
+ slog.Warn(fmt.Sprintf(format, args...))
}
func (logger) Errorf(format string, args ...any) {
- slog.Error(format, args...)
+ slog.Error(fmt.Sprintf(format, args...))
}
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index 1f260f7c4134a3561f34aa64aa2096a9cd14641d..ce2a08d81ef9494e57739061f85696f771163991 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -578,7 +578,7 @@ loop:
}
case <-timer.C:
a.finishMessage(ctx, &assistantMsg, message.FinishReasonError, "Stream timeout", "No chunk received within timeout")
- return assistantMsg, nil, fmt.Errorf("stream chunk timeout")
+ return assistantMsg, nil, ErrStreamTimeout
case <-ctx.Done():
a.finishMessage(context.Background(), &assistantMsg, message.FinishReasonCanceled, "Request cancelled", "")
return assistantMsg, nil, ctx.Err()
diff --git a/internal/llm/agent/errors.go b/internal/llm/agent/errors.go
index 0e2f983d64b42b93ad3a51f32ce0335b0374a613..943918390c6708b5ae6ea5e40e50ebce209cc263 100644
--- a/internal/llm/agent/errors.go
+++ b/internal/llm/agent/errors.go
@@ -7,6 +7,7 @@ import (
var (
ErrRequestCancelled = errors.New("request canceled by user")
+ ErrStreamTimeout = errors.New("stream chunk timeout")
ErrSessionBusy = errors.New("session is currently processing another request")
)
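
A minimal sketch of the adapter idea behind the logger change above, with assumed interface names: slog expects key/value pairs rather than printf arguments, so the format string has to be rendered with fmt.Sprintf before it reaches slog.

package main

import (
	"fmt"
	"log/slog"
)

// printfLogger is the shape of the printf-style logging interface expected by
// the analytics client in the patch above (method names assumed for illustration).
type printfLogger interface {
	Logf(format string, args ...any)
	Errorf(format string, args ...any)
}

// slogAdapter formats the printf arguments first and hands slog a single
// message string; passing args... straight to slog would treat them as
// (malformed) key/value pairs instead.
type slogAdapter struct{}

func (slogAdapter) Logf(format string, args ...any)   { slog.Info(fmt.Sprintf(format, args...)) }
func (slogAdapter) Errorf(format string, args ...any) { slog.Error(fmt.Sprintf(format, args...)) }

func main() {
	var l printfLogger = slogAdapter{}
	l.Logf("flushed %d events in %s", 3, "120ms")
}
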
From 06a5840ca0df258fabd4ceac1b132af88158311c Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Mon, 29 Sep 2025 09:12:18 +0200
Subject: [PATCH 178/236] chore: increase timeout a bit
In the case of Opus, for example, a response can take longer to arrive
even while streaming, for some reason.
---
internal/llm/agent/agent.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index ce2a08d81ef9494e57739061f85696f771163991..2a3f45a2c56c74c80d32abead5038f9697dbf319 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -26,7 +26,7 @@ import (
"github.com/charmbracelet/crush/internal/shell"
)
-const streamChunkTimeout = 80 * time.Second
+const streamChunkTimeout = 2 * time.Minute
type AgentEventType string
From 107a82355d85442613e419ca5f29ac471f867c84 Mon Sep 17 00:00:00 2001
From: nguyen <51743767+ngnhng@users.noreply.github.com>
Date: Mon, 29 Sep 2025 16:58:09 +0700
Subject: [PATCH 179/236] fix(openai): do not retry 429 insufficient-quota errors (#546)
---
internal/llm/provider/openai.go | 12 ++++-
internal/llm/provider/openai_test.go | 76 ++++++++++++++++++++++++++++
2 files changed, 86 insertions(+), 2 deletions(-)
diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index 8ec366caff4156fbf4baae76fc24ce5c30d4a91d..3e92e077b3156ddccc186e0b104b7db174290c18 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -529,11 +529,19 @@ func (o *openaiClient) shouldRetry(attempts int, err error) (bool, int64, error)
return true, 0, nil
}
- if apiErr.StatusCode != http.StatusTooManyRequests && apiErr.StatusCode != http.StatusInternalServerError {
+ if apiErr.StatusCode == http.StatusTooManyRequests {
+ // Check if this is an insufficient quota error (permanent)
+ if apiErr.Type == "insufficient_quota" || apiErr.Code == "insufficient_quota" {
+ return false, 0, fmt.Errorf("OpenAI quota exceeded: %s. Please check your plan and billing details", apiErr.Message)
+ }
+ // Other 429 errors (rate limiting) can be retried
+ } else if apiErr.StatusCode != http.StatusInternalServerError {
return false, 0, err
}
- retryAfterValues = apiErr.Response.Header.Values("Retry-After")
+ if apiErr.Response != nil {
+ retryAfterValues = apiErr.Response.Header.Values("Retry-After")
+ }
}
if apiErr != nil {
diff --git a/internal/llm/provider/openai_test.go b/internal/llm/provider/openai_test.go
index 8088ba22b4cd49b26130cd3812e8705e8dfe1cba..52b0a20c9316d67ba987ccc5051aa2f6d321aff4 100644
--- a/internal/llm/provider/openai_test.go
+++ b/internal/llm/provider/openai_test.go
@@ -6,6 +6,7 @@ import (
"net/http"
"net/http/httptest"
"os"
+ "strings"
"testing"
"time"
@@ -88,3 +89,78 @@ func TestOpenAIClientStreamChoices(t *testing.T) {
}
}
}
+
+func TestOpenAIClient429InsufficientQuotaError(t *testing.T) {
+ client := &openaiClient{
+ providerOptions: providerClientOptions{
+ modelType: config.SelectedModelTypeLarge,
+ apiKey: "test-key",
+ systemMessage: "test",
+ config: config.ProviderConfig{
+ ID: "test-openai",
+ APIKey: "test-key",
+ },
+ model: func(config.SelectedModelType) catwalk.Model {
+ return catwalk.Model{
+ ID: "test-model",
+ Name: "test-model",
+ }
+ },
+ },
+ }
+
+ // Test insufficient_quota error should not retry
+ apiErr := &openai.Error{
+ StatusCode: 429,
+ Message: "You exceeded your current quota, please check your plan and billing details. For more information on this error, read the docs: https://platform.openai.com/docs/guides/error-codes/api-errors.",
+ Type: "insufficient_quota",
+ Code: "insufficient_quota",
+ }
+
+ retry, _, err := client.shouldRetry(1, apiErr)
+ if retry {
+ t.Error("Expected shouldRetry to return false for insufficient_quota error, but got true")
+ }
+ if err == nil {
+ t.Error("Expected shouldRetry to return an error for insufficient_quota, but got nil")
+ }
+ if err != nil && !strings.Contains(err.Error(), "quota") {
+ t.Errorf("Expected error message to mention quota, got: %v", err)
+ }
+}
+
+func TestOpenAIClient429RateLimitError(t *testing.T) {
+ client := &openaiClient{
+ providerOptions: providerClientOptions{
+ modelType: config.SelectedModelTypeLarge,
+ apiKey: "test-key",
+ systemMessage: "test",
+ config: config.ProviderConfig{
+ ID: "test-openai",
+ APIKey: "test-key",
+ },
+ model: func(config.SelectedModelType) catwalk.Model {
+ return catwalk.Model{
+ ID: "test-model",
+ Name: "test-model",
+ }
+ },
+ },
+ }
+
+ // Test regular rate limit error should retry
+ apiErr := &openai.Error{
+ StatusCode: 429,
+ Message: "Rate limit reached for requests",
+ Type: "rate_limit_exceeded",
+ Code: "rate_limit_exceeded",
+ }
+
+ retry, _, err := client.shouldRetry(1, apiErr)
+ if !retry {
+ t.Error("Expected shouldRetry to return true for rate_limit_exceeded error, but got false")
+ }
+ if err != nil {
+ t.Errorf("Expected shouldRetry to return nil error for rate_limit_exceeded, but got: %v", err)
+ }
+}
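
A rough sketch of the retry classification this patch introduces, using a stand-in error type whose field names follow the tests above: 429 responses are retryable rate limits unless they carry insufficient_quota, which is a permanent billing problem that retrying can never fix.

package main

import (
	"fmt"
	"net/http"
)

// apiError models only the fields needed for retry classification.
type apiError struct {
	StatusCode int
	Type       string
	Code       string
	Message    string
}

func (e *apiError) Error() string { return e.Message }

// shouldRetry reports whether a failed request is worth retrying.
func shouldRetry(err *apiError) (bool, error) {
	switch {
	case err.StatusCode == http.StatusTooManyRequests:
		if err.Type == "insufficient_quota" || err.Code == "insufficient_quota" {
			return false, fmt.Errorf("quota exceeded: %s", err.Message)
		}
		return true, nil // plain rate limiting: back off and retry
	case err.StatusCode == http.StatusInternalServerError:
		return true, nil // transient server error
	default:
		return false, err // not retryable
	}
}

func main() {
	retry, err := shouldRetry(&apiError{StatusCode: 429, Code: "insufficient_quota", Message: "check billing"})
	fmt.Println(retry, err) // false quota exceeded: check billing
	retry, err = shouldRetry(&apiError{StatusCode: 429, Code: "rate_limit_exceeded"})
	fmt.Println(retry, err) // true <nil>
}
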
From 4f28c51bf5d38084c332b7383f8a927fab839c3b Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Mon, 29 Sep 2025 08:19:03 -0300
Subject: [PATCH 180/236] fix(mcp): do not eat list tools errors (#1138)
Signed-off-by: Carlos Alexandro Becker
---
internal/llm/agent/mcp-tools.go | 35 +++++++++++++++++----------------
1 file changed, 18 insertions(+), 17 deletions(-)
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index a2e6b912ab503c61522501ad522a9f0a65fc37b0..f5125c8b89f2dda534396f3c51df3839390022ce 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -196,14 +196,10 @@ func (b *McpTool) Run(ctx context.Context, params tools.ToolCall) (tools.ToolRes
return runTool(ctx, b.mcpName, b.tool.Name, params.Input)
}
-func getTools(ctx context.Context, name string, permissions permission.Service, c *client.Client, workingDir string) []tools.BaseTool {
+func getTools(ctx context.Context, name string, permissions permission.Service, c *client.Client, workingDir string) ([]tools.BaseTool, error) {
result, err := c.ListTools(ctx, mcp.ListToolsRequest{})
if err != nil {
- slog.Error("error listing tools", "error", err)
- updateMCPState(name, MCPStateError, err, nil, 0)
- c.Close()
- mcpClients.Del(name)
- return nil
+ return nil, err
}
mcpTools := make([]tools.BaseTool, 0, len(result.Tools))
for _, tool := range result.Tools {
@@ -214,7 +210,7 @@ func getTools(ctx context.Context, name string, permissions permission.Service,
workingDir: workingDir,
})
}
- return mcpTools
+ return mcpTools, nil
}
// SubscribeMCPEvents returns a channel for MCP events
@@ -314,13 +310,21 @@ func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *con
ctx, cancel := context.WithTimeout(ctx, mcpTimeout(m))
defer cancel()
+
c, err := createAndInitializeClient(ctx, name, m, cfg.Resolver())
if err != nil {
return
}
- mcpClients.Set(name, c)
- tools := getTools(ctx, name, permissions, c, cfg.WorkingDir())
+ tools, err := getTools(ctx, name, permissions, c, cfg.WorkingDir())
+ if err != nil {
+ slog.Error("error listing tools", "error", err)
+ updateMCPState(name, MCPStateError, err, nil, 0)
+ c.Close()
+ return
+ }
+
+ mcpClients.Set(name, c)
updateMCPState(name, MCPStateConnected, nil, c, len(tools))
result.Append(tools...)
}(name, m)
@@ -341,14 +345,11 @@ func createAndInitializeClient(ctx context.Context, name string, m config.MCPCon
initCtx, cancel := context.WithTimeout(ctx, timeout)
defer cancel()
- // Only call Start() for non-stdio clients, as stdio clients auto-start
- if m.Type != config.MCPStdio {
- if err := c.Start(initCtx); err != nil {
- updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
- slog.Error("error starting mcp client", "error", err, "name", name)
- _ = c.Close()
- return nil, err
- }
+ if err := c.Start(initCtx); err != nil {
+ updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
+ slog.Error("error starting mcp client", "error", err, "name", name)
+ _ = c.Close()
+ return nil, err
}
if _, err := c.Initialize(initCtx, mcpInitRequest); err != nil {
updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
From 38bed4ade99397d9657722ae477ee5256fa82505 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 29 Sep 2025 11:49:37 +0000
Subject: [PATCH 181/236] chore(deps): bump github.com/mark3labs/mcp-go from
0.40.0 to 0.41.0 in the all group (#1156)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
go.mod | 2 +-
go.sum | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/go.mod b/go.mod
index d3e668320cfdec39160d618b189f1470bf07d028..699233cdd52fe59165e8f9c44a85d1413f1bc4b6 100644
--- a/go.mod
+++ b/go.mod
@@ -26,7 +26,7 @@ require (
github.com/google/uuid v1.6.0
github.com/invopop/jsonschema v0.13.0
github.com/joho/godotenv v1.5.1
- github.com/mark3labs/mcp-go v0.40.0
+ github.com/mark3labs/mcp-go v0.41.0
github.com/muesli/termenv v0.16.0
github.com/ncruces/go-sqlite3 v0.29.0
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646
diff --git a/go.sum b/go.sum
index f69217e9d4e9831abc8e1b47b80e23a19dcfcffa..f54651f8f6b5fa0e6f9f4a3ee53a61d0eec0970c 100644
--- a/go.sum
+++ b/go.sum
@@ -194,8 +194,8 @@ github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQ
github.com/lucasb-eyer/go-colorful v1.3.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
-github.com/mark3labs/mcp-go v0.40.0 h1:M0oqK412OHBKut9JwXSsj4KanSmEKpzoW8TcxoPOkAU=
-github.com/mark3labs/mcp-go v0.40.0/go.mod h1:T7tUa2jO6MavG+3P25Oy/jR7iCeJPHImCZHRymCn39g=
+github.com/mark3labs/mcp-go v0.41.0 h1:IFfJaovCet65F3av00bE1HzSnmHpMRWM1kz96R98I70=
+github.com/mark3labs/mcp-go v0.41.0/go.mod h1:T7tUa2jO6MavG+3P25Oy/jR7iCeJPHImCZHRymCn39g=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
From e762ddf8232a9ca2419f4c3c4b0417026b8d4d59 Mon Sep 17 00:00:00 2001
From: Kujtim Hoxha
Date: Mon, 29 Sep 2025 17:21:27 +0200
Subject: [PATCH 182/236] fix(agent): remove timeout for now (#1158)
---
internal/llm/agent/agent.go | 12 ------------
internal/llm/agent/errors.go | 1 -
2 files changed, 13 deletions(-)
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index 2a3f45a2c56c74c80d32abead5038f9697dbf319..9bae6e5b8092b987b1c8146460cef946e595beb5 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -26,8 +26,6 @@ import (
"github.com/charmbracelet/crush/internal/shell"
)
-const streamChunkTimeout = 2 * time.Minute
-
type AgentEventType string
const (
@@ -554,10 +552,6 @@ func (a *agent) streamAndHandleEvents(ctx context.Context, sessionID string, msg
// Add the session and message ID into the context if needed by tools.
ctx = context.WithValue(ctx, tools.MessageIDContextKey, assistantMsg.ID)
- // Process each event in the stream.
- timer := time.NewTimer(streamChunkTimeout)
- defer timer.Stop()
-
loop:
for {
select {
@@ -565,9 +559,6 @@ loop:
if !ok {
break loop
}
- // Reset the timeout timer since we received a chunk
- timer.Reset(streamChunkTimeout)
-
if processErr := a.processEvent(ctx, sessionID, &assistantMsg, event); processErr != nil {
if errors.Is(processErr, context.Canceled) {
a.finishMessage(context.Background(), &assistantMsg, message.FinishReasonCanceled, "Request cancelled", "")
@@ -576,9 +567,6 @@ loop:
}
return assistantMsg, nil, processErr
}
- case <-timer.C:
- a.finishMessage(ctx, &assistantMsg, message.FinishReasonError, "Stream timeout", "No chunk received within timeout")
- return assistantMsg, nil, ErrStreamTimeout
case <-ctx.Done():
a.finishMessage(context.Background(), &assistantMsg, message.FinishReasonCanceled, "Request cancelled", "")
return assistantMsg, nil, ctx.Err()
diff --git a/internal/llm/agent/errors.go b/internal/llm/agent/errors.go
index 943918390c6708b5ae6ea5e40e50ebce209cc263..0e2f983d64b42b93ad3a51f32ce0335b0374a613 100644
--- a/internal/llm/agent/errors.go
+++ b/internal/llm/agent/errors.go
@@ -7,7 +7,6 @@ import (
var (
ErrRequestCancelled = errors.New("request canceled by user")
- ErrStreamTimeout = errors.New("stream chunk timeout")
ErrSessionBusy = errors.New("session is currently processing another request")
)
From 53a4703c97e6a55008dd19fe663ec8738452aaf2 Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Mon, 29 Sep 2025 15:03:39 -0300
Subject: [PATCH 184/236] chore(taskfile): change `release` task to add a
commit for the tag (#1159)
---
Taskfile.yaml | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/Taskfile.yaml b/Taskfile.yaml
index 80d6bd86d1070e2f4e900660a7cab060ebdfbcea..54b50a68217b6ff66ddf1de9a28a8f45d224fefc 100644
--- a/Taskfile.yaml
+++ b/Taskfile.yaml
@@ -97,7 +97,8 @@ tasks:
- sh: "[ $(git status --porcelain=2 | wc -l) = 0 ]"
msg: "Git is dirty"
cmds:
+ - git commit --allow-empty -m "{{.NEXT}}"
- git tag -d nightly
- git tag --sign {{.NEXT}} {{.CLI_ARGS}}
- - echo "pushing {{.NEXT}}..."
+ - echo "Pushing {{.NEXT}}..."
- git push origin --tags
From de306c21924e3ed6c174dcc8c58f771c53cdf3a4 Mon Sep 17 00:00:00 2001
From: Christian Rocha
Date: Mon, 29 Sep 2025 15:17:26 -0400
Subject: [PATCH 185/236] chore: update session chooser key help text
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
- Change Enter key help from "confirm" to "choose"
- Change Escape key help from "cancel" to "exit"
💘 Generated with Crush
Co-Authored-By: Crush
---
internal/tui/components/dialogs/sessions/keys.go | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/internal/tui/components/dialogs/sessions/keys.go b/internal/tui/components/dialogs/sessions/keys.go
index bc7ec1ba9f83915caee9189504abf0b07bd4a24b..73c50899f7ae7da3655fc8a3e3a3dd34c4c22f95 100644
--- a/internal/tui/components/dialogs/sessions/keys.go
+++ b/internal/tui/components/dialogs/sessions/keys.go
@@ -15,7 +15,7 @@ func DefaultKeyMap() KeyMap {
return KeyMap{
Select: key.NewBinding(
key.WithKeys("enter", "tab", "ctrl+y"),
- key.WithHelp("enter", "confirm"),
+ key.WithHelp("enter", "choose"),
),
Next: key.NewBinding(
key.WithKeys("down", "ctrl+n"),
@@ -27,7 +27,7 @@ func DefaultKeyMap() KeyMap {
),
Close: key.NewBinding(
key.WithKeys("esc", "alt+esc"),
- key.WithHelp("esc", "cancel"),
+ key.WithHelp("esc", "exit"),
),
}
}
From cfa3244a77c8eb4326bbc6c4ecb91fdc70c8693f Mon Sep 17 00:00:00 2001
From: Christian Rocha
Date: Mon, 29 Sep 2025 17:15:40 -0400
Subject: [PATCH 186/236] chore: update model chooser key help text
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
- Change Enter key help from "confirm" to "choose"
- Change Escape key help from "cancel" to "exit"
💘 Generated with Crush
Co-Authored-By: Crush
---
internal/tui/components/dialogs/models/keys.go | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/internal/tui/components/dialogs/models/keys.go b/internal/tui/components/dialogs/models/keys.go
index ef4a6228b839c43a3862e251999dadf81dd6403f..4ec1a487e865981edc0be5852bb6c044ddf04c1f 100644
--- a/internal/tui/components/dialogs/models/keys.go
+++ b/internal/tui/components/dialogs/models/keys.go
@@ -19,7 +19,7 @@ func DefaultKeyMap() KeyMap {
return KeyMap{
Select: key.NewBinding(
key.WithKeys("enter", "ctrl+y"),
- key.WithHelp("enter", "confirm"),
+ key.WithHelp("enter", "choose"),
),
Next: key.NewBinding(
key.WithKeys("down", "ctrl+n"),
@@ -35,7 +35,7 @@ func DefaultKeyMap() KeyMap {
),
Close: key.NewBinding(
key.WithKeys("esc", "alt+esc"),
- key.WithHelp("esc", "cancel"),
+ key.WithHelp("esc", "exit"),
),
}
}
From 6cd1733aed022fefe5ec54416c2b01d42c797288 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Tue, 30 Sep 2025 10:55:07 -0300
Subject: [PATCH 187/236] fix(mcp): SSE MCPs not working (#1157)
Signed-off-by: Carlos Alexandro Becker
---
internal/llm/agent/mcp-tools.go | 17 ++++++++++-------
1 file changed, 10 insertions(+), 7 deletions(-)
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index f5125c8b89f2dda534396f3c51df3839390022ce..ebd1698f2f7bf45ecda15c9160464e3d295ce3d6 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -341,29 +341,32 @@ func createAndInitializeClient(ctx context.Context, name string, m config.MCPCon
return nil, err
}
+ // XXX: ideally we should be able to use context.WithTimeout here, but,
+ // the SSE MCP client will start failing once that context is canceled.
timeout := mcpTimeout(m)
- initCtx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- if err := c.Start(initCtx); err != nil {
+ mcpCtx, cancel := context.WithCancel(ctx)
+ cancelTimer := time.AfterFunc(timeout, cancel)
+ if err := c.Start(mcpCtx); err != nil {
updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
slog.Error("error starting mcp client", "error", err, "name", name)
_ = c.Close()
+ cancel()
return nil, err
}
- if _, err := c.Initialize(initCtx, mcpInitRequest); err != nil {
+ if _, err := c.Initialize(mcpCtx, mcpInitRequest); err != nil {
updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
slog.Error("error initializing mcp client", "error", err, "name", name)
_ = c.Close()
+ cancel()
return nil, err
}
-
+ cancelTimer.Stop()
slog.Info("Initialized mcp client", "name", name)
return c, nil
}
func maybeTimeoutErr(err error, timeout time.Duration) error {
- if errors.Is(err, context.DeadlineExceeded) {
+ if errors.Is(err, context.Canceled) {
return fmt.Errorf("timed out after %s", timeout)
}
return err
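
A standalone sketch of the timeout technique in this patch, assuming only the standard library: context.WithCancel plus time.AfterFunc gives initialization a deadline without using context.WithTimeout, so the context stays usable after startup succeeds (which the SSE MCP client needs), and the timer is simply disarmed once init finishes. All names are illustrative.

package main

import (
	"context"
	"fmt"
	"time"
)

// initWithDeadline runs init with a context that is cancelled if initialization
// takes longer than timeout, but the context keeps living afterwards so a client
// that holds on to it is not killed once startup has succeeded.
func initWithDeadline(ctx context.Context, timeout time.Duration, init func(context.Context) error) (context.Context, context.CancelFunc, error) {
	runCtx, cancel := context.WithCancel(ctx)
	cancelTimer := time.AfterFunc(timeout, cancel) // fires only if init overruns
	if err := init(runCtx); err != nil {
		cancel()
		return nil, nil, err
	}
	cancelTimer.Stop() // disarm: the context now lives until the caller cancels it
	return runCtx, cancel, nil
}

func main() {
	ctx, cancel, err := initWithDeadline(context.Background(), 50*time.Millisecond, func(ctx context.Context) error {
		select {
		case <-time.After(10 * time.Millisecond): // pretend startup work
			return nil
		case <-ctx.Done():
			return fmt.Errorf("timed out during init")
		}
	})
	if err != nil {
		fmt.Println("init failed:", err)
		return
	}
	defer cancel()
	fmt.Println("initialized; context still usable:", ctx.Err() == nil)
}
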
From 598e2f68ea98d11c0a87a566aa4a4e79396c1825 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Tue, 30 Sep 2025 13:32:57 -0300
Subject: [PATCH 188/236] fix(style): heartbit in --version
Signed-off-by: Carlos Alexandro Becker
---
internal/cmd/root.go | 36 ++++++++++++++++++++++++++++++++++++
1 file changed, 36 insertions(+)
diff --git a/internal/cmd/root.go b/internal/cmd/root.go
index ea9c218b67c65815b6bcc2c8b1cb17fd02390b39..825c35419f17248d1ea854a1a0ae2aca27bcaa20 100644
--- a/internal/cmd/root.go
+++ b/internal/cmd/root.go
@@ -1,6 +1,7 @@
package cmd
import (
+ "bytes"
"context"
"fmt"
"io"
@@ -10,6 +11,7 @@ import (
"strconv"
tea "github.com/charmbracelet/bubbletea/v2"
+ "github.com/charmbracelet/colorprofile"
"github.com/charmbracelet/crush/internal/app"
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/db"
@@ -17,6 +19,8 @@ import (
"github.com/charmbracelet/crush/internal/tui"
"github.com/charmbracelet/crush/internal/version"
"github.com/charmbracelet/fang"
+ "github.com/charmbracelet/lipgloss/v2"
+ "github.com/charmbracelet/x/exp/charmtone"
"github.com/charmbracelet/x/term"
"github.com/spf13/cobra"
)
@@ -93,7 +97,39 @@ crush -y
},
}
+var heartbit = lipgloss.NewStyle().Foreground(charmtone.Dolly).SetString(`
+ ▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄
+ ███████████ ███████████
+████████████████████████████
+████████████████████████████
+██████████▀██████▀██████████
+██████████ ██████ ██████████
+▀▀██████▄████▄▄████▄██████▀▀
+ ████████████████████████
+ ████████████████████
+ ▀▀██████████▀▀
+ ▀▀▀▀▀▀
+`)
+
+// copied from cobra:
+const defaultVersionTemplate = `{{with .DisplayName}}{{printf "%s " .}}{{end}}{{printf "version %s" .Version}}
+`
+
func Execute() {
+ // NOTE: very hacky: we create a colorprofile writer with STDOUT, then make
+ // it forward to a bytes.Buffer, write the colored heartbit to it, and then
+ // finally prepend it in the version template.
+ // Unfortunately cobra doesn't give us a way to set a function to handle
+ // printing the version, and PreRunE runs after the version is already
+ // handled, so that doesn't work either.
+ // This is the only way I could find that works relatively well.
+ if term.IsTerminal(os.Stdout.Fd()) {
+ var b bytes.Buffer
+ w := colorprofile.NewWriter(os.Stdout, os.Environ())
+ w.Forward = &b
+ _, _ = w.WriteString(heartbit.String())
+ rootCmd.SetVersionTemplate(b.String() + "\n" + defaultVersionTemplate)
+ }
if err := fang.Execute(
context.Background(),
rootCmd,
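The trick described in the NOTE comment is worth spelling out: colorprofile.NewWriter inspects stdout and the environment to decide which color profile to degrade to, while its Forward field redirects the already-degraded output into a buffer instead of the terminal; that buffered string can then be spliced into cobra's plain-text version template. A small standalone sketch of the same mechanism, using a made-up banner string rather than the heartbit art:

package main

import (
	"bytes"
	"fmt"
	"os"

	"github.com/charmbracelet/colorprofile"
	"github.com/charmbracelet/lipgloss/v2"
)

func main() {
	// Any styled string will do; the banner here is just an example.
	banner := lipgloss.NewStyle().Bold(true).SetString("crush").String()

	// The writer picks the color profile from os.Stdout and the environment,
	// but forwards the (possibly downgraded) bytes into b instead of printing.
	var b bytes.Buffer
	w := colorprofile.NewWriter(os.Stdout, os.Environ())
	w.Forward = &b
	_, _ = w.WriteString(banner)

	// b now holds output tailored to the terminal's capabilities and can be
	// prepended to a plain template, as the patch does for --version.
	fmt.Println(b.String() + " version (template goes here)")
}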
From e5e01d5f9db2e784954bbde9273a34c97f81ea4c Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Tue, 30 Sep 2025 15:36:58 -0300
Subject: [PATCH 189/236] ci: update changelog group names
Signed-off-by: Carlos Alexandro Becker
---
.goreleaser.yml | 20 ++++++++++----------
1 file changed, 10 insertions(+), 10 deletions(-)
diff --git a/.goreleaser.yml b/.goreleaser.yml
index c0da1c50aec71d899b0cffe09be64e3756e92f51..aabf2f7606462ebb540fd6ebe9efb302a6855e5f 100644
--- a/.goreleaser.yml
+++ b/.goreleaser.yml
@@ -312,19 +312,19 @@ changelog:
- "^wip "
- "^wip:"
groups:
- - title: "New Features"
- regexp: '^.*?feat(\(.+\))??!?:.+$'
+ - title: "Deps"
+ regexp: "^.*\\(deps\\)*:+.*$"
+ order: 300
+ - title: "New!"
+ regexp: "^.*feat[(\\w)]*:+.*$"
order: 100
- - title: "Security updates"
- regexp: '^.*?sec(\(.+\))??!?:.+$'
- order: 150
- - title: "Bug fixes and improvements"
- regexp: '^.*?(fix|refactor)(\(.+\))??!?:.+$'
+ - title: "Fixed"
+ regexp: "^.*fix[(\\w)]*:+.*$"
order: 200
- - title: "Documentation updates"
- regexp: ^.*?docs?(\(.+\))??!?:.+$
+ - title: "Docs"
+ regexp: "^.*docs[(\\w)]*:+.*$"
order: 400
- - title: Other work
+ - title: "Other stuff"
order: 9999
release:
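Once the YAML double-quote escaping is undone (`\\(deps\\)` becomes `\(deps\)`, and so on), the new group patterns are ordinary Go regular expressions, which is what GoReleaser evaluates. A rough sketch that checks a commit subject against the patterns in the order they are listed; this approximates the grouping and is not GoReleaser itself:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// The same patterns as in .goreleaser.yml, with YAML escaping removed.
	groups := []struct{ title, pattern string }{
		{"Deps", `^.*\(deps\)*:+.*$`},
		{"New!", `^.*feat[(\w)]*:+.*$`},
		{"Fixed", `^.*fix[(\w)]*:+.*$`},
		{"Docs", `^.*docs[(\w)]*:+.*$`},
	}

	subject := "fix(mcp): SSE MCPs not working (#1157)"
	for _, g := range groups {
		if regexp.MustCompile(g.pattern).MatchString(subject) {
			fmt.Println(subject, "->", g.title)
			return
		}
	}
	fmt.Println(subject, "-> Other stuff")
}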
From afa10c5eb9c84e8a78233446ec4c2803b1c868d8 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Tue, 30 Sep 2025 17:53:53 -0300
Subject: [PATCH 190/236] feat: crush dirs (#551)
Signed-off-by: Carlos Alexandro Becker
Co-authored-by: Christian Rocha
---
internal/cmd/dirs.go | 66 +++++++++++++++++++++++++++++++++++++++++
internal/cmd/logs.go | 1 -
internal/cmd/root.go | 9 ++++--
internal/cmd/schema.go | 4 ---
internal/config/load.go | 5 ++--
5 files changed, 76 insertions(+), 9 deletions(-)
create mode 100644 internal/cmd/dirs.go
diff --git a/internal/cmd/dirs.go b/internal/cmd/dirs.go
new file mode 100644
index 0000000000000000000000000000000000000000..d3bc0bd4fb1482e2657eedaab9ce4cee30a09373
--- /dev/null
+++ b/internal/cmd/dirs.go
@@ -0,0 +1,66 @@
+package cmd
+
+import (
+ "os"
+ "path/filepath"
+
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/lipgloss/v2"
+ "github.com/charmbracelet/lipgloss/v2/table"
+ "github.com/charmbracelet/x/term"
+ "github.com/spf13/cobra"
+)
+
+var dirsCmd = &cobra.Command{
+ Use: "dirs",
+ Short: "Print directories used by Crush",
+ Long: `Print the directories where Crush stores its configuration and data files.
+This includes the global configuration directory and data directory.`,
+ Example: `
+# Print all directories
+crush dirs
+
+# Print only the config directory
+crush dirs config
+
+# Print only the data directory
+crush dirs data
+ `,
+ Run: func(cmd *cobra.Command, args []string) {
+ if term.IsTerminal(os.Stdout.Fd()) {
+ // We're in a TTY: make it fancy.
+ t := table.New().
+ Border(lipgloss.RoundedBorder()).
+ StyleFunc(func(row, col int) lipgloss.Style {
+ return lipgloss.NewStyle().Padding(0, 2)
+ }).
+ Row("Config", filepath.Dir(config.GlobalConfig())).
+ Row("Data", filepath.Dir(config.GlobalConfigData()))
+ lipgloss.Println(t)
+ return
+ }
+ // Not a TTY.
+ cmd.Println(filepath.Dir(config.GlobalConfig()))
+ cmd.Println(filepath.Dir(config.GlobalConfigData()))
+ },
+}
+
+var configDirCmd = &cobra.Command{
+ Use: "config",
+ Short: "Print the configuration directory used by Crush",
+ Run: func(cmd *cobra.Command, args []string) {
+ cmd.Println(filepath.Dir(config.GlobalConfig()))
+ },
+}
+
+var dataDirCmd = &cobra.Command{
+ Use: "data",
+ Short: "Print the datauration directory used by Crush",
+ Run: func(cmd *cobra.Command, args []string) {
+ cmd.Println(filepath.Dir(config.GlobalConfigData()))
+ },
+}
+
+func init() {
+ dirsCmd.AddCommand(configDirCmd, dataDirCmd)
+}
diff --git a/internal/cmd/logs.go b/internal/cmd/logs.go
index 85921c4e4354194d0d260e814fc61222c114d3ef..e7160f4a1307406be20f1fe00a59e93de5232d67 100644
--- a/internal/cmd/logs.go
+++ b/internal/cmd/logs.go
@@ -68,7 +68,6 @@ var logsCmd = &cobra.Command{
func init() {
logsCmd.Flags().BoolP("follow", "f", false, "Follow log output")
logsCmd.Flags().IntP("tail", "t", defaultTailLines, "Show only the last N lines default: 1000 for performance")
- rootCmd.AddCommand(logsCmd)
}
func followLogs(ctx context.Context, logsFile string, tailLines int) error {
diff --git a/internal/cmd/root.go b/internal/cmd/root.go
index 825c35419f17248d1ea854a1a0ae2aca27bcaa20..0a2be1cbe6e480fe5719640711bd4caffb430229 100644
--- a/internal/cmd/root.go
+++ b/internal/cmd/root.go
@@ -33,8 +33,13 @@ func init() {
rootCmd.Flags().BoolP("help", "h", false, "Help")
rootCmd.Flags().BoolP("yolo", "y", false, "Automatically accept all permissions (dangerous mode)")
- rootCmd.AddCommand(runCmd)
- rootCmd.AddCommand(updateProvidersCmd)
+ rootCmd.AddCommand(
+ runCmd,
+ dirsCmd,
+ updateProvidersCmd,
+ logsCmd,
+ schemaCmd,
+ )
}
var rootCmd = &cobra.Command{
diff --git a/internal/cmd/schema.go b/internal/cmd/schema.go
index f835e250c24ea91a9d5084c9a414ed0e1ae28474..6070eb9144dc0e46bf0f374b2cb1a860f09e83e9 100644
--- a/internal/cmd/schema.go
+++ b/internal/cmd/schema.go
@@ -24,7 +24,3 @@ var schemaCmd = &cobra.Command{
return nil
},
}
-
-func init() {
- rootCmd.AddCommand(schemaCmd)
-}
diff --git a/internal/config/load.go b/internal/config/load.go
index e39074f78bdb8df0ddc98bfbc7322541175b71d6..b36813084049a89b5e67d79d6342335cb85230e3 100644
--- a/internal/config/load.go
+++ b/internal/config/load.go
@@ -520,7 +520,7 @@ func (c *Config) configureSelectedModels(knownProviders []catwalk.Provider) erro
func lookupConfigs(cwd string) []string {
// prepend default config paths
configPaths := []string{
- globalConfig(),
+ GlobalConfig(),
GlobalConfigData(),
}
@@ -596,7 +596,8 @@ func hasAWSCredentials(env env.Env) bool {
return false
}
-func globalConfig() string {
+// GlobalConfig returns the global configuration file path for the application.
+func GlobalConfig() string {
xdgConfigHome := os.Getenv("XDG_CONFIG_HOME")
if xdgConfigHome != "" {
return filepath.Join(xdgConfigHome, appName, fmt.Sprintf("%s.json", appName))
From 8c7c0db22606db51910c2a24dc15319369ce60f6 Mon Sep 17 00:00:00 2001
From: 林玮 (Jade Lin)
Date: Wed, 1 Oct 2025 20:03:17 +0800
Subject: [PATCH 192/236] feat(mcp): notifications support - tools/list_changed
(#967)
Signed-off-by: Carlos Alexandro Becker
Co-authored-by: Carlos Alexandro Becker
---
internal/csync/maps.go | 19 +++++
internal/csync/maps_test.go | 52 +++++++++++++
internal/llm/agent/agent.go | 128 +++++++++++++++++++++++---------
internal/llm/agent/mcp-tools.go | 65 ++++++++++++----
4 files changed, 217 insertions(+), 47 deletions(-)
diff --git a/internal/csync/maps.go b/internal/csync/maps.go
index b7a1f3109f6c15e7e5592cb538943a2d9e340819..1fd2005790014b2ce4bd5a78dbb7931d54cbe66c 100644
--- a/internal/csync/maps.go
+++ b/internal/csync/maps.go
@@ -27,6 +27,25 @@ func NewMapFrom[K comparable, V any](m map[K]V) *Map[K, V] {
}
}
+// NewLazyMap creates a new lazy-loaded map. The provided load function is
+// executed in a separate goroutine to populate the map.
+func NewLazyMap[K comparable, V any](load func() map[K]V) *Map[K, V] {
+ m := &Map[K, V]{}
+ m.mu.Lock()
+ go func() {
+ m.inner = load()
+ m.mu.Unlock()
+ }()
+ return m
+}
+
+// Reset replaces the inner map with the new one.
+func (m *Map[K, V]) Reset(input map[K]V) {
+ m.mu.Lock()
+ defer m.mu.Unlock()
+ m.inner = input
+}
+
// Set sets the value for the specified key in the map.
func (m *Map[K, V]) Set(key K, value V) {
m.mu.Lock()
diff --git a/internal/csync/maps_test.go b/internal/csync/maps_test.go
index 4a8019260a2610b7f5ae0d854029207c6b945d04..4c590f008dad91e8dcbc40d1b90d87ef1b3e5750 100644
--- a/internal/csync/maps_test.go
+++ b/internal/csync/maps_test.go
@@ -5,6 +5,8 @@ import (
"maps"
"sync"
"testing"
+ "testing/synctest"
+ "time"
"github.com/stretchr/testify/require"
)
@@ -36,6 +38,56 @@ func TestNewMapFrom(t *testing.T) {
require.Equal(t, 1, value)
}
+func TestNewLazyMap(t *testing.T) {
+ t.Parallel()
+
+ synctest.Test(t, func(t *testing.T) {
+ t.Helper()
+
+ waiter := sync.Mutex{}
+ waiter.Lock()
+ loadCalled := false
+
+ loadFunc := func() map[string]int {
+ waiter.Lock()
+ defer waiter.Unlock()
+ loadCalled = true
+ return map[string]int{
+ "key1": 1,
+ "key2": 2,
+ }
+ }
+
+ m := NewLazyMap(loadFunc)
+ require.NotNil(t, m)
+
+ waiter.Unlock() // Allow the load function to proceed
+ time.Sleep(100 * time.Millisecond)
+ require.True(t, loadCalled)
+ require.Equal(t, 2, m.Len())
+
+ value, ok := m.Get("key1")
+ require.True(t, ok)
+ require.Equal(t, 1, value)
+ })
+}
+
+func TestMap_Reset(t *testing.T) {
+ t.Parallel()
+
+ m := NewMapFrom(map[string]int{
+ "a": 10,
+ })
+
+ m.Reset(map[string]int{
+ "b": 20,
+ })
+ value, ok := m.Get("b")
+ require.True(t, ok)
+ require.Equal(t, 20, value)
+ require.Equal(t, 1, m.Len())
+}
+
func TestMap_Set(t *testing.T) {
t.Parallel()
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index 9bae6e5b8092b987b1c8146460cef946e595beb5..1efc3fc268392c06481d61ae6e11c9d67cdc13e8 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -5,6 +5,7 @@ import (
"errors"
"fmt"
"log/slog"
+ "maps"
"slices"
"strings"
"time"
@@ -65,11 +66,13 @@ type agent struct {
sessions session.Service
messages message.Service
permissions permission.Service
- mcpTools []McpTool
+ baseTools *csync.Map[string, tools.BaseTool]
+ mcpTools *csync.Map[string, tools.BaseTool]
+ lspClients *csync.Map[string, *lsp.Client]
- tools *csync.LazySlice[tools.BaseTool]
// We need this to be able to update it when model changes
- agentToolFn func() (tools.BaseTool, error)
+ agentToolFn func() (tools.BaseTool, error)
+ cleanupFuncs []func()
provider provider.Provider
providerID string
@@ -171,14 +174,16 @@ func NewAgent(
return nil, err
}
- toolFn := func() []tools.BaseTool {
- slog.Info("Initializing agent tools", "agent", agentCfg.ID)
+ baseToolsFn := func() map[string]tools.BaseTool {
+ slog.Info("Initializing agent base tools", "agent", agentCfg.ID)
defer func() {
- slog.Info("Initialized agent tools", "agent", agentCfg.ID)
+ slog.Info("Initialized agent base tools", "agent", agentCfg.ID)
}()
+ // Base tools available to all agents
cwd := cfg.WorkingDir()
- allTools := []tools.BaseTool{
+ result := make(map[string]tools.BaseTool)
+ for _, tool := range []tools.BaseTool{
tools.NewBashTool(permissions, cwd, cfg.Options.Attribution),
tools.NewDownloadTool(permissions, cwd),
tools.NewEditTool(lspClients, permissions, history, cwd),
@@ -190,36 +195,25 @@ func NewAgent(
tools.NewSourcegraphTool(),
tools.NewViewTool(lspClients, permissions, cwd),
tools.NewWriteTool(lspClients, permissions, history, cwd),
+ } {
+ result[tool.Name()] = tool
}
+ return result
+ }
+ mcpToolsFn := func() map[string]tools.BaseTool {
+ slog.Info("Initializing agent mcp tools", "agent", agentCfg.ID)
+ defer func() {
+ slog.Info("Initialized agent mcp tools", "agent", agentCfg.ID)
+ }()
mcpToolsOnce.Do(func() {
- mcpTools = doGetMCPTools(ctx, permissions, cfg)
+ doGetMCPTools(ctx, permissions, cfg)
})
- withCoderTools := func(t []tools.BaseTool) []tools.BaseTool {
- if agentCfg.ID == "coder" {
- t = append(t, mcpTools...)
- if lspClients.Len() > 0 {
- t = append(t, tools.NewDiagnosticsTool(lspClients))
- }
- }
- return t
- }
-
- if agentCfg.AllowedTools == nil {
- return withCoderTools(allTools)
- }
-
- var filteredTools []tools.BaseTool
- for _, tool := range allTools {
- if slices.Contains(agentCfg.AllowedTools, tool.Name()) {
- filteredTools = append(filteredTools, tool)
- }
- }
- return withCoderTools(filteredTools)
+ return maps.Collect(mcpTools.Seq2())
}
- return &agent{
+ a := &agent{
Broker: pubsub.NewBroker[AgentEvent](),
agentCfg: agentCfg,
provider: agentProvider,
@@ -231,10 +225,14 @@ func NewAgent(
summarizeProviderID: string(providerCfg.ID),
agentToolFn: agentToolFn,
activeRequests: csync.NewMap[string, context.CancelFunc](),
- tools: csync.NewLazySlice(toolFn),
+ mcpTools: csync.NewLazyMap(mcpToolsFn),
+ baseTools: csync.NewLazyMap(baseToolsFn),
promptQueue: csync.NewMap[string, []string](),
permissions: permissions,
- }, nil
+ lspClients: lspClients,
+ }
+ a.setupEvents(ctx)
+ return a, nil
}
func (a *agent) Model() catwalk.Model {
@@ -517,7 +515,18 @@ func (a *agent) createUserMessage(ctx context.Context, sessionID, content string
}
func (a *agent) getAllTools() ([]tools.BaseTool, error) {
- allTools := slices.Collect(a.tools.Seq())
+ var allTools []tools.BaseTool
+ for tool := range a.baseTools.Seq() {
+ if a.agentCfg.AllowedTools == nil || slices.Contains(a.agentCfg.AllowedTools, tool.Name()) {
+ allTools = append(allTools, tool)
+ }
+ }
+ if a.agentCfg.ID == "coder" {
+ allTools = slices.AppendSeq(allTools, a.mcpTools.Seq())
+ if a.lspClients.Len() > 0 {
+ allTools = append(allTools, tools.NewDiagnosticsTool(a.lspClients))
+ }
+ }
if a.agentToolFn != nil {
agentTool, agentToolErr := a.agentToolFn()
if agentToolErr != nil {
@@ -591,7 +600,7 @@ loop:
default:
// Continue processing
var tool tools.BaseTool
- allTools, _ := a.getAllTools()
+ allTools, _ = a.getAllTools()
for _, availableTool := range allTools {
if availableTool.Info().Name == toolCall.Name {
tool = availableTool
@@ -960,6 +969,12 @@ func (a *agent) CancelAll() {
a.Cancel(key) // key is sessionID
}
+ for _, cleanup := range a.cleanupFuncs {
+ if cleanup != nil {
+ cleanup()
+ }
+ }
+
timeout := time.After(5 * time.Second)
for a.IsBusy() {
select {
@@ -1071,3 +1086,48 @@ func (a *agent) UpdateModel() error {
return nil
}
+
+func (a *agent) setupEvents(ctx context.Context) {
+ ctx, cancel := context.WithCancel(ctx)
+
+ go func() {
+ subCh := SubscribeMCPEvents(ctx)
+
+ for {
+ select {
+ case event, ok := <-subCh:
+ if !ok {
+ slog.Debug("MCPEvents subscription channel closed")
+ return
+ }
+ switch event.Payload.Type {
+ case MCPEventToolsListChanged:
+ name := event.Payload.Name
+ c, ok := mcpClients.Get(name)
+ if !ok {
+ slog.Warn("MCP client not found for tools update", "name", name)
+ continue
+ }
+ cfg := config.Get()
+ tools, err := getTools(ctx, name, a.permissions, c, cfg.WorkingDir())
+ if err != nil {
+ slog.Error("error listing tools", "error", err)
+ updateMCPState(name, MCPStateError, err, nil, 0)
+ _ = c.Close()
+ continue
+ }
+ updateMcpTools(name, tools)
+ a.mcpTools.Reset(maps.Collect(mcpTools.Seq2()))
+ updateMCPState(name, MCPStateConnected, nil, c, a.mcpTools.Len())
+ default:
+ continue
+ }
+ case <-ctx.Done():
+ slog.Debug("MCPEvents subscription cancelled")
+ return
+ }
+ }
+ }()
+
+ a.cleanupFuncs = append(a.cleanupFuncs, cancel)
+}
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index ebd1698f2f7bf45ecda15c9160464e3d295ce3d6..181f32b7280faf3eb36040d2ebecf3f892350f53 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -8,7 +8,6 @@ import (
"fmt"
"log/slog"
"maps"
- "slices"
"strings"
"sync"
"time"
@@ -54,7 +53,8 @@ func (s MCPState) String() string {
type MCPEventType string
const (
- MCPEventStateChanged MCPEventType = "state_changed"
+ MCPEventStateChanged MCPEventType = "state_changed"
+ MCPEventToolsListChanged MCPEventType = "tools_list_changed"
)
// MCPEvent represents an event in the MCP system
@@ -77,11 +77,12 @@ type MCPClientInfo struct {
}
var (
- mcpToolsOnce sync.Once
- mcpTools []tools.BaseTool
- mcpClients = csync.NewMap[string, *client.Client]()
- mcpStates = csync.NewMap[string, MCPClientInfo]()
- mcpBroker = pubsub.NewBroker[MCPEvent]()
+ mcpToolsOnce sync.Once
+ mcpTools = csync.NewMap[string, tools.BaseTool]()
+ mcpClient2Tools = csync.NewMap[string, []tools.BaseTool]()
+ mcpClients = csync.NewMap[string, *client.Client]()
+ mcpStates = csync.NewMap[string, MCPClientInfo]()
+ mcpBroker = pubsub.NewBroker[MCPEvent]()
)
type McpTool struct {
@@ -237,8 +238,12 @@ func updateMCPState(name string, state MCPState, err error, client *client.Clien
Client: client,
ToolCount: toolCount,
}
- if state == MCPStateConnected {
+ switch state {
+ case MCPStateConnected:
info.ConnectedAt = time.Now()
+ case MCPStateError:
+ updateMcpTools(name, nil)
+ mcpClients.Del(name)
}
mcpStates.Set(name, info)
@@ -252,6 +257,14 @@ func updateMCPState(name string, state MCPState, err error, client *client.Clien
})
}
+// publishMCPEventToolsListChanged publishes a tool list changed event
+func publishMCPEventToolsListChanged(name string) {
+ mcpBroker.Publish(pubsub.UpdatedEvent, MCPEvent{
+ Type: MCPEventToolsListChanged,
+ Name: name,
+ })
+}
+
// CloseMCPClients closes all MCP clients. This should be called during application shutdown.
func CloseMCPClients() error {
var errs []error
@@ -274,10 +287,8 @@ var mcpInitRequest = mcp.InitializeRequest{
},
}
-func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *config.Config) []tools.BaseTool {
+func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *config.Config) {
var wg sync.WaitGroup
- result := csync.NewSlice[tools.BaseTool]()
-
// Initialize states for all configured MCPs
for name, m := range cfg.MCP {
if m.Disabled {
@@ -316,6 +327,8 @@ func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *con
return
}
+ mcpClients.Set(name, c)
+
tools, err := getTools(ctx, name, permissions, c, cfg.WorkingDir())
if err != nil {
slog.Error("error listing tools", "error", err)
@@ -324,13 +337,26 @@ func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *con
return
}
+ updateMcpTools(name, tools)
mcpClients.Set(name, c)
updateMCPState(name, MCPStateConnected, nil, c, len(tools))
- result.Append(tools...)
}(name, m)
}
wg.Wait()
- return slices.Collect(result.Seq())
+}
+
+// updateMcpTools updates the global mcpTools and mcpClient2Tools maps
+func updateMcpTools(mcpName string, tools []tools.BaseTool) {
+ if len(tools) == 0 {
+ mcpClient2Tools.Del(mcpName)
+ } else {
+ mcpClient2Tools.Set(mcpName, tools)
+ }
+ for _, tools := range mcpClient2Tools.Seq2() {
+ for _, t := range tools {
+ mcpTools.Set(t.Name(), t)
+ }
+ }
}
func createAndInitializeClient(ctx context.Context, name string, m config.MCPConfig, resolver config.VariableResolver) (*client.Client, error) {
@@ -341,11 +367,22 @@ func createAndInitializeClient(ctx context.Context, name string, m config.MCPCon
return nil, err
}
+ c.OnNotification(func(n mcp.JSONRPCNotification) {
+ slog.Debug("Received MCP notification", "name", name, "notification", n)
+ switch n.Method {
+ case "notifications/tools/list_changed":
+ publishMCPEventToolsListChanged(name)
+ default:
+ slog.Debug("Unhandled MCP notification", "name", name, "method", n.Method)
+ }
+ })
+
// XXX: ideally we should be able to use context.WithTimeout here, but,
// the SSE MCP client will start failing once that context is canceled.
timeout := mcpTimeout(m)
mcpCtx, cancel := context.WithCancel(ctx)
cancelTimer := time.AfterFunc(timeout, cancel)
+
if err := c.Start(mcpCtx); err != nil {
updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
slog.Error("error starting mcp client", "error", err, "name", name)
@@ -353,6 +390,7 @@ func createAndInitializeClient(ctx context.Context, name string, m config.MCPCon
cancel()
return nil, err
}
+
if _, err := c.Initialize(mcpCtx, mcpInitRequest); err != nil {
updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
slog.Error("error initializing mcp client", "error", err, "name", name)
@@ -360,6 +398,7 @@ func createAndInitializeClient(ctx context.Context, name string, m config.MCPCon
cancel()
return nil, err
}
+
cancelTimer.Stop()
slog.Info("Initialized mcp client", "name", name)
return c, nil
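The NewLazyMap constructor above relies on a small but legal Go trick: the write lock is taken in the constructor and released from the loader goroutine (sync mutexes are not tied to the goroutine that locked them), so the first reader simply blocks until loading finishes. A self-contained sketch of that pattern, independent of the csync package:

package main

import (
	"fmt"
	"sync"
	"time"
)

// lazyMap mirrors the csync.NewLazyMap idea: readers block until the loader
// goroutine has populated the map and released the write lock.
type lazyMap[K comparable, V any] struct {
	mu    sync.RWMutex
	inner map[K]V
}

func newLazyMap[K comparable, V any](load func() map[K]V) *lazyMap[K, V] {
	m := &lazyMap[K, V]{}
	m.mu.Lock() // held across the goroutine boundary until load returns
	go func() {
		m.inner = load()
		m.mu.Unlock() // unlocking from another goroutine is allowed for sync.(RW)Mutex
	}()
	return m
}

func (m *lazyMap[K, V]) Get(k K) (V, bool) {
	m.mu.RLock() // blocks while the loader still holds the write lock
	defer m.mu.RUnlock()
	v, ok := m.inner[k]
	return v, ok
}

func main() {
	m := newLazyMap(func() map[string]int {
		time.Sleep(200 * time.Millisecond) // stand-in for slow MCP tool discovery
		return map[string]int{"bash": 1, "edit": 2}
	})
	// This call waits for the loader instead of racing it.
	if v, ok := m.Get("bash"); ok {
		fmt.Println("bash ->", v)
	}
}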
From d102102ce498e69b4d22d0d5fbca372a941b42ec Mon Sep 17 00:00:00 2001
From: Christian Rocha
Date: Tue, 30 Sep 2025 17:04:59 -0400
Subject: [PATCH 193/236] chore(task): annotate tags during release
---
Taskfile.yaml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/Taskfile.yaml b/Taskfile.yaml
index 54b50a68217b6ff66ddf1de9a28a8f45d224fefc..0739cab998f34c7b4129b0765b9b225a2455f8ae 100644
--- a/Taskfile.yaml
+++ b/Taskfile.yaml
@@ -90,7 +90,7 @@ tasks:
vars:
NEXT:
sh: go run github.com/caarlos0/svu/v3@latest next --always
- prompt: "This will release {{.NEXT}}. Continue?"
+ prompt: "This will release {{.NEXT}}. Please make sure you've fetch tags. Continue?"
preconditions:
- sh: '[ $(git symbolic-ref --short HEAD) = "main" ]'
msg: Not on main branch
@@ -99,6 +99,6 @@ tasks:
cmds:
- git commit --allow-empty -m "{{.NEXT}}"
- git tag -d nightly
- - git tag --sign {{.NEXT}} {{.CLI_ARGS}}
+ - git tag --annotate --sign {{.NEXT}} {{.CLI_ARGS}}
- echo "Pushing {{.NEXT}}..."
- git push origin --tags
From c96abaed6b4115352e490129006dcfd165b49cba Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 1 Oct 2025 11:16:10 -0300
Subject: [PATCH 194/236] feat: limit filepath walk, automatic low limits when
not git repo (#1052)
Signed-off-by: Carlos Alexandro Becker
---
internal/config/config.go | 35 ++++++++
internal/config/load.go | 27 ++++++
internal/fsext/fileutil.go | 65 ++++----------
internal/fsext/fileutil_test.go | 90 +++++++++----------
internal/fsext/ignore_test.go | 8 +-
internal/fsext/lookup_test.go | 64 ++-----------
internal/fsext/ls.go | 33 ++++---
internal/fsext/ls_test.go | 73 +++++++--------
internal/llm/prompt/coder.go | 2 +-
internal/llm/tools/ls.go | 61 +++++++------
internal/tui/components/chat/editor/editor.go | 4 +-
schema.json | 72 ++++++++++++++-
12 files changed, 296 insertions(+), 238 deletions(-)
diff --git a/internal/config/config.go b/internal/config/config.go
index fc5d62ef1c361c4e4aae29a2683ed92c8e76fd9d..858fa1c47b33f6a5e6bafb81b4799ea5739736f9 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -131,6 +131,19 @@ type TUIOptions struct {
CompactMode bool `json:"compact_mode,omitempty" jsonschema:"description=Enable compact mode for the TUI interface,default=false"`
DiffMode string `json:"diff_mode,omitempty" jsonschema:"description=Diff mode for the TUI interface,enum=unified,enum=split"`
// Here we can add themes later or any TUI related options
+ //
+
+ Completions Completions `json:"completions,omitzero" jsonschema:"description=Completions UI options"`
+}
+
+// Completions defines options for the completions UI.
+type Completions struct {
+ MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
+ MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
+}
+
+func (c Completions) Limits() (depth, items int) {
+ return ptrValOr(c.MaxDepth, -1), ptrValOr(c.MaxItems, -1)
}
type Permissions struct {
@@ -246,6 +259,19 @@ type Agent struct {
ContextPaths []string `json:"context_paths,omitempty"`
}
+type Tools struct {
+ Ls ToolLs `json:"ls,omitzero"`
+}
+
+type ToolLs struct {
+ MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
+ MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
+}
+
+func (t ToolLs) Limits() (depth, items int) {
+ return ptrValOr(t.MaxDepth, -1), ptrValOr(t.MaxItems, -1)
+}
+
// Config holds the configuration for crush.
type Config struct {
Schema string `json:"$schema,omitempty"`
@@ -264,6 +290,8 @@ type Config struct {
Permissions *Permissions `json:"permissions,omitempty" jsonschema:"description=Permission settings for tool usage"`
+ Tools Tools `json:"tools,omitzero" jsonschema:"description=Tool configurations"`
+
// Internal
workingDir string `json:"-"`
// TODO: most likely remove this concept when I come back to it
@@ -579,3 +607,10 @@ func resolveEnvs(envs map[string]string) []string {
}
return res
}
+
+func ptrValOr[T any](t *T, el T) T {
+ if t == nil {
+ return el
+ }
+ return *t
+}
diff --git a/internal/config/load.go b/internal/config/load.go
index b36813084049a89b5e67d79d6342335cb85230e3..9fb45028d6936a652f2657f51707b6cde73f4084 100644
--- a/internal/config/load.go
+++ b/internal/config/load.go
@@ -1,12 +1,14 @@
package config
import (
+ "context"
"encoding/json"
"fmt"
"io"
"log/slog"
"maps"
"os"
+ "os/exec"
"path/filepath"
"runtime"
"slices"
@@ -62,6 +64,16 @@ func Load(workingDir, dataDir string, debug bool) (*Config, error) {
cfg.Options.Debug,
)
+ if !isInsideWorktree() {
+ const depth = 2
+ const items = 100
+ slog.Warn("No git repository detected in working directory, will limit file walk operations", "depth", depth, "items", items)
+ assignIfNil(&cfg.Tools.Ls.MaxDepth, depth)
+ assignIfNil(&cfg.Tools.Ls.MaxItems, items)
+ assignIfNil(&cfg.Options.TUI.Completions.MaxDepth, depth)
+ assignIfNil(&cfg.Options.TUI.Completions.MaxItems, items)
+ }
+
// Load known providers, this loads the config from catwalk
providers, err := Providers(cfg)
if err != nil {
@@ -638,3 +650,18 @@ func GlobalConfigData() string {
return filepath.Join(home.Dir(), ".local", "share", appName, fmt.Sprintf("%s.json", appName))
}
+
+func assignIfNil[T any](ptr **T, val T) {
+ if *ptr == nil {
+ *ptr = &val
+ }
+}
+
+func isInsideWorktree() bool {
+ bts, err := exec.CommandContext(
+ context.Background(),
+ "git", "rev-parse",
+ "--is-inside-work-tree",
+ ).CombinedOutput()
+ return err == nil && strings.TrimSpace(string(bts)) == "true"
+}
diff --git a/internal/fsext/fileutil.go b/internal/fsext/fileutil.go
index 30c552324452cbce4436701506419916c014d7f9..182b145a609311d20544d399c1212097c7519dda 100644
--- a/internal/fsext/fileutil.go
+++ b/internal/fsext/fileutil.go
@@ -1,15 +1,17 @@
package fsext
import (
+ "errors"
"fmt"
"os"
"path/filepath"
- "sort"
+ "slices"
"strings"
"time"
"github.com/bmatcuk/doublestar/v4"
"github.com/charlievieth/fastwalk"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/home"
)
@@ -80,10 +82,9 @@ func GlobWithDoubleStar(pattern, searchPath string, limit int) ([]string, bool,
pattern = filepath.ToSlash(pattern)
walker := NewFastGlobWalker(searchPath)
- var matches []FileInfo
+ found := csync.NewSlice[FileInfo]()
conf := fastwalk.Config{
- Follow: true,
- // Use forward slashes when running a Windows binary under WSL or MSYS
+ Follow: true,
ToSlash: fastwalk.DefaultToSlash(),
Sort: fastwalk.SortFilesFirst,
}
@@ -121,31 +122,26 @@ func GlobWithDoubleStar(pattern, searchPath string, limit int) ([]string, bool,
return nil
}
- matches = append(matches, FileInfo{Path: path, ModTime: info.ModTime()})
- if limit > 0 && len(matches) >= limit*2 {
+ found.Append(FileInfo{Path: path, ModTime: info.ModTime()})
+ if limit > 0 && found.Len() >= limit*2 { // NOTE: why x2?
return filepath.SkipAll
}
return nil
})
- if err != nil {
+ if err != nil && !errors.Is(err, filepath.SkipAll) {
return nil, false, fmt.Errorf("fastwalk error: %w", err)
}
- sort.Slice(matches, func(i, j int) bool {
- return matches[i].ModTime.After(matches[j].ModTime)
+ matches := slices.SortedFunc(found.Seq(), func(a, b FileInfo) int {
+ return b.ModTime.Compare(a.ModTime)
})
-
- truncated := false
- if limit > 0 && len(matches) > limit {
- matches = matches[:limit]
- truncated = true
- }
+ matches, truncated := truncate(matches, limit)
results := make([]string, len(matches))
for i, m := range matches {
results[i] = m.Path
}
- return results, truncated, nil
+ return results, truncated || errors.Is(err, filepath.SkipAll), nil
}
// ShouldExcludeFile checks if a file should be excluded from processing
@@ -155,36 +151,6 @@ func ShouldExcludeFile(rootPath, filePath string) bool {
shouldIgnore(filePath, nil)
}
-// WalkDirectories walks a directory tree and calls the provided function for each directory,
-// respecting hierarchical .gitignore/.crushignore files like git does.
-func WalkDirectories(rootPath string, fn func(path string, d os.DirEntry, err error) error) error {
- dl := NewDirectoryLister(rootPath)
-
- conf := fastwalk.Config{
- Follow: true,
- ToSlash: fastwalk.DefaultToSlash(),
- Sort: fastwalk.SortDirsFirst,
- }
-
- return fastwalk.Walk(&conf, rootPath, func(path string, d os.DirEntry, err error) error {
- if err != nil {
- return fn(path, d, err)
- }
-
- // Only process directories
- if !d.IsDir() {
- return nil
- }
-
- // Check if directory should be ignored
- if dl.shouldIgnore(path, nil) {
- return filepath.SkipDir
- }
-
- return fn(path, d, err)
- })
-}
-
func PrettyPath(path string) string {
return home.Short(path)
}
@@ -248,3 +214,10 @@ func ToWindowsLineEndings(content string) (string, bool) {
}
return content, false
}
+
+func truncate[T any](input []T, limit int) ([]T, bool) {
+ if limit > 0 && len(input) > limit {
+ return input[:limit], true
+ }
+ return input, false
+}
diff --git a/internal/fsext/fileutil_test.go b/internal/fsext/fileutil_test.go
index 1779bfb9312f7834748badaf72a47563878f21da..3788fe5477b082dec496275a8ac028788d55fc64 100644
--- a/internal/fsext/fileutil_test.go
+++ b/internal/fsext/fileutil_test.go
@@ -5,7 +5,6 @@ import (
"os"
"path/filepath"
"testing"
- "testing/synctest"
"time"
"github.com/stretchr/testify/require"
@@ -148,37 +147,35 @@ func TestGlobWithDoubleStar(t *testing.T) {
require.NoError(t, err)
require.False(t, truncated)
- require.Equal(t, matches, []string{file1})
+ require.Equal(t, []string{file1}, matches)
})
t.Run("returns results sorted by modification time (newest first)", func(t *testing.T) {
- synctest.Test(t, func(t *testing.T) {
- testDir := t.TempDir()
+ testDir := t.TempDir()
- file1 := filepath.Join(testDir, "file1.txt")
- require.NoError(t, os.WriteFile(file1, []byte("first"), 0o644))
+ file1 := filepath.Join(testDir, "file1.txt")
+ require.NoError(t, os.WriteFile(file1, []byte("first"), 0o644))
- file2 := filepath.Join(testDir, "file2.txt")
- require.NoError(t, os.WriteFile(file2, []byte("second"), 0o644))
+ file2 := filepath.Join(testDir, "file2.txt")
+ require.NoError(t, os.WriteFile(file2, []byte("second"), 0o644))
- file3 := filepath.Join(testDir, "file3.txt")
- require.NoError(t, os.WriteFile(file3, []byte("third"), 0o644))
+ file3 := filepath.Join(testDir, "file3.txt")
+ require.NoError(t, os.WriteFile(file3, []byte("third"), 0o644))
- base := time.Now()
- m1 := base
- m2 := base.Add(1 * time.Millisecond)
- m3 := base.Add(2 * time.Millisecond)
+ base := time.Now()
+ m1 := base
+ m2 := base.Add(10 * time.Hour)
+ m3 := base.Add(20 * time.Hour)
- require.NoError(t, os.Chtimes(file1, m1, m1))
- require.NoError(t, os.Chtimes(file2, m2, m2))
- require.NoError(t, os.Chtimes(file3, m3, m3))
+ require.NoError(t, os.Chtimes(file1, m1, m1))
+ require.NoError(t, os.Chtimes(file2, m2, m2))
+ require.NoError(t, os.Chtimes(file3, m3, m3))
- matches, truncated, err := GlobWithDoubleStar("*.txt", testDir, 0)
- require.NoError(t, err)
- require.False(t, truncated)
+ matches, truncated, err := GlobWithDoubleStar("*.txt", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
- require.Equal(t, matches, []string{file3, file2, file1})
- })
+ require.Equal(t, []string{file3, file2, file1}, matches)
})
t.Run("handles empty directory", func(t *testing.T) {
@@ -188,7 +185,7 @@ func TestGlobWithDoubleStar(t *testing.T) {
require.NoError(t, err)
require.False(t, truncated)
// Even empty directories should return the directory itself
- require.Equal(t, matches, []string{testDir})
+ require.Equal(t, []string{testDir}, matches)
})
t.Run("handles non-existent search path", func(t *testing.T) {
@@ -235,39 +232,38 @@ func TestGlobWithDoubleStar(t *testing.T) {
matches, truncated, err = GlobWithDoubleStar("*.txt", testDir, 0)
require.NoError(t, err)
require.False(t, truncated)
- require.Equal(t, matches, []string{goodFile})
+ require.Equal(t, []string{goodFile}, matches)
})
t.Run("handles mixed file and directory matching with sorting", func(t *testing.T) {
- synctest.Test(t, func(t *testing.T) {
- testDir := t.TempDir()
+ testDir := t.TempDir()
- oldestFile := filepath.Join(testDir, "old.test")
- require.NoError(t, os.WriteFile(oldestFile, []byte("old"), 0o644))
+ oldestFile := filepath.Join(testDir, "old.rs")
+ require.NoError(t, os.WriteFile(oldestFile, []byte("old"), 0o644))
- middleDir := filepath.Join(testDir, "mid.test")
- require.NoError(t, os.MkdirAll(middleDir, 0o755))
+ middleDir := filepath.Join(testDir, "mid.rs")
+ require.NoError(t, os.MkdirAll(middleDir, 0o755))
- newestFile := filepath.Join(testDir, "new.test")
- require.NoError(t, os.WriteFile(newestFile, []byte("new"), 0o644))
+ newestFile := filepath.Join(testDir, "new.rs")
+ require.NoError(t, os.WriteFile(newestFile, []byte("new"), 0o644))
- base := time.Now()
- tOldest := base
- tMiddle := base.Add(1 * time.Millisecond)
- tNewest := base.Add(2 * time.Millisecond)
+ base := time.Now()
+ tOldest := base
+ tMiddle := base.Add(10 * time.Hour)
+ tNewest := base.Add(20 * time.Hour)
- // Reverse the expected order
- require.NoError(t, os.Chtimes(newestFile, tOldest, tOldest))
- require.NoError(t, os.Chtimes(middleDir, tMiddle, tMiddle))
- require.NoError(t, os.Chtimes(oldestFile, tNewest, tNewest))
+ // Reverse the expected order
+ require.NoError(t, os.Chtimes(newestFile, tOldest, tOldest))
+ require.NoError(t, os.Chtimes(middleDir, tMiddle, tMiddle))
+ require.NoError(t, os.Chtimes(oldestFile, tNewest, tNewest))
- matches, truncated, err := GlobWithDoubleStar("*.test", testDir, 0)
- require.NoError(t, err)
- require.False(t, truncated)
+ matches, truncated, err := GlobWithDoubleStar("*.rs", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+ require.Len(t, matches, 3)
- // Results should be sorted by mod time, but we set the oldestFile
- // to have the most recent mod time
- require.Equal(t, matches, []string{oldestFile, middleDir, newestFile})
- })
+ // Results should be sorted by mod time, but we set the oldestFile
+ // to have the most recent mod time
+ require.Equal(t, []string{oldestFile, middleDir, newestFile}, matches)
})
}
diff --git a/internal/fsext/ignore_test.go b/internal/fsext/ignore_test.go
index 1b517ec0408fe69726bf4fa4bbb95c2a206e548c..a652f3a285fd256840fb3a711fb36e0217a43e28 100644
--- a/internal/fsext/ignore_test.go
+++ b/internal/fsext/ignore_test.go
@@ -9,14 +9,8 @@ import (
)
func TestCrushIgnore(t *testing.T) {
- // Create a temporary directory for testing
tempDir := t.TempDir()
-
- // Change to temp directory
- oldWd, _ := os.Getwd()
- err := os.Chdir(tempDir)
- require.NoError(t, err)
- defer os.Chdir(oldWd)
+ t.Chdir(tempDir)
// Create test files
require.NoError(t, os.WriteFile("test1.txt", []byte("test"), 0o644))
diff --git a/internal/fsext/lookup_test.go b/internal/fsext/lookup_test.go
index b7604331673aad0d65d34e046901bc9eae722195..97c167f37d8ebcf4d19124367955874e7f816b67 100644
--- a/internal/fsext/lookup_test.go
+++ b/internal/fsext/lookup_test.go
@@ -12,15 +12,7 @@ import (
func TestLookupClosest(t *testing.T) {
tempDir := t.TempDir()
-
- // Change to temp directory
- oldWd, _ := os.Getwd()
- err := os.Chdir(tempDir)
- require.NoError(t, err)
-
- t.Cleanup(func() {
- os.Chdir(oldWd)
- })
+ t.Chdir(tempDir)
t.Run("target found in starting directory", func(t *testing.T) {
testDir := t.TempDir()
@@ -114,24 +106,15 @@ func TestLookupClosest(t *testing.T) {
})
t.Run("relative path handling", func(t *testing.T) {
- testDir := t.TempDir()
-
- // Change to test directory
- oldWd, _ := os.Getwd()
- err := os.Chdir(testDir)
- require.NoError(t, err)
- defer os.Chdir(oldWd)
-
// Create target file in current directory
- err = os.WriteFile("target.txt", []byte("test"), 0o644)
- require.NoError(t, err)
+ require.NoError(t, os.WriteFile("target.txt", []byte("test"), 0o644))
// Search using relative path
foundPath, found := LookupClosest(".", "target.txt")
require.True(t, found)
// Resolve symlinks to handle macOS /private/var vs /var discrepancy
- expectedPath, err := filepath.EvalSymlinks(filepath.Join(testDir, "target.txt"))
+ expectedPath, err := filepath.EvalSymlinks(filepath.Join(tempDir, "target.txt"))
require.NoError(t, err)
actualPath, err := filepath.EvalSymlinks(foundPath)
require.NoError(t, err)
@@ -145,15 +128,7 @@ func TestLookupClosestWithOwnership(t *testing.T) {
// This test focuses on the basic functionality when ownership checks pass.
tempDir := t.TempDir()
-
- // Change to temp directory
- oldWd, _ := os.Getwd()
- err := os.Chdir(tempDir)
- require.NoError(t, err)
-
- t.Cleanup(func() {
- os.Chdir(oldWd)
- })
+ t.Chdir(tempDir)
t.Run("search respects same ownership", func(t *testing.T) {
testDir := t.TempDir()
@@ -177,15 +152,7 @@ func TestLookupClosestWithOwnership(t *testing.T) {
func TestLookup(t *testing.T) {
tempDir := t.TempDir()
-
- // Change to temp directory
- oldWd, _ := os.Getwd()
- err := os.Chdir(tempDir)
- require.NoError(t, err)
-
- t.Cleanup(func() {
- os.Chdir(oldWd)
- })
+ t.Chdir(tempDir)
t.Run("no targets returns empty slice", func(t *testing.T) {
testDir := t.TempDir()
@@ -358,22 +325,9 @@ func TestLookup(t *testing.T) {
})
t.Run("relative path handling", func(t *testing.T) {
- testDir := t.TempDir()
-
- // Change to test directory
- oldWd, _ := os.Getwd()
- err := os.Chdir(testDir)
- require.NoError(t, err)
-
- t.Cleanup(func() {
- os.Chdir(oldWd)
- })
-
// Create target files in current directory
- err = os.WriteFile("target1.txt", []byte("test1"), 0o644)
- require.NoError(t, err)
- err = os.WriteFile("target2.txt", []byte("test2"), 0o644)
- require.NoError(t, err)
+ require.NoError(t, os.WriteFile("target1.txt", []byte("test1"), 0o644))
+ require.NoError(t, os.WriteFile("target2.txt", []byte("test2"), 0o644))
// Search using relative path
found, err := Lookup(".", "target1.txt", "target2.txt")
@@ -381,9 +335,9 @@ func TestLookup(t *testing.T) {
require.Len(t, found, 2)
// Resolve symlinks to handle macOS /private/var vs /var discrepancy
- expectedPath1, err := filepath.EvalSymlinks(filepath.Join(testDir, "target1.txt"))
+ expectedPath1, err := filepath.EvalSymlinks(filepath.Join(tempDir, "target1.txt"))
require.NoError(t, err)
- expectedPath2, err := filepath.EvalSymlinks(filepath.Join(testDir, "target2.txt"))
+ expectedPath2, err := filepath.EvalSymlinks(filepath.Join(tempDir, "target2.txt"))
require.NoError(t, err)
// Check that found paths match expected paths (order may vary)
diff --git a/internal/fsext/ls.go b/internal/fsext/ls.go
index 2027f734c4156572b134c012b2e3c143c364bd29..80d25a57f19867a4ca2af44df7e691bb9d109496 100644
--- a/internal/fsext/ls.go
+++ b/internal/fsext/ls.go
@@ -1,6 +1,7 @@
package fsext
import (
+ "errors"
"log/slog"
"os"
"path/filepath"
@@ -71,6 +72,11 @@ var commonIgnorePatterns = sync.OnceValue(func() ignore.IgnoreParser {
// Crush
".crush",
+
+ // macOS stuff
+ "OrbStack",
+ ".local",
+ ".share",
)
})
@@ -200,16 +206,17 @@ func (dl *directoryLister) getIgnore(path string) ignore.IgnoreParser {
}
// ListDirectory lists files and directories in the specified path,
-func ListDirectory(initialPath string, ignorePatterns []string, limit int) ([]string, bool, error) {
- results := csync.NewSlice[string]()
- truncated := false
+func ListDirectory(initialPath string, ignorePatterns []string, depth, limit int) ([]string, bool, error) {
+ found := csync.NewSlice[string]()
dl := NewDirectoryLister(initialPath)
+ slog.Warn("listing directory", "path", initialPath, "depth", depth, "limit", limit, "ignorePatterns", ignorePatterns)
+
conf := fastwalk.Config{
- Follow: true,
- // Use forward slashes when running a Windows binary under WSL or MSYS
- ToSlash: fastwalk.DefaultToSlash(),
- Sort: fastwalk.SortDirsFirst,
+ Follow: true,
+ ToSlash: fastwalk.DefaultToSlash(),
+ Sort: fastwalk.SortDirsFirst,
+ MaxDepth: depth,
}
err := fastwalk.Walk(&conf, initialPath, func(path string, d os.DirEntry, err error) error {
@@ -228,19 +235,19 @@ func ListDirectory(initialPath string, ignorePatterns []string, limit int) ([]st
if d.IsDir() {
path = path + string(filepath.Separator)
}
- results.Append(path)
+ found.Append(path)
}
- if limit > 0 && results.Len() >= limit {
- truncated = true
+ if limit > 0 && found.Len() >= limit {
return filepath.SkipAll
}
return nil
})
- if err != nil && results.Len() == 0 {
- return nil, truncated, err
+ if err != nil && !errors.Is(err, filepath.SkipAll) {
+ return nil, false, err
}
- return slices.Collect(results.Seq()), truncated, nil
+ matches, truncated := truncate(slices.Collect(found.Seq()), limit)
+ return matches, truncated || errors.Is(err, filepath.SkipAll), nil
}
diff --git a/internal/fsext/ls_test.go b/internal/fsext/ls_test.go
index a74ca3276c9af0edac6adbe1bd6e367d952af492..7bdad17fc46955d49fa08f7488d6efe8239294cb 100644
--- a/internal/fsext/ls_test.go
+++ b/internal/fsext/ls_test.go
@@ -5,26 +5,11 @@ import (
"path/filepath"
"testing"
- "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
-func chdir(t *testing.T, dir string) {
- original, err := os.Getwd()
- require.NoError(t, err)
-
- err = os.Chdir(dir)
- require.NoError(t, err)
-
- t.Cleanup(func() {
- err := os.Chdir(original)
- require.NoError(t, err)
- })
-}
-
func TestListDirectory(t *testing.T) {
- tempDir := t.TempDir()
- chdir(t, tempDir)
+ tmp := t.TempDir()
testFiles := map[string]string{
"regular.txt": "content",
@@ -35,32 +20,40 @@ func TestListDirectory(t *testing.T) {
"build.log": "build output",
}
- for filePath, content := range testFiles {
- dir := filepath.Dir(filePath)
- if dir != "." {
- require.NoError(t, os.MkdirAll(dir, 0o755))
- }
-
- err := os.WriteFile(filePath, []byte(content), 0o644)
- require.NoError(t, err)
+ for name, content := range testFiles {
+ fp := filepath.Join(tmp, name)
+ dir := filepath.Dir(fp)
+ require.NoError(t, os.MkdirAll(dir, 0o755))
+ require.NoError(t, os.WriteFile(fp, []byte(content), 0o644))
}
- files, truncated, err := ListDirectory(".", nil, 0)
- require.NoError(t, err)
- assert.False(t, truncated)
- assert.Equal(t, len(files), 4)
+ t.Run("no limit", func(t *testing.T) {
+ files, truncated, err := ListDirectory(tmp, nil, -1, -1)
+ require.NoError(t, err)
+ require.False(t, truncated)
+ require.Len(t, files, 4)
+ require.ElementsMatch(t, []string{
+ "regular.txt",
+ "subdir",
+ "subdir/.another",
+ "subdir/file.go",
+ }, relPaths(t, files, tmp))
+ })
+ t.Run("limit", func(t *testing.T) {
+ files, truncated, err := ListDirectory(tmp, nil, -1, 2)
+ require.NoError(t, err)
+ require.True(t, truncated)
+ require.Len(t, files, 2)
+ })
+}
- fileSet := make(map[string]bool)
- for _, file := range files {
- fileSet[filepath.ToSlash(file)] = true
+func relPaths(tb testing.TB, in []string, base string) []string {
+ tb.Helper()
+ out := make([]string, 0, len(in))
+ for _, p := range in {
+ rel, err := filepath.Rel(base, p)
+ require.NoError(tb, err)
+ out = append(out, filepath.ToSlash(rel))
}
-
- assert.True(t, fileSet["./regular.txt"])
- assert.True(t, fileSet["./subdir/"])
- assert.True(t, fileSet["./subdir/file.go"])
- assert.True(t, fileSet["./regular.txt"])
-
- assert.False(t, fileSet["./.hidden"])
- assert.False(t, fileSet["./.gitignore"])
- assert.False(t, fileSet["./build.log"])
+ return out
}
diff --git a/internal/llm/prompt/coder.go b/internal/llm/prompt/coder.go
index 90e5a17191f346a5df53622e1826bc04214ddbfc..57ed088b22de03fe875ad0822f159b35eb36a834 100644
--- a/internal/llm/prompt/coder.go
+++ b/internal/llm/prompt/coder.go
@@ -53,7 +53,7 @@ func getEnvironmentInfo() string {
isGit := isGitRepo(cwd)
platform := runtime.GOOS
date := time.Now().Format("1/2/2006")
- output, _ := tools.ListDirectoryTree(cwd, nil)
+ output, _, _ := tools.ListDirectoryTree(cwd, tools.LSParams{})
return fmt.Sprintf(`Here is useful information about the environment you are running in:
Working directory: %s
diff --git a/internal/llm/tools/ls.go b/internal/llm/tools/ls.go
index f421e69e7af938801aa9c3affacfe30ed669fabc..305f7f10249594ff06ac008a8bf81145d7d834de 100644
--- a/internal/llm/tools/ls.go
+++ b/internal/llm/tools/ls.go
@@ -1,6 +1,7 @@
package tools
import (
+ "cmp"
"context"
_ "embed"
"encoding/json"
@@ -9,6 +10,7 @@ import (
"path/filepath"
"strings"
+ "github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/permission"
)
@@ -16,11 +18,13 @@ import (
type LSParams struct {
Path string `json:"path"`
Ignore []string `json:"ignore"`
+ Depth int `json:"depth"`
}
type LSPermissionsParams struct {
Path string `json:"path"`
Ignore []string `json:"ignore"`
+ Depth int `json:"depth"`
}
type TreeNode struct {
@@ -42,7 +46,7 @@ type lsTool struct {
const (
LSToolName = "ls"
- MaxLSFiles = 1000
+ maxLSFiles = 1000
)
//go:embed ls.md
@@ -68,6 +72,10 @@ func (l *lsTool) Info() ToolInfo {
"type": "string",
"description": "The path to the directory to list (defaults to current working directory)",
},
+ "depth": map[string]any{
+ "type": "integer",
+ "description": "The maximum depth to traverse",
+ },
"ignore": map[string]any{
"type": "array",
"description": "List of glob patterns to ignore",
@@ -86,13 +94,7 @@ func (l *lsTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
return NewTextErrorResponse(fmt.Sprintf("error parsing parameters: %s", err)), nil
}
- searchPath := params.Path
- if searchPath == "" {
- searchPath = l.workingDir
- }
-
- var err error
- searchPath, err = fsext.Expand(searchPath)
+ searchPath, err := fsext.Expand(cmp.Or(params.Path, l.workingDir))
if err != nil {
return ToolResponse{}, fmt.Errorf("error expanding path: %w", err)
}
@@ -137,44 +139,49 @@ func (l *lsTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
}
}
- output, err := ListDirectoryTree(searchPath, params.Ignore)
+ output, metadata, err := ListDirectoryTree(searchPath, params)
if err != nil {
return ToolResponse{}, err
}
- // Get file count for metadata
- files, truncated, err := fsext.ListDirectory(searchPath, params.Ignore, MaxLSFiles)
- if err != nil {
- return ToolResponse{}, fmt.Errorf("error listing directory for metadata: %w", err)
- }
-
return WithResponseMetadata(
NewTextResponse(output),
- LSResponseMetadata{
- NumberOfFiles: len(files),
- Truncated: truncated,
- },
+ metadata,
), nil
}
-func ListDirectoryTree(searchPath string, ignore []string) (string, error) {
+func ListDirectoryTree(searchPath string, params LSParams) (string, LSResponseMetadata, error) {
if _, err := os.Stat(searchPath); os.IsNotExist(err) {
- return "", fmt.Errorf("path does not exist: %s", searchPath)
+ return "", LSResponseMetadata{}, fmt.Errorf("path does not exist: %s", searchPath)
}
- files, truncated, err := fsext.ListDirectory(searchPath, ignore, MaxLSFiles)
+ ls := config.Get().Tools.Ls
+ depth, limit := ls.Limits()
+ maxFiles := min(limit, maxLSFiles)
+ files, truncated, err := fsext.ListDirectory(
+ searchPath,
+ params.Ignore,
+ cmp.Or(params.Depth, depth),
+ maxFiles,
+ )
if err != nil {
- return "", fmt.Errorf("error listing directory: %w", err)
+ return "", LSResponseMetadata{}, fmt.Errorf("error listing directory: %w", err)
}
+ metadata := LSResponseMetadata{
+ NumberOfFiles: len(files),
+ Truncated: truncated,
+ }
tree := createFileTree(files, searchPath)
- output := printTree(tree, searchPath)
+ var output string
if truncated {
- output = fmt.Sprintf("There are more than %d files in the directory. Use a more specific path or use the Glob tool to find specific files. The first %d files and directories are included below:\n\n%s", MaxLSFiles, MaxLSFiles, output)
+ output = fmt.Sprintf("There are more than %d files in the directory. Use a more specific path or use the Glob tool to find specific files. The first %[1]d files and directories are included below.\n", maxFiles)
}
-
- return output, nil
+ if depth > 0 {
+ output = fmt.Sprintf("The directory tree is shown up to a depth of %d. Use a higher depth and a specific path to see more levels.\n", cmp.Or(params.Depth, depth))
+ }
+ return output + "\n" + printTree(tree, searchPath), metadata, nil
}
func createFileTree(sortedPaths []string, rootPath string) []*TreeNode {
diff --git a/internal/tui/components/chat/editor/editor.go b/internal/tui/components/chat/editor/editor.go
index 86390611f6115fc14def1e8a7713b252b0d6a59d..f70a0a3dbe63a9473f552efa233e03bd4efc0ee1 100644
--- a/internal/tui/components/chat/editor/editor.go
+++ b/internal/tui/components/chat/editor/editor.go
@@ -480,7 +480,9 @@ func (m *editorCmp) SetPosition(x, y int) tea.Cmd {
}
func (m *editorCmp) startCompletions() tea.Msg {
- files, _, _ := fsext.ListDirectory(".", nil, 0)
+ ls := m.app.Config().Options.TUI.Completions
+ depth, limit := ls.Limits()
+ files, _, _ := fsext.ListDirectory(".", nil, depth, limit)
slices.Sort(files)
completionItems := make([]completions.Completion, 0, len(files))
for _, file := range files {
diff --git a/schema.json b/schema.json
index deb65846fe30ca689779e36745b9a429082c452b..014155f1b1f22309ec6381f44c41e97b3b3825dc 100644
--- a/schema.json
+++ b/schema.json
@@ -19,6 +19,28 @@
"additionalProperties": false,
"type": "object"
},
+ "Completions": {
+ "properties": {
+ "max_depth": {
+ "type": "integer",
+ "description": "Maximum depth for the ls tool",
+ "default": 0,
+ "examples": [
+ 10
+ ]
+ },
+ "max_items": {
+ "type": "integer",
+ "description": "Maximum number of items to return for the ls tool",
+ "default": 1000,
+ "examples": [
+ 100
+ ]
+ }
+ },
+ "additionalProperties": false,
+ "type": "object"
+ },
"Config": {
"properties": {
"$schema": {
@@ -53,10 +75,17 @@
"permissions": {
"$ref": "#/$defs/Permissions",
"description": "Permission settings for tool usage"
+ },
+ "tools": {
+ "$ref": "#/$defs/Tools",
+ "description": "Tool configurations"
}
},
"additionalProperties": false,
- "type": "object"
+ "type": "object",
+ "required": [
+ "tools"
+ ]
},
"LSPConfig": {
"properties": {
@@ -484,10 +513,51 @@
"split"
],
"description": "Diff mode for the TUI interface"
+ },
+ "completions": {
+ "$ref": "#/$defs/Completions",
+ "description": "Completions UI options"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "completions"
+ ]
+ },
+ "ToolLs": {
+ "properties": {
+ "max_depth": {
+ "type": "integer",
+ "description": "Maximum depth for the ls tool",
+ "default": 0,
+ "examples": [
+ 10
+ ]
+ },
+ "max_items": {
+ "type": "integer",
+ "description": "Maximum number of items to return for the ls tool",
+ "default": 1000,
+ "examples": [
+ 100
+ ]
}
},
"additionalProperties": false,
"type": "object"
+ },
+ "Tools": {
+ "properties": {
+ "ls": {
+ "$ref": "#/$defs/ToolLs"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "ls"
+ ]
}
}
}
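The new limits lean on pointer-typed config fields so that "unset" can be told apart from an explicit zero: assignIfNil only fills defaults into nil fields (here, the conservative non-git-repo caps), and ptrValOr collapses them back to plain ints at the call sites. A small sketch of the two helpers working together, using hypothetical values:

package main

import "fmt"

// ptrValOr and assignIfNil mirror the helpers added in this patch; pointer
// fields let the loader distinguish "user never set this" from "user set 0".
func ptrValOr[T any](t *T, fallback T) T {
	if t == nil {
		return fallback
	}
	return *t
}

func assignIfNil[T any](ptr **T, val T) {
	if *ptr == nil {
		*ptr = &val
	}
}

type lsLimits struct {
	MaxDepth *int
	MaxItems *int
}

func main() {
	userSet := 5
	cfg := lsLimits{MaxDepth: &userSet} // MaxItems left unset

	// Outside a git worktree the loader applies conservative caps, but only
	// to fields the user did not set explicitly.
	assignIfNil(&cfg.MaxDepth, 2)
	assignIfNil(&cfg.MaxItems, 100)

	depth := ptrValOr(cfg.MaxDepth, -1)
	items := ptrValOr(cfg.MaxItems, -1)
	fmt.Println(depth, items) // 5 100: the explicit depth wins, the cap fills the gap
}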
From f7ffd8a884cd07ad3b59ef00e79a50b787500bab Mon Sep 17 00:00:00 2001
From: Christian Rocha
Date: Tue, 5 Aug 2025 22:40:57 -0400
Subject: [PATCH 195/236] fix: don't suppress application-level panics
---
main.go | 13 ++-----------
1 file changed, 2 insertions(+), 11 deletions(-)
diff --git a/main.go b/main.go
index 49dbcd7d3c045ae1510d7ca2055fa480c6fadadf..e75cb03e3575cf902c2ff4b44ddd15e0405f0b60 100644
--- a/main.go
+++ b/main.go
@@ -3,23 +3,14 @@ package main
import (
"log/slog"
"net/http"
+ _ "net/http/pprof"
"os"
- _ "net/http/pprof" // profiling
-
- _ "github.com/joho/godotenv/autoload" // automatically load .env files
-
"github.com/charmbracelet/crush/internal/cmd"
- "github.com/charmbracelet/crush/internal/event"
- "github.com/charmbracelet/crush/internal/log"
+ _ "github.com/joho/godotenv/autoload"
)
func main() {
- defer log.RecoverPanic("main", func() {
- event.Flush()
- slog.Error("Application terminated due to unhandled panic")
- })
-
if os.Getenv("CRUSH_PROFILE") != "" {
go func() {
slog.Info("Serving pprof at localhost:6060")
From d0724b16aa37057635896d04d0032413cf7923c7 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 1 Oct 2025 15:15:46 -0300
Subject: [PATCH 196/236] feat(tui): progress bar (#1162)
Signed-off-by: Carlos Alexandro Becker
---
go.mod | 6 +++---
go.sum | 12 ++++++++----
internal/app/app.go | 9 ++++++---
internal/tui/tui.go | 7 +++++++
4 files changed, 24 insertions(+), 10 deletions(-)
diff --git a/go.mod b/go.mod
index 699233cdd52fe59165e8f9c44a85d1413f1bc4b6..4beabb5b74efb565432aa36329698169e8aaae8d 100644
--- a/go.mod
+++ b/go.mod
@@ -13,13 +13,13 @@ require (
github.com/bmatcuk/doublestar/v4 v4.9.1
github.com/charlievieth/fastwalk v1.0.14
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2
- github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e
+ github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250930175933-4cafc092c5e7
github.com/charmbracelet/catwalk v0.6.1
github.com/charmbracelet/fang v0.4.2
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250917201909-41ff0bf215ea
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706
- github.com/charmbracelet/x/ansi v0.10.1
+ github.com/charmbracelet/x/ansi v0.10.2
github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a
github.com/disintegration/imageorient v0.0.0-20180920195336-8147d86e83ec
@@ -104,7 +104,7 @@ require (
github.com/lucasb-eyer/go-colorful v1.3.0
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
- github.com/mattn/go-runewidth v0.0.16 // indirect
+ github.com/mattn/go-runewidth v0.0.17 // indirect
github.com/mfridman/interpolate v0.0.2 // indirect
github.com/microcosm-cc/bluemonday v1.0.27 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
diff --git a/go.sum b/go.sum
index f54651f8f6b5fa0e6f9f4a3ee53a61d0eec0970c..55872255a6bf56f4ef8093dc277fc17c6df9092a 100644
--- a/go.sum
+++ b/go.sum
@@ -80,6 +80,8 @@ github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e h1:4BBnKWFwJ5FLyhw/ijFxKE04i9rubr8WIPR1kjO57iA=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e/go.mod h1:F7AfLKYQqpM3NNBVs7ctW417tavhvoh9SBjsgtwpzbY=
+github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250930175933-4cafc092c5e7 h1:wH4F+UvxcZSDOxy8j45tghiRo8amrYHejbE9+1C6xv0=
+github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250930175933-4cafc092c5e7/go.mod h1:5IzIGXU1n0foRc8bRAherC8ZuQCQURPlwx3ANLq1138=
github.com/charmbracelet/catwalk v0.6.1 h1:2rRqUlwo+fdyIty8jEvUufRTgqBl0aea21LV6YQPqb0=
github.com/charmbracelet/catwalk v0.6.1/go.mod h1:ReU4SdrLfe63jkEjWMdX2wlZMV3k9r11oQAmzN0m+KY=
github.com/charmbracelet/colorprofile v0.3.2 h1:9J27WdztfJQVAQKX2WOlSSRB+5gaKqqITmrvb1uTIiI=
@@ -94,8 +96,10 @@ github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706 h1:WkwO6Ks3mS
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706/go.mod h1:mjJGp00cxcfvD5xdCa+bso251Jt4owrQvuimJtVmEmM=
github.com/charmbracelet/ultraviolet v0.0.0-20250915111650-81d4262876ef h1:VrWaUi2LXYLjfjCHowdSOEc6dQ9Ro14KY7Bw4IWd19M=
github.com/charmbracelet/ultraviolet v0.0.0-20250915111650-81d4262876ef/go.mod h1:AThRsQH1t+dfyOKIwXRoJBniYFQUkUpQq4paheHMc2o=
-github.com/charmbracelet/x/ansi v0.10.1 h1:rL3Koar5XvX0pHGfovN03f5cxLbCF2YvLeyz7D2jVDQ=
-github.com/charmbracelet/x/ansi v0.10.1/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
+github.com/charmbracelet/x/ansi v0.10.2-0.20250929231137-76218bae042e h1:aN905mmYvQsT4xKUZTdM+lCcuxTrubM3DGVTPxk0ArM=
+github.com/charmbracelet/x/ansi v0.10.2-0.20250929231137-76218bae042e/go.mod h1:HbLdJjQH4UH4AqA2HpRWuWNluRE6zxJH/yteYEYCFa8=
+github.com/charmbracelet/x/ansi v0.10.2 h1:ith2ArZS0CJG30cIUfID1LXN7ZFXRCww6RUvAPA+Pzw=
+github.com/charmbracelet/x/ansi v0.10.2/go.mod h1:HbLdJjQH4UH4AqA2HpRWuWNluRE6zxJH/yteYEYCFa8=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a h1:zYSNtEJM9jwHbJts2k+Hroj+xQwsW1yxc4Wopdv7KaI=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a/go.mod h1:rc2bsPC6MWae3LdOxNO1mOb443NlMrrDL0xEya48NNc=
github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3 h1:1xwHZg6eMZ9Wv5TE1UGub6ARubyOd1Lo5kPUI/6VL50=
@@ -198,8 +202,8 @@ github.com/mark3labs/mcp-go v0.41.0 h1:IFfJaovCet65F3av00bE1HzSnmHpMRWM1kz96R98I
github.com/mark3labs/mcp-go v0.41.0/go.mod h1:T7tUa2jO6MavG+3P25Oy/jR7iCeJPHImCZHRymCn39g=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
-github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
-github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
+github.com/mattn/go-runewidth v0.0.17 h1:78v8ZlW0bP43XfmAfPsdXcoNCelfMHsDmd/pkENfrjQ=
+github.com/mattn/go-runewidth v0.0.17/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mfridman/interpolate v0.0.2 h1:pnuTK7MQIxxFz1Gr+rjSIx9u7qVjf5VOoM/u6BbAxPY=
github.com/mfridman/interpolate v0.0.2/go.mod h1:p+7uk6oE07mpE/Ik1b8EckO0O4ZXiGAfshKBWLUM9Xg=
github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk=
diff --git a/internal/app/app.go b/internal/app/app.go
index 2b3d81fb58acdeb2570a765c0a25ec53b65121da..29631c1be84e96617adfeb705b2e35e0b68725e5 100644
--- a/internal/app/app.go
+++ b/internal/app/app.go
@@ -17,12 +17,12 @@ import (
"github.com/charmbracelet/crush/internal/history"
"github.com/charmbracelet/crush/internal/llm/agent"
"github.com/charmbracelet/crush/internal/log"
- "github.com/charmbracelet/crush/internal/pubsub"
-
"github.com/charmbracelet/crush/internal/lsp"
"github.com/charmbracelet/crush/internal/message"
"github.com/charmbracelet/crush/internal/permission"
+ "github.com/charmbracelet/crush/internal/pubsub"
"github.com/charmbracelet/crush/internal/session"
+ "github.com/charmbracelet/x/ansi"
)
type App struct {
@@ -107,7 +107,10 @@ func (app *App) RunNonInteractive(ctx context.Context, prompt string, quiet bool
ctx, cancel := context.WithCancel(ctx)
defer cancel()
- // Start spinner if not in quiet mode.
+ // Start progress bar and spinner
+ fmt.Printf(ansi.SetIndeterminateProgressBar)
+ defer fmt.Printf(ansi.ResetProgressBar)
+
var spinner *format.Spinner
if !quiet {
spinner = format.NewSpinner(ctx, cancel, "Generating")
diff --git a/internal/tui/tui.go b/internal/tui/tui.go
index 2c935810b833af01c582866ec38d5f7b277bc203..74d82e15514c70ee96b507a01b8f611d3ade6a4d 100644
--- a/internal/tui/tui.go
+++ b/internal/tui/tui.go
@@ -3,6 +3,7 @@ package tui
import (
"context"
"fmt"
+ "math/rand"
"strings"
"time"
@@ -601,6 +602,12 @@ func (a *appModel) View() tea.View {
view.Layer = canvas
view.Cursor = cursor
+ view.ProgressBar = tea.NewProgressBar(tea.ProgressBarNone, 0)
+ if a.app.CoderAgent.IsBusy() {
+ // use a random percentage to prevent the ghostty from hiding it after
+ // a timeout.
+ view.ProgressBar = tea.NewProgressBar(tea.ProgressBarIndeterminate, rand.Intn(100))
+ }
return view
}
From 75a602ae3bdb8d3d22cb95f294f48348e0d4a267 Mon Sep 17 00:00:00 2001
From: Christian Rocha
Date: Wed, 1 Oct 2025 13:42:06 -0400
Subject: [PATCH 197/236] chore: print a bug reporting notice when crush
crashes
All the credit in this revision goes to @andreynering.
---
internal/cmd/root.go | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/internal/cmd/root.go b/internal/cmd/root.go
index 0a2be1cbe6e480fe5719640711bd4caffb430229..c1adfc8016033673610e0b37970ec75a9aea778a 100644
--- a/internal/cmd/root.go
+++ b/internal/cmd/root.go
@@ -3,6 +3,7 @@ package cmd
import (
"bytes"
"context"
+ "errors"
"fmt"
"io"
"log/slog"
@@ -93,7 +94,7 @@ crush -y
if _, err := program.Run(); err != nil {
event.Error(err)
slog.Error("TUI run error", "error", err)
- return fmt.Errorf("TUI error: %v", err)
+ return errors.New("Crush crashed. If metrics are enabled, we were notified about it. If you'd like to report it, please copy the stacktrace above and open an issue at https://github.com/charmbracelet/crush/issues/new?template=bug.yml")
}
return nil
},
From fccc49f4cb9ba90027b6eda64ae7257b633def66 Mon Sep 17 00:00:00 2001
From: Christian Rocha
Date: Wed, 1 Oct 2025 14:17:07 -0400
Subject: [PATCH 198/236] chore(lint): ignore staticcheck in helpful crash
error
Co-authored-by: Andrey Nering
---
internal/cmd/root.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/internal/cmd/root.go b/internal/cmd/root.go
index c1adfc8016033673610e0b37970ec75a9aea778a..d6a26d818643a05704f554223a7b7960792970c5 100644
--- a/internal/cmd/root.go
+++ b/internal/cmd/root.go
@@ -94,7 +94,7 @@ crush -y
if _, err := program.Run(); err != nil {
event.Error(err)
slog.Error("TUI run error", "error", err)
- return errors.New("Crush crashed. If metrics are enabled, we were notified about it. If you'd like to report it, please copy the stacktrace above and open an issue at https://github.com/charmbracelet/crush/issues/new?template=bug.yml")
+ return errors.New("Crush crashed. If metrics are enabled, we were notified about it. If you'd like to report it, please copy the stacktrace above and open an issue at https://github.com/charmbracelet/crush/issues/new?template=bug.yml") //nolint:staticcheck
}
return nil
},
From 8c1e3764c76b47845e066ca7a4fddd4820b2b551 Mon Sep 17 00:00:00 2001
From: Christian Rocha
Date: Wed, 1 Oct 2025 12:39:42 -0400
Subject: [PATCH 199/236] chore(task): add helper for fetching tags
---
Taskfile.yaml | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/Taskfile.yaml b/Taskfile.yaml
index 0739cab998f34c7b4129b0765b9b225a2455f8ae..8f714b0e5afba0dec28f85627be47503dc59c0fb 100644
--- a/Taskfile.yaml
+++ b/Taskfile.yaml
@@ -102,3 +102,8 @@ tasks:
- git tag --annotate --sign {{.NEXT}} {{.CLI_ARGS}}
- echo "Pushing {{.NEXT}}..."
- git push origin --tags
+
+ fetch-tags:
+ cmds:
+ - git tag -d nightly || true
+ - git fetch --tags
From 08e384dae68fb364c31ceb6fb49b288bdf508c95 Mon Sep 17 00:00:00 2001
From: Christian Rocha
Date: Wed, 1 Oct 2025 21:22:17 -0400
Subject: [PATCH 200/236] chore(task): fetch tags before releasing
---
Taskfile.yaml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/Taskfile.yaml b/Taskfile.yaml
index 8f714b0e5afba0dec28f85627be47503dc59c0fb..a990205a63497fbb020c78298fb826890ee1dcda 100644
--- a/Taskfile.yaml
+++ b/Taskfile.yaml
@@ -90,15 +90,15 @@ tasks:
vars:
NEXT:
sh: go run github.com/caarlos0/svu/v3@latest next --always
- prompt: "This will release {{.NEXT}}. Please make sure you've fetch tags. Continue?"
+ prompt: "This will release {{.NEXT}}. Continue?"
preconditions:
- sh: '[ $(git symbolic-ref --short HEAD) = "main" ]'
msg: Not on main branch
- sh: "[ $(git status --porcelain=2 | wc -l) = 0 ]"
msg: "Git is dirty"
cmds:
+ - task: fetch-tags
- git commit --allow-empty -m "{{.NEXT}}"
- - git tag -d nightly
- git tag --annotate --sign {{.NEXT}} {{.CLI_ARGS}}
- echo "Pushing {{.NEXT}}..."
- git push origin --tags
From 255aa3bcaaa79a270b48da607b2adab6a7e89bbd Mon Sep 17 00:00:00 2001
From: Christian Rocha
Date: Wed, 1 Oct 2025 21:23:33 -0400
Subject: [PATCH 201/236] chore(task): just use svu if it's already installed
---
Taskfile.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Taskfile.yaml b/Taskfile.yaml
index a990205a63497fbb020c78298fb826890ee1dcda..13c171ed2e67faa9aa87c6f9f7d0ec3b7018f382 100644
--- a/Taskfile.yaml
+++ b/Taskfile.yaml
@@ -89,7 +89,7 @@ tasks:
desc: Create and push a new tag following semver
vars:
NEXT:
- sh: go run github.com/caarlos0/svu/v3@latest next --always
+ sh: svu next --always || go run github.com/caarlos0/svu/v3@latest next --always
prompt: "This will release {{.NEXT}}. Continue?"
preconditions:
- sh: '[ $(git symbolic-ref --short HEAD) = "main" ]'
From 1afeeb78f711b4210ac64cac79ce5fdd0054a82e Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Sun, 5 Oct 2025 15:14:06 -0300
Subject: [PATCH 202/236] chore(legal): @kucukkanat has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index 4ecfd86887d5d072491a8fa764628e8935e4ebfe..be16926ef1dc52cf523997eead83ed5ffe19eb95 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -679,6 +679,14 @@
"created_at": "2025-09-29T01:19:40Z",
"repoId": 987670088,
"pullRequestNo": 1148
+ },
+ {
+ "name": "kucukkanat",
+ "id": 914316,
+ "comment_id": 3369230313,
+ "created_at": "2025-10-05T18:13:57Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1195
}
]
}
\ No newline at end of file
From a2b69450c0215676121a50755c059ded4e31ab82 Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Sun, 5 Oct 2025 16:17:23 -0300
Subject: [PATCH 203/236] chore(legal): @thuggys has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index be16926ef1dc52cf523997eead83ed5ffe19eb95..665f0d0e5c3af5ab0492a898cd1303535616273d 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -687,6 +687,14 @@
"created_at": "2025-10-05T18:13:57Z",
"repoId": 987670088,
"pullRequestNo": 1195
+ },
+ {
+ "name": "thuggys",
+ "id": 150315417,
+ "comment_id": 3369149503,
+ "created_at": "2025-10-05T15:59:55Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1194
}
]
}
\ No newline at end of file
From f1122df71fab7d11f369a936ef636abd40a7dc48 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 6 Oct 2025 09:58:05 +0000
Subject: [PATCH 204/236] chore(deps): bump the all group with 7 updates
(#1198)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
go.mod | 20 ++++++++++----------
go.sum | 40 ++++++++++++++++++----------------------
2 files changed, 28 insertions(+), 32 deletions(-)
diff --git a/go.mod b/go.mod
index 4beabb5b74efb565432aa36329698169e8aaae8d..170788928c44d7e233da6c25871927f3a8bf2073 100644
--- a/go.mod
+++ b/go.mod
@@ -7,15 +7,15 @@ require (
github.com/MakeNowJust/heredoc v1.0.0
github.com/PuerkitoBio/goquery v1.10.3
github.com/alecthomas/chroma/v2 v2.20.0
- github.com/anthropics/anthropic-sdk-go v1.12.0
+ github.com/anthropics/anthropic-sdk-go v1.13.0
github.com/atotto/clipboard v0.1.4
github.com/aymanbagabas/go-udiff v0.3.1
github.com/bmatcuk/doublestar/v4 v4.9.1
github.com/charlievieth/fastwalk v1.0.14
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250930175933-4cafc092c5e7
- github.com/charmbracelet/catwalk v0.6.1
- github.com/charmbracelet/fang v0.4.2
+ github.com/charmbracelet/catwalk v0.6.3
+ github.com/charmbracelet/fang v0.4.3
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250917201909-41ff0bf215ea
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706
@@ -26,13 +26,13 @@ require (
github.com/google/uuid v1.6.0
github.com/invopop/jsonschema v0.13.0
github.com/joho/godotenv v1.5.1
- github.com/mark3labs/mcp-go v0.41.0
+ github.com/mark3labs/mcp-go v0.41.1
github.com/muesli/termenv v0.16.0
- github.com/ncruces/go-sqlite3 v0.29.0
+ github.com/ncruces/go-sqlite3 v0.29.1
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646
github.com/nxadm/tail v1.4.11
github.com/openai/openai-go v1.12.0
- github.com/pressly/goose/v3 v3.25.0
+ github.com/pressly/goose/v3 v3.26.0
github.com/qjebbs/go-jsons v1.0.0-alpha.4
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06
github.com/sahilm/fuzzy v0.1.1
@@ -72,7 +72,7 @@ require (
github.com/aymerick/douceur v0.2.0 // indirect
github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/buger/jsonparser v1.1.1 // indirect
- github.com/charmbracelet/colorprofile v0.3.2 // indirect
+ github.com/charmbracelet/colorprofile v0.3.2
github.com/charmbracelet/ultraviolet v0.0.0-20250915111650-81d4262876ef
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a // indirect
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d
@@ -141,18 +141,18 @@ require (
go.opentelemetry.io/otel/metric v1.37.0 // indirect
go.opentelemetry.io/otel/trace v1.37.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
- golang.org/x/crypto v0.41.0 // indirect
+ golang.org/x/crypto v0.42.0 // indirect
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect
golang.org/x/image v0.26.0 // indirect
golang.org/x/net v0.43.0 // indirect
golang.org/x/oauth2 v0.30.0 // indirect
golang.org/x/sync v0.17.0 // indirect
golang.org/x/sys v0.36.0 // indirect
- golang.org/x/term v0.34.0 // indirect
+ golang.org/x/term v0.35.0 // indirect
golang.org/x/text v0.29.0
golang.org/x/time v0.8.0 // indirect
google.golang.org/api v0.211.0 // indirect
- google.golang.org/genai v1.26.0
+ google.golang.org/genai v1.28.0
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 // indirect
google.golang.org/grpc v1.71.0 // indirect
google.golang.org/protobuf v1.36.8 // indirect
diff --git a/go.sum b/go.sum
index 55872255a6bf56f4ef8093dc277fc17c6df9092a..3669305d22b191791df373899305e5e18a4e1f71 100644
--- a/go.sum
+++ b/go.sum
@@ -30,8 +30,8 @@ github.com/alecthomas/repr v0.5.1/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW5
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
-github.com/anthropics/anthropic-sdk-go v1.12.0 h1:xPqlGnq7rWrTiHazIvCiumA0u7mGQnwDQtvA1M82h9U=
-github.com/anthropics/anthropic-sdk-go v1.12.0/go.mod h1:WTz31rIUHUHqai2UslPpw5CwXrQP3geYBioRV4WOLvE=
+github.com/anthropics/anthropic-sdk-go v1.13.0 h1:Bhbe8sRoDPtipttg8bQYrMCKe2b79+q6rFW1vOKEUKI=
+github.com/anthropics/anthropic-sdk-go v1.13.0/go.mod h1:WTz31rIUHUHqai2UslPpw5CwXrQP3geYBioRV4WOLvE=
github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
github.com/aws/aws-sdk-go-v2 v1.30.3 h1:jUeBtG0Ih+ZIFH0F4UkmL9w3cSpaMv9tYYDbzILP8dY=
@@ -78,16 +78,14 @@ github.com/charlievieth/fastwalk v1.0.14 h1:3Eh5uaFGwHZd8EGwTjJnSpBkfwfsak9h6ICg
github.com/charlievieth/fastwalk v1.0.14/go.mod h1:diVcUreiU1aQ4/Wu3NbxxH4/KYdKpLDojrQ1Bb2KgNY=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2 h1:973OHYuq2Jx9deyuPwe/6lsuQrDCatOsjP8uCd02URE=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw=
-github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e h1:4BBnKWFwJ5FLyhw/ijFxKE04i9rubr8WIPR1kjO57iA=
-github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e/go.mod h1:F7AfLKYQqpM3NNBVs7ctW417tavhvoh9SBjsgtwpzbY=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250930175933-4cafc092c5e7 h1:wH4F+UvxcZSDOxy8j45tghiRo8amrYHejbE9+1C6xv0=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250930175933-4cafc092c5e7/go.mod h1:5IzIGXU1n0foRc8bRAherC8ZuQCQURPlwx3ANLq1138=
-github.com/charmbracelet/catwalk v0.6.1 h1:2rRqUlwo+fdyIty8jEvUufRTgqBl0aea21LV6YQPqb0=
-github.com/charmbracelet/catwalk v0.6.1/go.mod h1:ReU4SdrLfe63jkEjWMdX2wlZMV3k9r11oQAmzN0m+KY=
+github.com/charmbracelet/catwalk v0.6.3 h1:RyL8Yqd4QsV3VyvBEsePScv1z2vKaZxPfQQ0XB5L5AA=
+github.com/charmbracelet/catwalk v0.6.3/go.mod h1:ReU4SdrLfe63jkEjWMdX2wlZMV3k9r11oQAmzN0m+KY=
github.com/charmbracelet/colorprofile v0.3.2 h1:9J27WdztfJQVAQKX2WOlSSRB+5gaKqqITmrvb1uTIiI=
github.com/charmbracelet/colorprofile v0.3.2/go.mod h1:mTD5XzNeWHj8oqHb+S1bssQb7vIHbepiebQ2kPKVKbI=
-github.com/charmbracelet/fang v0.4.2 h1:nWr7Tb82/TTNNGMGG35aTZ1X68loAOQmpb0qxkKXjas=
-github.com/charmbracelet/fang v0.4.2/go.mod h1:wHJKQYO5ReYsxx+yZl+skDtrlKO/4LLEQ6EXsdHhRhg=
+github.com/charmbracelet/fang v0.4.3 h1:qXeMxnL4H6mSKBUhDefHu8NfikFbP/MBNTfqTrXvzmY=
+github.com/charmbracelet/fang v0.4.3/go.mod h1:wHJKQYO5ReYsxx+yZl+skDtrlKO/4LLEQ6EXsdHhRhg=
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018 h1:PU4Zvpagsk5sgaDxn5W4sxHuLp9QRMBZB3bFSk40A4w=
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018/go.mod h1:Z/GLmp9fzaqX4ze3nXG7StgWez5uBM5XtlLHK8V/qSk=
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250917201909-41ff0bf215ea h1:g1HfUgSMvye8mgecMD1mPscpt+pzJoDEiSA+p2QXzdQ=
@@ -96,8 +94,6 @@ github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706 h1:WkwO6Ks3mS
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706/go.mod h1:mjJGp00cxcfvD5xdCa+bso251Jt4owrQvuimJtVmEmM=
github.com/charmbracelet/ultraviolet v0.0.0-20250915111650-81d4262876ef h1:VrWaUi2LXYLjfjCHowdSOEc6dQ9Ro14KY7Bw4IWd19M=
github.com/charmbracelet/ultraviolet v0.0.0-20250915111650-81d4262876ef/go.mod h1:AThRsQH1t+dfyOKIwXRoJBniYFQUkUpQq4paheHMc2o=
-github.com/charmbracelet/x/ansi v0.10.2-0.20250929231137-76218bae042e h1:aN905mmYvQsT4xKUZTdM+lCcuxTrubM3DGVTPxk0ArM=
-github.com/charmbracelet/x/ansi v0.10.2-0.20250929231137-76218bae042e/go.mod h1:HbLdJjQH4UH4AqA2HpRWuWNluRE6zxJH/yteYEYCFa8=
github.com/charmbracelet/x/ansi v0.10.2 h1:ith2ArZS0CJG30cIUfID1LXN7ZFXRCww6RUvAPA+Pzw=
github.com/charmbracelet/x/ansi v0.10.2/go.mod h1:HbLdJjQH4UH4AqA2HpRWuWNluRE6zxJH/yteYEYCFa8=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a h1:zYSNtEJM9jwHbJts2k+Hroj+xQwsW1yxc4Wopdv7KaI=
@@ -198,8 +194,8 @@ github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQ
github.com/lucasb-eyer/go-colorful v1.3.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
-github.com/mark3labs/mcp-go v0.41.0 h1:IFfJaovCet65F3av00bE1HzSnmHpMRWM1kz96R98I70=
-github.com/mark3labs/mcp-go v0.41.0/go.mod h1:T7tUa2jO6MavG+3P25Oy/jR7iCeJPHImCZHRymCn39g=
+github.com/mark3labs/mcp-go v0.41.1 h1:w78eWfiQam2i8ICL7AL0WFiq7KHNJQ6UB53ZVtH4KGA=
+github.com/mark3labs/mcp-go v0.41.1/go.mod h1:T7tUa2jO6MavG+3P25Oy/jR7iCeJPHImCZHRymCn39g=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.17 h1:78v8ZlW0bP43XfmAfPsdXcoNCelfMHsDmd/pkENfrjQ=
@@ -222,8 +218,8 @@ github.com/muesli/roff v0.1.0 h1:YD0lalCotmYuF5HhZliKWlIx7IEhiXeSfq7hNjFqGF8=
github.com/muesli/roff v0.1.0/go.mod h1:pjAHQM9hdUUwm/krAfrLGgJkXJ+YuhtsfZ42kieB2Ig=
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
-github.com/ncruces/go-sqlite3 v0.29.0 h1:1tsLiagCoqZEfcHDeKsNSv5jvrY/Iu393pAnw2wLNJU=
-github.com/ncruces/go-sqlite3 v0.29.0/go.mod h1:r1hSvYKPNJ+OlUA1O3r8o9LAawzPAlqeZiIdxTBBBJ0=
+github.com/ncruces/go-sqlite3 v0.29.1 h1:NIi8AISWBToRHyoz01FXiTNvU147Tqdibgj2tFzJCqM=
+github.com/ncruces/go-sqlite3 v0.29.1/go.mod h1:PpccBNNhvjwUOwDQEn2gXQPFPTWdlromj0+fSkd5KSg=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/ncruces/julianday v1.0.0 h1:fH0OKwa7NWvniGQtxdJRxAgkBMolni2BjDHaWTxqt7M=
@@ -243,8 +239,8 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/posthog/posthog-go v1.6.10 h1:OA6bkiUg89rI7f5cSXbcrH5+wLinyS6hHplnD92Pu/M=
github.com/posthog/posthog-go v1.6.10/go.mod h1:LcC1Nu4AgvV22EndTtrMXTy+7RGVC0MhChSw7Qk5XkY=
-github.com/pressly/goose/v3 v3.25.0 h1:6WeYhMWGRCzpyd89SpODFnCBCKz41KrVbRT58nVjGng=
-github.com/pressly/goose/v3 v3.25.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY=
+github.com/pressly/goose/v3 v3.26.0 h1:KJakav68jdH0WDvoAcj8+n61WqOIaPGgH0bJWS6jpmM=
+github.com/pressly/goose/v3 v3.26.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY=
github.com/qjebbs/go-jsons v1.0.0-alpha.4 h1:Qsb4ohRUHQODIUAsJKdKJ/SIDbsO7oGOzsfy+h1yQZs=
github.com/qjebbs/go-jsons v1.0.0-alpha.4/go.mod h1:wNJrtinHyC3YSf6giEh4FJN8+yZV7nXBjvmfjhBIcw4=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
@@ -342,8 +338,8 @@ golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDf
golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
-golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
-golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
+golang.org/x/crypto v0.42.0 h1:chiH31gIWm57EkTXpwnqf8qeuMUi0yekh6mT2AvFlqI=
+golang.org/x/crypto v0.42.0/go.mod h1:4+rDnOTJhQCx2q7/j6rAN5XDw8kPjeaXEUR2eL94ix8=
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o=
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8=
golang.org/x/image v0.26.0 h1:4XjIFEZWQmCZi6Wv8BoxsDhRU3RVnLX04dToTDAEPlY=
@@ -405,8 +401,8 @@ golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
-golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4=
-golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw=
+golang.org/x/term v0.35.0 h1:bZBVKBudEyhRcajGcNc3jIfWPqV4y/Kt2XcoigOWtDQ=
+golang.org/x/term v0.35.0/go.mod h1:TPGtkTLesOwf2DE8CgVYiZinHAOuy5AYUYT1lENIZnA=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
@@ -429,8 +425,8 @@ golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxb
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.211.0 h1:IUpLjq09jxBSV1lACO33CGY3jsRcbctfGzhj+ZSE/Bg=
google.golang.org/api v0.211.0/go.mod h1:XOloB4MXFH4UTlQSGuNUxw0UT74qdENK8d6JNsXKLi0=
-google.golang.org/genai v1.26.0 h1:r4HGL54kFv/WCRMTAbZg05Ct+vXfhAbTRlXhFyBkEQo=
-google.golang.org/genai v1.26.0/go.mod h1:OClfdf+r5aaD+sCd4aUSkPzJItmg2wD/WON9lQnRPaY=
+google.golang.org/genai v1.28.0 h1:6qpUWFH3PkHPhxNnu3wjaCVJ6Jri1EIR7ks07f9IpIk=
+google.golang.org/genai v1.28.0/go.mod h1:7pAilaICJlQBonjKKJNhftDFv3SREhZcTe9F6nRcjbg=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 h1:e0AIkUUhxyBKh6ssZNrAMeqhA7RKUj42346d1y02i2g=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg=
From 2d05fe8164076765b1bb5d62b6f89fe585ef846c Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Mon, 6 Oct 2025 16:32:04 -0300
Subject: [PATCH 205/236] chore(legal): @nikaro has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index 665f0d0e5c3af5ab0492a898cd1303535616273d..dc7d5873d3110320f09b8457b696a47d307ef41a 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -695,6 +695,14 @@
"created_at": "2025-10-05T15:59:55Z",
"repoId": 987670088,
"pullRequestNo": 1194
+ },
+ {
+ "name": "nikaro",
+ "id": 3918653,
+ "comment_id": 3373586148,
+ "created_at": "2025-10-06T19:31:50Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1200
}
]
}
\ No newline at end of file
From 886bb7c7101ec637104d17b34dce8573e1daa5ee Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Thu, 9 Oct 2025 19:32:10 +0200
Subject: [PATCH 206/236] fix(mcp): fix ui description, double spaces (#1210)
Signed-off-by: Carlos Alexandro Becker
---
internal/tui/components/core/core.go | 7 +++++--
.../core/testdata/TestStatus/EmptyDescription.golden | 2 +-
internal/tui/components/mcp/mcp.go | 2 +-
3 files changed, 7 insertions(+), 4 deletions(-)
diff --git a/internal/tui/components/core/core.go b/internal/tui/components/core/core.go
index 18de56b17f08e4513bde34fe9fef7aaf4e08c09f..80c28ba1e11c4ddeb7e6da1f4802577d23e8b4dc 100644
--- a/internal/tui/components/core/core.go
+++ b/internal/tui/components/core/core.go
@@ -110,14 +110,17 @@ func Status(opts StatusOpts, width int) string {
extraContentWidth += 1
}
description = ansi.Truncate(description, width-lipgloss.Width(icon)-lipgloss.Width(title)-2-extraContentWidth, "…")
+ description = t.S().Base.Foreground(descriptionColor).Render(description)
}
- description = t.S().Base.Foreground(descriptionColor).Render(description)
content := []string{}
if icon != "" {
content = append(content, icon)
}
- content = append(content, title, description)
+ content = append(content, title)
+ if description != "" {
+ content = append(content, description)
+ }
if opts.ExtraContent != "" {
content = append(content, opts.ExtraContent)
}
diff --git a/internal/tui/components/core/testdata/TestStatus/EmptyDescription.golden b/internal/tui/components/core/testdata/TestStatus/EmptyDescription.golden
index 5b396377658610dd0fbc0746fd960f2faaf76f49..db4acad54383ecbc2cc50061ee5ba77491dc545d 100644
--- a/internal/tui/components/core/testdata/TestStatus/EmptyDescription.golden
+++ b/internal/tui/components/core/testdata/TestStatus/EmptyDescription.golden
@@ -1 +1 @@
-● [38;2;133;131;146mTitle Only[m [38;2;96;95;107m[m
\ No newline at end of file
+● [38;2;133;131;146mTitle Only[m
\ No newline at end of file
diff --git a/internal/tui/components/mcp/mcp.go b/internal/tui/components/mcp/mcp.go
index d11826b77749ba65276b5336a5d88cdbc8552881..fd3bd012732397538cc263b2eff92ae617e866d8 100644
--- a/internal/tui/components/mcp/mcp.go
+++ b/internal/tui/components/mcp/mcp.go
@@ -55,7 +55,7 @@ func RenderMCPList(opts RenderOptions) []string {
// Determine icon and color based on state
icon := t.ItemOfflineIcon
- description := l.MCP.Command
+ description := ""
extraContent := ""
if state, exists := mcpStates[l.Name]; exists {
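For reference, a minimal standalone sketch (not part of the patch) of why an always-appended empty description produced the doubled spaces this commit fixes: once the segments are space-joined, an empty element between the title and the extra content leaves two spaces behind. Only the standard library is used; the segment values are made up.

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Before the fix: the (possibly empty) description is always appended.
	before := strings.Join([]string{"●", "filesystem", "", "2 tools"}, " ")
	fmt.Printf("%q\n", before) // "● filesystem  2 tools" — note the double space

	// After the fix: empty segments are skipped before joining.
	segments := []string{"●", "filesystem", "", "2 tools"}
	var kept []string
	for _, s := range segments {
		if s != "" {
			kept = append(kept, s)
		}
	}
	fmt.Printf("%q\n", strings.Join(kept, " ")) // "● filesystem 2 tools"
}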
From 390983a851b54d39b5812bd5aaaab2101703ca14 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Thu, 9 Oct 2025 14:46:11 -0300
Subject: [PATCH 207/236] chore: allow passing args to task run
Signed-off-by: Carlos Alexandro Becker
---
Taskfile.yaml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/Taskfile.yaml b/Taskfile.yaml
index 13c171ed2e67faa9aa87c6f9f7d0ec3b7018f382..1c4225158fc21508e8dccac8d6f47610f7d81faf 100644
--- a/Taskfile.yaml
+++ b/Taskfile.yaml
@@ -38,7 +38,7 @@ tasks:
run:
desc: Run build
cmds:
- - go run .
+ - go run . {{.CLI_ARGS}}
test:
desc: Run tests
@@ -104,6 +104,6 @@ tasks:
- git push origin --tags
fetch-tags:
- cmds:
+ cmds:
- git tag -d nightly || true
- git fetch --tags
From 04210801f02d2ee5db7ae89a7ec28e34d5d14d5b Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Thu, 9 Oct 2025 23:16:55 +0200
Subject: [PATCH 208/236] fix(lsp): small UI improvements (#1211)
Signed-off-by: Carlos Alexandro Becker
---
internal/tui/components/lsp/lsp.go | 51 +++++++++++++++---------------
1 file changed, 25 insertions(+), 26 deletions(-)
diff --git a/internal/tui/components/lsp/lsp.go b/internal/tui/components/lsp/lsp.go
index f5f4061045901c91ecb8bce1f47eab3ac1f7abcf..0c0384e91c36744b8f318f9bbc71e5e076a26abf 100644
--- a/internal/tui/components/lsp/lsp.go
+++ b/internal/tui/components/lsp/lsp.go
@@ -56,32 +56,7 @@ func RenderLSPList(lspClients *csync.Map[string, *lsp.Client], opts RenderOption
break
}
- // Determine icon color and description based on state
- icon := t.ItemOfflineIcon
- description := l.LSP.Command
-
- if l.LSP.Disabled {
- description = t.S().Subtle.Render("disabled")
- } else if state, exists := lspStates[l.Name]; exists {
- switch state.State {
- case lsp.StateStarting:
- icon = t.ItemBusyIcon
- description = t.S().Subtle.Render("starting...")
- case lsp.StateReady:
- icon = t.ItemOnlineIcon
- description = l.LSP.Command
- case lsp.StateError:
- icon = t.ItemErrorIcon
- if state.Error != nil {
- description = t.S().Subtle.Render(fmt.Sprintf("error: %s", state.Error.Error()))
- } else {
- description = t.S().Subtle.Render("error")
- }
- case lsp.StateDisabled:
- icon = t.ItemOfflineIcon.Foreground(t.FgMuted)
- description = t.S().Base.Foreground(t.FgMuted).Render("no root markers found")
- }
- }
+ icon, description := iconAndDescription(l, t, lspStates)
// Calculate diagnostic counts if we have LSP clients
var extraContent string
@@ -134,6 +109,30 @@ func RenderLSPList(lspClients *csync.Map[string, *lsp.Client], opts RenderOption
return lspList
}
+func iconAndDescription(l config.LSP, t *styles.Theme, states map[string]app.LSPClientInfo) (lipgloss.Style, string) {
+ if l.LSP.Disabled {
+ return t.ItemOfflineIcon.Foreground(t.FgMuted), t.S().Subtle.Render("disabled")
+ }
+
+ info := states[l.Name]
+ switch info.State {
+ case lsp.StateStarting:
+ return t.ItemBusyIcon, t.S().Subtle.Render("starting...")
+ case lsp.StateReady:
+ return t.ItemOnlineIcon, ""
+ case lsp.StateError:
+ description := t.S().Subtle.Render("error")
+ if info.Error != nil {
+ description = t.S().Subtle.Render(fmt.Sprintf("error: %s", info.Error.Error()))
+ }
+ return t.ItemErrorIcon, description
+ case lsp.StateDisabled:
+ return t.ItemOfflineIcon.Foreground(t.FgMuted), t.S().Subtle.Render("inactive")
+ default:
+ return t.ItemOfflineIcon, ""
+ }
+}
+
// RenderLSPBlock renders a complete LSP block with optional truncation indicator.
func RenderLSPBlock(lspClients *csync.Map[string, *lsp.Client], opts RenderOptions, showTruncationIndicator bool) string {
t := styles.CurrentTheme()
From a430043677c5fdf019ec0a7cc19bfc26d401b6a4 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Thu, 9 Oct 2025 23:51:01 -0300
Subject: [PATCH 209/236] fix: move some logs to debug
Signed-off-by: Carlos Alexandro Becker
---
internal/fsext/ls.go | 2 +-
internal/llm/agent/agent.go | 8 ++++----
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/internal/fsext/ls.go b/internal/fsext/ls.go
index 80d25a57f19867a4ca2af44df7e691bb9d109496..c22b960ad02a42bf6adac7768b7d99e55a9390ee 100644
--- a/internal/fsext/ls.go
+++ b/internal/fsext/ls.go
@@ -210,7 +210,7 @@ func ListDirectory(initialPath string, ignorePatterns []string, depth, limit int
found := csync.NewSlice[string]()
dl := NewDirectoryLister(initialPath)
- slog.Warn("listing directory", "path", initialPath, "depth", depth, "limit", limit, "ignorePatterns", ignorePatterns)
+ slog.Debug("listing directory", "path", initialPath, "depth", depth, "limit", limit, "ignorePatterns", ignorePatterns)
conf := fastwalk.Config{
Follow: true,
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index 1efc3fc268392c06481d61ae6e11c9d67cdc13e8..32032280908fc0045125f31db2d6112eddf6a7c9 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -175,9 +175,9 @@ func NewAgent(
}
baseToolsFn := func() map[string]tools.BaseTool {
- slog.Info("Initializing agent base tools", "agent", agentCfg.ID)
+ slog.Debug("Initializing agent base tools", "agent", agentCfg.ID)
defer func() {
- slog.Info("Initialized agent base tools", "agent", agentCfg.ID)
+ slog.Debug("Initialized agent base tools", "agent", agentCfg.ID)
}()
// Base tools available to all agents
@@ -201,9 +201,9 @@ func NewAgent(
return result
}
mcpToolsFn := func() map[string]tools.BaseTool {
- slog.Info("Initializing agent mcp tools", "agent", agentCfg.ID)
+ slog.Debug("Initializing agent mcp tools", "agent", agentCfg.ID)
defer func() {
- slog.Info("Initialized agent mcp tools", "agent", agentCfg.ID)
+ slog.Debug("Initialized agent mcp tools", "agent", agentCfg.ID)
}()
mcpToolsOnce.Do(func() {
From ca66a11ab6293b7f598a83596f724f00e2960831 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Fri, 10 Oct 2025 10:07:49 -0300
Subject: [PATCH 210/236] refactor(mcp): use the new mcp library (#1208)
Signed-off-by: Carlos Alexandro Becker
---
go.mod | 4 +-
go.sum | 16 +--
internal/config/config.go | 2 +-
internal/llm/agent/agent.go | 1 +
internal/llm/agent/mcp-tools.go | 193 +++++++++++++++-----------------
5 files changed, 100 insertions(+), 116 deletions(-)
diff --git a/go.mod b/go.mod
index 170788928c44d7e233da6c25871927f3a8bf2073..843e7f231f729e86d4e299349fa1293005ad3971 100644
--- a/go.mod
+++ b/go.mod
@@ -26,7 +26,7 @@ require (
github.com/google/uuid v1.6.0
github.com/invopop/jsonschema v0.13.0
github.com/joho/godotenv v1.5.1
- github.com/mark3labs/mcp-go v0.41.1
+ github.com/modelcontextprotocol/go-sdk v1.0.0
github.com/muesli/termenv v0.16.0
github.com/ncruces/go-sqlite3 v0.29.1
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646
@@ -91,6 +91,7 @@ require (
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/google/go-cmp v0.7.0 // indirect
+ github.com/google/jsonschema-go v0.3.0 // indirect
github.com/google/s2a-go v0.1.8 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.4 // indirect
github.com/googleapis/gax-go/v2 v2.14.1 // indirect
@@ -121,7 +122,6 @@ require (
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect
github.com/sourcegraph/jsonrpc2 v0.2.1 // indirect
- github.com/spf13/cast v1.7.1 // indirect
github.com/spf13/pflag v1.0.9 // indirect
github.com/tetratelabs/wazero v1.9.0 // indirect
github.com/tidwall/gjson v1.18.0 // indirect
diff --git a/go.sum b/go.sum
index 3669305d22b191791df373899305e5e18a4e1f71..563016cca9ffcec4a7be40aeed80822a105d1769 100644
--- a/go.sum
+++ b/go.sum
@@ -130,8 +130,6 @@ github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkp
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
-github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
-github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw=
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
@@ -144,13 +142,15 @@ github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-quicktest/qt v1.101.0 h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7eI=
github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow=
-github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
-github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
+github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
+github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
+github.com/google/jsonschema-go v0.3.0 h1:6AH2TxVNtk3IlvkkhjrtbUc4S8AvO0Xii0DxIygDg+Q=
+github.com/google/jsonschema-go v0.3.0/go.mod h1:r5quNTdLOYEz95Ru18zA0ydNbBuYoo9tgaYcxEYhJVE=
github.com/google/s2a-go v0.1.8 h1:zZDs9gcbt9ZPLV0ndSyQk6Kacx2g/X+SKYovpnz3SMM=
github.com/google/s2a-go v0.1.8/go.mod h1:6iNWHTpQ+nfNRN5E00MSdfDwVesa8hhS32PhPO8deJA=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
@@ -194,8 +194,6 @@ github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQ
github.com/lucasb-eyer/go-colorful v1.3.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
-github.com/mark3labs/mcp-go v0.41.1 h1:w78eWfiQam2i8ICL7AL0WFiq7KHNJQ6UB53ZVtH4KGA=
-github.com/mark3labs/mcp-go v0.41.1/go.mod h1:T7tUa2jO6MavG+3P25Oy/jR7iCeJPHImCZHRymCn39g=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.17 h1:78v8ZlW0bP43XfmAfPsdXcoNCelfMHsDmd/pkENfrjQ=
@@ -206,6 +204,8 @@ github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwX
github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
+github.com/modelcontextprotocol/go-sdk v1.0.0 h1:Z4MSjLi38bTgLrd/LjSmofqRqyBiVKRyQSJgw8q8V74=
+github.com/modelcontextprotocol/go-sdk v1.0.0/go.mod h1:nYtYQroQ2KQiM0/SbyEPUWQ6xs4B95gJjEalc9AQyOs=
github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
github.com/muesli/mango v0.1.0 h1:DZQK45d2gGbql1arsYA4vfg4d7I9Hfx5rX/GCmzsAvI=
@@ -265,8 +265,6 @@ github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah
github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas=
github.com/sourcegraph/jsonrpc2 v0.2.1 h1:2GtljixMQYUYCmIg7W9aF2dFmniq/mOr2T9tFRh6zSQ=
github.com/sourcegraph/jsonrpc2 v0.2.1/go.mod h1:ZafdZgk/axhT1cvZAPOhw+95nz2I/Ra5qMlU4gTRwIo=
-github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y=
-github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY=
@@ -422,6 +420,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
+golang.org/x/tools v0.36.0 h1:kWS0uv/zsvHEle1LbV5LE8QujrxB3wfQyxHfhOk0Qkg=
+golang.org/x/tools v0.36.0/go.mod h1:WBDiHKJK8YgLHlcQPYQzNCkUxUypCaa5ZegCVutKm+s=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.211.0 h1:IUpLjq09jxBSV1lACO33CGY3jsRcbctfGzhj+ZSE/Bg=
google.golang.org/api v0.211.0/go.mod h1:XOloB4MXFH4UTlQSGuNUxw0UT74qdENK8d6JNsXKLi0=
diff --git a/internal/config/config.go b/internal/config/config.go
index 858fa1c47b33f6a5e6bafb81b4799ea5739736f9..b37b98cad717e789ad16237b3ca250a2f1555ba9 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -99,7 +99,7 @@ type MCPType string
const (
MCPStdio MCPType = "stdio"
- MCPSse MCPType = "sse"
+ MCPSSE MCPType = "sse"
MCPHttp MCPType = "http"
)
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index 32032280908fc0045125f31db2d6112eddf6a7c9..e338eef782912bdfea48ca72ebfd33c4cd981f62 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -1,3 +1,4 @@
+// Package agent contains the implementation of the AI agent service.
package agent
import (
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index 181f32b7280faf3eb36040d2ebecf3f892350f53..67f0b39ccfb6eb8aad3abd337e7545a59766d872 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -6,8 +6,11 @@ import (
"encoding/json"
"errors"
"fmt"
+ "io"
"log/slog"
"maps"
+ "net/http"
+ "os/exec"
"strings"
"sync"
"time"
@@ -19,9 +22,7 @@ import (
"github.com/charmbracelet/crush/internal/permission"
"github.com/charmbracelet/crush/internal/pubsub"
"github.com/charmbracelet/crush/internal/version"
- "github.com/mark3labs/mcp-go/client"
- "github.com/mark3labs/mcp-go/client/transport"
- "github.com/mark3labs/mcp-go/mcp"
+ "github.com/modelcontextprotocol/go-sdk/mcp"
)
// MCPState represents the current state of an MCP client
@@ -71,7 +72,7 @@ type MCPClientInfo struct {
Name string
State MCPState
Error error
- Client *client.Client
+ Client *mcp.ClientSession
ToolCount int
ConnectedAt time.Time
}
@@ -80,14 +81,14 @@ var (
mcpToolsOnce sync.Once
mcpTools = csync.NewMap[string, tools.BaseTool]()
mcpClient2Tools = csync.NewMap[string, []tools.BaseTool]()
- mcpClients = csync.NewMap[string, *client.Client]()
+ mcpClients = csync.NewMap[string, *mcp.ClientSession]()
mcpStates = csync.NewMap[string, MCPClientInfo]()
mcpBroker = pubsub.NewBroker[MCPEvent]()
)
type McpTool struct {
mcpName string
- tool mcp.Tool
+ tool *mcp.Tool
permissions permission.Service
workingDir string
}
@@ -97,14 +98,9 @@ func (b *McpTool) Name() string {
}
func (b *McpTool) Info() tools.ToolInfo {
- required := b.tool.InputSchema.Required
- if required == nil {
- required = make([]string, 0)
- }
- parameters := b.tool.InputSchema.Properties
- if parameters == nil {
- parameters = make(map[string]any)
- }
+ input := b.tool.InputSchema.(map[string]any)
+ required, _ := input["required"].([]string)
+ parameters, _ := input["properties"].(map[string]any)
return tools.ToolInfo{
Name: fmt.Sprintf("mcp_%s_%s", b.mcpName, b.tool.Name),
Description: b.tool.Description,
@@ -123,11 +119,9 @@ func runTool(ctx context.Context, name, toolName string, input string) (tools.To
if err != nil {
return tools.NewTextErrorResponse(err.Error()), nil
}
- result, err := c.CallTool(ctx, mcp.CallToolRequest{
- Params: mcp.CallToolParams{
- Name: toolName,
- Arguments: args,
- },
+ result, err := c.CallTool(ctx, &mcp.CallToolParams{
+ Name: toolName,
+ Arguments: args,
})
if err != nil {
return tools.NewTextErrorResponse(err.Error()), nil
@@ -135,8 +129,8 @@ func runTool(ctx context.Context, name, toolName string, input string) (tools.To
output := make([]string, 0, len(result.Content))
for _, v := range result.Content {
- if v, ok := v.(mcp.TextContent); ok {
- output = append(output, v.Text)
+ if vv, ok := v.(*mcp.TextContent); ok {
+ output = append(output, vv.Text)
} else {
output = append(output, fmt.Sprintf("%v", v))
}
@@ -144,8 +138,8 @@ func runTool(ctx context.Context, name, toolName string, input string) (tools.To
return tools.NewTextResponse(strings.Join(output, "\n")), nil
}
-func getOrRenewClient(ctx context.Context, name string) (*client.Client, error) {
- c, ok := mcpClients.Get(name)
+func getOrRenewClient(ctx context.Context, name string) (*mcp.ClientSession, error) {
+ sess, ok := mcpClients.Get(name)
if !ok {
return nil, fmt.Errorf("mcp '%s' not available", name)
}
@@ -157,20 +151,20 @@ func getOrRenewClient(ctx context.Context, name string) (*client.Client, error)
timeout := mcpTimeout(m)
pingCtx, cancel := context.WithTimeout(ctx, timeout)
defer cancel()
- err := c.Ping(pingCtx)
+ err := sess.Ping(pingCtx, nil)
if err == nil {
- return c, nil
+ return sess, nil
}
updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, state.ToolCount)
- c, err = createAndInitializeClient(ctx, name, m, cfg.Resolver())
+ sess, err = createMCPSession(ctx, name, m, cfg.Resolver())
if err != nil {
return nil, err
}
- updateMCPState(name, MCPStateConnected, nil, c, state.ToolCount)
- mcpClients.Set(name, c)
- return c, nil
+ updateMCPState(name, MCPStateConnected, nil, sess, state.ToolCount)
+ mcpClients.Set(name, sess)
+ return sess, nil
}
func (b *McpTool) Run(ctx context.Context, params tools.ToolCall) (tools.ToolResponse, error) {
@@ -197,8 +191,8 @@ func (b *McpTool) Run(ctx context.Context, params tools.ToolCall) (tools.ToolRes
return runTool(ctx, b.mcpName, b.tool.Name, params.Input)
}
-func getTools(ctx context.Context, name string, permissions permission.Service, c *client.Client, workingDir string) ([]tools.BaseTool, error) {
- result, err := c.ListTools(ctx, mcp.ListToolsRequest{})
+func getTools(ctx context.Context, name string, permissions permission.Service, c *mcp.ClientSession, workingDir string) ([]tools.BaseTool, error) {
+ result, err := c.ListTools(ctx, &mcp.ListToolsParams{})
if err != nil {
return nil, err
}
@@ -230,7 +224,7 @@ func GetMCPState(name string) (MCPClientInfo, bool) {
}
// updateMCPState updates the state of an MCP client and publishes an event
-func updateMCPState(name string, state MCPState, err error, client *client.Client, toolCount int) {
+func updateMCPState(name string, state MCPState, err error, client *mcp.ClientSession, toolCount int) {
info := MCPClientInfo{
Name: name,
State: state,
@@ -257,19 +251,14 @@ func updateMCPState(name string, state MCPState, err error, client *client.Clien
})
}
-// publishMCPEventToolsListChanged publishes a tool list changed event
-func publishMCPEventToolsListChanged(name string) {
- mcpBroker.Publish(pubsub.UpdatedEvent, MCPEvent{
- Type: MCPEventToolsListChanged,
- Name: name,
- })
-}
-
// CloseMCPClients closes all MCP clients. This should be called during application shutdown.
func CloseMCPClients() error {
var errs []error
for name, c := range mcpClients.Seq2() {
- if err := c.Close(); err != nil {
+ if err := c.Close(); err != nil &&
+ !errors.Is(err, io.EOF) &&
+ !errors.Is(err, context.Canceled) &&
+ err.Error() != "signal: killed" {
errs = append(errs, fmt.Errorf("close mcp: %s: %w", name, err))
}
}
@@ -277,16 +266,6 @@ func CloseMCPClients() error {
return errors.Join(errs...)
}
-var mcpInitRequest = mcp.InitializeRequest{
- Params: mcp.InitializeParams{
- ProtocolVersion: mcp.LATEST_PROTOCOL_VERSION,
- ClientInfo: mcp.Implementation{
- Name: "Crush",
- Version: version.Version,
- },
- },
-}
-
func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *config.Config) {
var wg sync.WaitGroup
// Initialize states for all configured MCPs
@@ -322,7 +301,7 @@ func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *con
ctx, cancel := context.WithTimeout(ctx, mcpTimeout(m))
defer cancel()
- c, err := createAndInitializeClient(ctx, name, m, cfg.Resolver())
+ c, err := createMCPSession(ctx, name, m, cfg.Resolver())
if err != nil {
return
}
@@ -359,49 +338,46 @@ func updateMcpTools(mcpName string, tools []tools.BaseTool) {
}
}
-func createAndInitializeClient(ctx context.Context, name string, m config.MCPConfig, resolver config.VariableResolver) (*client.Client, error) {
- c, err := createMcpClient(name, m, resolver)
+func createMCPSession(ctx context.Context, name string, m config.MCPConfig, resolver config.VariableResolver) (*mcp.ClientSession, error) {
+ timeout := mcpTimeout(m)
+ mcpCtx, cancel := context.WithCancel(ctx)
+ cancelTimer := time.AfterFunc(timeout, cancel)
+
+ transport, err := createMCPTransport(mcpCtx, m, resolver)
if err != nil {
updateMCPState(name, MCPStateError, err, nil, 0)
slog.Error("error creating mcp client", "error", err, "name", name)
return nil, err
}
- c.OnNotification(func(n mcp.JSONRPCNotification) {
- slog.Debug("Received MCP notification", "name", name, "notification", n)
- switch n.Method {
- case "notifications/tools/list_changed":
- publishMCPEventToolsListChanged(name)
- default:
- slog.Debug("Unhandled MCP notification", "name", name, "method", n.Method)
- }
- })
-
- // XXX: ideally we should be able to use context.WithTimeout here, but,
- // the SSE MCP client will start failing once that context is canceled.
- timeout := mcpTimeout(m)
- mcpCtx, cancel := context.WithCancel(ctx)
- cancelTimer := time.AfterFunc(timeout, cancel)
+ client := mcp.NewClient(
+ &mcp.Implementation{
+ Name: "crush",
+ Version: version.Version,
+ Title: "Crush",
+ },
+ &mcp.ClientOptions{
+ ToolListChangedHandler: func(context.Context, *mcp.ToolListChangedRequest) {
+ mcpBroker.Publish(pubsub.UpdatedEvent, MCPEvent{
+ Type: MCPEventToolsListChanged,
+ Name: name,
+ })
+ },
+ KeepAlive: time.Minute * 10,
+ },
+ )
- if err := c.Start(mcpCtx); err != nil {
+ session, err := client.Connect(mcpCtx, transport, nil)
+ if err != nil {
updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
slog.Error("error starting mcp client", "error", err, "name", name)
- _ = c.Close()
- cancel()
- return nil, err
- }
-
- if _, err := c.Initialize(mcpCtx, mcpInitRequest); err != nil {
- updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
- slog.Error("error initializing mcp client", "error", err, "name", name)
- _ = c.Close()
cancel()
return nil, err
}
cancelTimer.Stop()
slog.Info("Initialized mcp client", "name", name)
- return c, nil
+ return session, nil
}
func maybeTimeoutErr(err error, timeout time.Duration) error {
@@ -411,7 +387,7 @@ func maybeTimeoutErr(err error, timeout time.Duration) error {
return err
}
-func createMcpClient(name string, m config.MCPConfig, resolver config.VariableResolver) (*client.Client, error) {
+func createMCPTransport(ctx context.Context, m config.MCPConfig, resolver config.VariableResolver) (mcp.Transport, error) {
switch m.Type {
case config.MCPStdio:
command, err := resolver.ResolveValue(m.Command)
@@ -421,44 +397,51 @@ func createMcpClient(name string, m config.MCPConfig, resolver config.VariableRe
if strings.TrimSpace(command) == "" {
return nil, fmt.Errorf("mcp stdio config requires a non-empty 'command' field")
}
- return client.NewStdioMCPClientWithOptions(
- home.Long(command),
- m.ResolvedEnv(),
- m.Args,
- transport.WithCommandLogger(mcpLogger{name: name}),
- )
+ cmd := exec.CommandContext(ctx, home.Long(command), m.Args...)
+ cmd.Env = m.ResolvedEnv()
+ return &mcp.CommandTransport{
+ Command: cmd,
+ }, nil
case config.MCPHttp:
if strings.TrimSpace(m.URL) == "" {
return nil, fmt.Errorf("mcp http config requires a non-empty 'url' field")
}
- return client.NewStreamableHttpClient(
- m.URL,
- transport.WithHTTPHeaders(m.ResolvedHeaders()),
- transport.WithHTTPLogger(mcpLogger{name: name}),
- )
- case config.MCPSse:
+ client := &http.Client{
+ Transport: &headerRoundTripper{
+ headers: m.ResolvedHeaders(),
+ },
+ }
+ return &mcp.StreamableClientTransport{
+ Endpoint: m.URL,
+ HTTPClient: client,
+ }, nil
+ case config.MCPSSE:
if strings.TrimSpace(m.URL) == "" {
return nil, fmt.Errorf("mcp sse config requires a non-empty 'url' field")
}
- return client.NewSSEMCPClient(
- m.URL,
- client.WithHeaders(m.ResolvedHeaders()),
- transport.WithSSELogger(mcpLogger{name: name}),
- )
+ client := &http.Client{
+ Transport: &headerRoundTripper{
+ headers: m.ResolvedHeaders(),
+ },
+ }
+ return &mcp.SSEClientTransport{
+ Endpoint: m.URL,
+ HTTPClient: client,
+ }, nil
default:
return nil, fmt.Errorf("unsupported mcp type: %s", m.Type)
}
}
-// for MCP's clients.
-type mcpLogger struct{ name string }
-
-func (l mcpLogger) Errorf(format string, v ...any) {
- slog.Error(fmt.Sprintf(format, v...), "name", l.name)
+type headerRoundTripper struct {
+ headers map[string]string
}
-func (l mcpLogger) Infof(format string, v ...any) {
- slog.Info(fmt.Sprintf(format, v...), "name", l.name)
+func (rt headerRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
+ for k, v := range rt.headers {
+ req.Header.Set(k, v)
+ }
+ return http.DefaultTransport.RoundTrip(req)
}
func mcpTimeout(m config.MCPConfig) time.Duration {
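For context on the new API surface, here is a minimal, self-contained sketch of the flow this patch adopts, using only calls that appear above (mcp.NewClient, mcp.CommandTransport, Connect, ListTools, CallTool with *mcp.TextContent results); the server command and tool name are placeholders, and the result field names are assumptions inferred from how the patch uses them. For HTTP/SSE servers the only change is swapping the transport for mcp.StreamableClientTransport or mcp.SSEClientTransport with an http.Client whose Transport injects headers, as headerRoundTripper does above.

package main

import (
	"context"
	"fmt"
	"log"
	"os/exec"

	"github.com/modelcontextprotocol/go-sdk/mcp"
)

func main() {
	ctx := context.Background()

	client := mcp.NewClient(
		&mcp.Implementation{Name: "crush", Version: "dev", Title: "Crush"},
		&mcp.ClientOptions{}, // handlers such as ToolListChangedHandler are optional
	)

	// Stdio transport: the MCP server runs as a subprocess (placeholder command).
	cmd := exec.CommandContext(ctx, "my-mcp-server")
	session, err := client.Connect(ctx, &mcp.CommandTransport{Command: cmd}, nil)
	if err != nil {
		log.Fatal(err)
	}
	defer session.Close()

	// List the server's tools.
	tools, err := session.ListTools(ctx, &mcp.ListToolsParams{})
	if err != nil {
		log.Fatal(err)
	}
	for _, t := range tools.Tools {
		fmt.Println("tool:", t.Name)
	}

	// Call a (placeholder) tool and print its text content.
	result, err := session.CallTool(ctx, &mcp.CallToolParams{
		Name:      "echo",
		Arguments: map[string]any{"message": "hi"},
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, c := range result.Content {
		if tc, ok := c.(*mcp.TextContent); ok {
			fmt.Println(tc.Text)
		}
	}
}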
From 7ac96ef0686106b53324a6901fac10a9f00e8a4f Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Fri, 10 Oct 2025 11:45:12 -0300
Subject: [PATCH 211/236] fix(vertex): small fix for anthropic models via
google vertex (#1214)
---
internal/llm/provider/vertexai.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/internal/llm/provider/vertexai.go b/internal/llm/provider/vertexai.go
index 871ff092b058af70833ba615260efcdbc09f2514..e7591af70c24a528d48895c11e653f023ba86c89 100644
--- a/internal/llm/provider/vertexai.go
+++ b/internal/llm/provider/vertexai.go
@@ -30,7 +30,7 @@ func newVertexAIClient(opts providerClientOptions) VertexAIClient {
}
model := opts.model(opts.modelType)
- if strings.Contains(model.ID, "anthropic") || strings.Contains(model.ID, "claude-sonnet") {
+ if strings.Contains(model.ID, "anthropic") || strings.Contains(model.ID, "claude") || strings.Contains(model.ID, "sonnet") {
return newAnthropicClient(opts, AnthropicClientTypeVertex)
}
return &geminiClient{
From 4969c34d18747159dcdb2f6c8543afe3ef2c2e0d Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Sat, 11 Oct 2025 18:07:50 -0300
Subject: [PATCH 212/236] fix(tui): panic (#1220)
Signed-off-by: Carlos Alexandro Becker
---
internal/tui/tui.go | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/internal/tui/tui.go b/internal/tui/tui.go
index 74d82e15514c70ee96b507a01b8f611d3ade6a4d..fe3875d256b3b7c7e3d2fdafecca57b20c3d2fbb 100644
--- a/internal/tui/tui.go
+++ b/internal/tui/tui.go
@@ -603,9 +603,9 @@ func (a *appModel) View() tea.View {
view.Layer = canvas
view.Cursor = cursor
view.ProgressBar = tea.NewProgressBar(tea.ProgressBarNone, 0)
- if a.app.CoderAgent.IsBusy() {
- // use a random percentage to prevent the ghostty from hiding it after
- // a timeout.
+ if a.app != nil && a.app.CoderAgent != nil && a.app.CoderAgent.IsBusy() {
+ // HACK: use a random percentage to prevent ghostty from hiding it
+ // after a timeout.
view.ProgressBar = tea.NewProgressBar(tea.ProgressBarIndeterminate, rand.Intn(100))
}
return view
From a824240d128010ef52f870356dea6ba7c5b5f00e Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Sat, 11 Oct 2025 18:11:59 -0300
Subject: [PATCH 213/236] fix(tui): fix progress not cleaning up sometimes
(#1219)
Signed-off-by: Carlos Alexandro Becker
---
go.mod | 2 +-
go.sum | 4 ++--
internal/app/app.go | 8 ++++----
internal/tui/tui.go | 1 -
4 files changed, 7 insertions(+), 8 deletions(-)
diff --git a/go.mod b/go.mod
index 843e7f231f729e86d4e299349fa1293005ad3971..22daf1ce6d08d57d64174fcc705c9f3026eb48e4 100644
--- a/go.mod
+++ b/go.mod
@@ -13,7 +13,7 @@ require (
github.com/bmatcuk/doublestar/v4 v4.9.1
github.com/charlievieth/fastwalk v1.0.14
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2
- github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250930175933-4cafc092c5e7
+ github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20251011205917-3b687ffc1619
github.com/charmbracelet/catwalk v0.6.3
github.com/charmbracelet/fang v0.4.3
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018
diff --git a/go.sum b/go.sum
index 563016cca9ffcec4a7be40aeed80822a105d1769..30ee83eba84c07b1135a8983fdfd04e162258c64 100644
--- a/go.sum
+++ b/go.sum
@@ -78,8 +78,8 @@ github.com/charlievieth/fastwalk v1.0.14 h1:3Eh5uaFGwHZd8EGwTjJnSpBkfwfsak9h6ICg
github.com/charlievieth/fastwalk v1.0.14/go.mod h1:diVcUreiU1aQ4/Wu3NbxxH4/KYdKpLDojrQ1Bb2KgNY=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2 h1:973OHYuq2Jx9deyuPwe/6lsuQrDCatOsjP8uCd02URE=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw=
-github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250930175933-4cafc092c5e7 h1:wH4F+UvxcZSDOxy8j45tghiRo8amrYHejbE9+1C6xv0=
-github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250930175933-4cafc092c5e7/go.mod h1:5IzIGXU1n0foRc8bRAherC8ZuQCQURPlwx3ANLq1138=
+github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20251011205917-3b687ffc1619 h1:hjOhtqsxa+LVuCAkzhfA43wtusOaUPyQdSTg/wbRscw=
+github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20251011205917-3b687ffc1619/go.mod h1:5IzIGXU1n0foRc8bRAherC8ZuQCQURPlwx3ANLq1138=
github.com/charmbracelet/catwalk v0.6.3 h1:RyL8Yqd4QsV3VyvBEsePScv1z2vKaZxPfQQ0XB5L5AA=
github.com/charmbracelet/catwalk v0.6.3/go.mod h1:ReU4SdrLfe63jkEjWMdX2wlZMV3k9r11oQAmzN0m+KY=
github.com/charmbracelet/colorprofile v0.3.2 h1:9J27WdztfJQVAQKX2WOlSSRB+5gaKqqITmrvb1uTIiI=
diff --git a/internal/app/app.go b/internal/app/app.go
index 29631c1be84e96617adfeb705b2e35e0b68725e5..8f305f765f3391e1a6afce294e3c42525ec65668 100644
--- a/internal/app/app.go
+++ b/internal/app/app.go
@@ -107,10 +107,6 @@ func (app *App) RunNonInteractive(ctx context.Context, prompt string, quiet bool
ctx, cancel := context.WithCancel(ctx)
defer cancel()
- // Start progress bar and spinner
- fmt.Printf(ansi.SetIndeterminateProgressBar)
- defer fmt.Printf(ansi.ResetProgressBar)
-
var spinner *format.Spinner
if !quiet {
spinner = format.NewSpinner(ctx, cancel, "Generating")
@@ -154,7 +150,11 @@ func (app *App) RunNonInteractive(ctx context.Context, prompt string, quiet bool
messageEvents := app.Messages.Subscribe(ctx)
messageReadBytes := make(map[string]int)
+ defer fmt.Printf(ansi.ResetProgressBar)
for {
+ // HACK: add it again on every iteration so it doesn't get hidden by
+ // the terminal due to inactivity.
+ fmt.Printf(ansi.SetIndeterminateProgressBar)
select {
case result := <-done:
stopSpinner()
diff --git a/internal/tui/tui.go b/internal/tui/tui.go
index fe3875d256b3b7c7e3d2fdafecca57b20c3d2fbb..26d23f46ee62aafe07d1bb6209a4fedea929c6e1 100644
--- a/internal/tui/tui.go
+++ b/internal/tui/tui.go
@@ -602,7 +602,6 @@ func (a *appModel) View() tea.View {
view.Layer = canvas
view.Cursor = cursor
- view.ProgressBar = tea.NewProgressBar(tea.ProgressBarNone, 0)
if a.app != nil && a.app.CoderAgent != nil && a.app.CoderAgent.IsBusy() {
// HACK: use a random percentage to prevent ghostty from hiding it
// after a timeout.
From e9be0dfce158697df19ca28a28e766c8af9b8329 Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Mon, 13 Oct 2025 02:56:30 -0300
Subject: [PATCH 214/236] chore(legal): @daps94 has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index dc7d5873d3110320f09b8457b696a47d307ef41a..61a8b03447ae07a5dc775ca59a5eef7aacfe9c2b 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -703,6 +703,14 @@
"created_at": "2025-10-06T19:31:50Z",
"repoId": 987670088,
"pullRequestNo": 1200
+ },
+ {
+ "name": "daps94",
+ "id": 35882689,
+ "comment_id": 3395964275,
+ "created_at": "2025-10-13T05:56:20Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1223
}
]
}
\ No newline at end of file
From 8814881209fa099d2a8a373d077d0233637cd619 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 13 Oct 2025 10:18:21 +0000
Subject: [PATCH 215/236] chore(deps): bump the all group with 4 updates
(#1225)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
go.mod | 8 ++++----
go.sum | 20 ++++++++++----------
2 files changed, 14 insertions(+), 14 deletions(-)
diff --git a/go.mod b/go.mod
index 22daf1ce6d08d57d64174fcc705c9f3026eb48e4..5f32e148b92ac8e6c456157465061c759d267dd9 100644
--- a/go.mod
+++ b/go.mod
@@ -14,7 +14,7 @@ require (
github.com/charlievieth/fastwalk v1.0.14
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20251011205917-3b687ffc1619
- github.com/charmbracelet/catwalk v0.6.3
+ github.com/charmbracelet/catwalk v0.6.4
github.com/charmbracelet/fang v0.4.3
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250917201909-41ff0bf215ea
@@ -117,7 +117,7 @@ require (
github.com/ncruces/julianday v1.0.0 // indirect
github.com/pierrec/lz4/v4 v4.1.22 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
- github.com/posthog/posthog-go v1.6.10
+ github.com/posthog/posthog-go v1.6.11
github.com/rivo/uniseg v0.4.7
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect
@@ -149,10 +149,10 @@ require (
golang.org/x/sync v0.17.0 // indirect
golang.org/x/sys v0.36.0 // indirect
golang.org/x/term v0.35.0 // indirect
- golang.org/x/text v0.29.0
+ golang.org/x/text v0.30.0
golang.org/x/time v0.8.0 // indirect
google.golang.org/api v0.211.0 // indirect
- google.golang.org/genai v1.28.0
+ google.golang.org/genai v1.30.0
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 // indirect
google.golang.org/grpc v1.71.0 // indirect
google.golang.org/protobuf v1.36.8 // indirect
diff --git a/go.sum b/go.sum
index 30ee83eba84c07b1135a8983fdfd04e162258c64..2d53e85a40001ea9241e4c7ee728baa734a889d9 100644
--- a/go.sum
+++ b/go.sum
@@ -80,8 +80,8 @@ github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20251011205917-3b687ffc1619 h1:hjOhtqsxa+LVuCAkzhfA43wtusOaUPyQdSTg/wbRscw=
github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20251011205917-3b687ffc1619/go.mod h1:5IzIGXU1n0foRc8bRAherC8ZuQCQURPlwx3ANLq1138=
-github.com/charmbracelet/catwalk v0.6.3 h1:RyL8Yqd4QsV3VyvBEsePScv1z2vKaZxPfQQ0XB5L5AA=
-github.com/charmbracelet/catwalk v0.6.3/go.mod h1:ReU4SdrLfe63jkEjWMdX2wlZMV3k9r11oQAmzN0m+KY=
+github.com/charmbracelet/catwalk v0.6.4 h1:zFHtuP94mSDE48nST3DS3a37wfsQqNcVnsFkS3v6N6E=
+github.com/charmbracelet/catwalk v0.6.4/go.mod h1:ReU4SdrLfe63jkEjWMdX2wlZMV3k9r11oQAmzN0m+KY=
github.com/charmbracelet/colorprofile v0.3.2 h1:9J27WdztfJQVAQKX2WOlSSRB+5gaKqqITmrvb1uTIiI=
github.com/charmbracelet/colorprofile v0.3.2/go.mod h1:mTD5XzNeWHj8oqHb+S1bssQb7vIHbepiebQ2kPKVKbI=
github.com/charmbracelet/fang v0.4.3 h1:qXeMxnL4H6mSKBUhDefHu8NfikFbP/MBNTfqTrXvzmY=
@@ -237,8 +237,8 @@ github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjL
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/posthog/posthog-go v1.6.10 h1:OA6bkiUg89rI7f5cSXbcrH5+wLinyS6hHplnD92Pu/M=
-github.com/posthog/posthog-go v1.6.10/go.mod h1:LcC1Nu4AgvV22EndTtrMXTy+7RGVC0MhChSw7Qk5XkY=
+github.com/posthog/posthog-go v1.6.11 h1:5G8Y3pxnOpc3S4+PK1z1dCmZRuldiWxBsqqvvSfC2+w=
+github.com/posthog/posthog-go v1.6.11/go.mod h1:LcC1Nu4AgvV22EndTtrMXTy+7RGVC0MhChSw7Qk5XkY=
github.com/pressly/goose/v3 v3.26.0 h1:KJakav68jdH0WDvoAcj8+n61WqOIaPGgH0bJWS6jpmM=
github.com/pressly/goose/v3 v3.26.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY=
github.com/qjebbs/go-jsons v1.0.0-alpha.4 h1:Qsb4ohRUHQODIUAsJKdKJ/SIDbsO7oGOzsfy+h1yQZs=
@@ -410,8 +410,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
-golang.org/x/text v0.29.0 h1:1neNs90w9YzJ9BocxfsQNHKuAT4pkghyXc4nhZ6sJvk=
-golang.org/x/text v0.29.0/go.mod h1:7MhJOA9CD2qZyOKYazxdYMF85OwPdEr9jTtBpO7ydH4=
+golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
+golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg=
golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@@ -420,13 +420,13 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
-golang.org/x/tools v0.36.0 h1:kWS0uv/zsvHEle1LbV5LE8QujrxB3wfQyxHfhOk0Qkg=
-golang.org/x/tools v0.36.0/go.mod h1:WBDiHKJK8YgLHlcQPYQzNCkUxUypCaa5ZegCVutKm+s=
+golang.org/x/tools v0.37.0 h1:DVSRzp7FwePZW356yEAChSdNcQo6Nsp+fex1SUW09lE=
+golang.org/x/tools v0.37.0/go.mod h1:MBN5QPQtLMHVdvsbtarmTNukZDdgwdwlO5qGacAzF0w=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.211.0 h1:IUpLjq09jxBSV1lACO33CGY3jsRcbctfGzhj+ZSE/Bg=
google.golang.org/api v0.211.0/go.mod h1:XOloB4MXFH4UTlQSGuNUxw0UT74qdENK8d6JNsXKLi0=
-google.golang.org/genai v1.28.0 h1:6qpUWFH3PkHPhxNnu3wjaCVJ6Jri1EIR7ks07f9IpIk=
-google.golang.org/genai v1.28.0/go.mod h1:7pAilaICJlQBonjKKJNhftDFv3SREhZcTe9F6nRcjbg=
+google.golang.org/genai v1.30.0 h1:7021aneIvl24nEBLbtQFEWleHsMbjzpcQvkT4WcJ1dc=
+google.golang.org/genai v1.30.0/go.mod h1:7pAilaICJlQBonjKKJNhftDFv3SREhZcTe9F6nRcjbg=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 h1:e0AIkUUhxyBKh6ssZNrAMeqhA7RKUj42346d1y02i2g=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg=
From e5f171ba8640c1eff33db28e42914f7803311cc1 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 13 Oct 2025 10:20:28 +0000
Subject: [PATCH 216/236] chore(deps): bump
stefanzweifel/git-auto-commit-action from 6.0.1 to 7.0.0 in the all group
(#1226)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
.github/workflows/schema-update.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/schema-update.yml b/.github/workflows/schema-update.yml
index bc1a69c68273c007a764c268958858be3b62bcd2..466c3a25fb3698a183ed84436d5dca9813b2dcb6 100644
--- a/.github/workflows/schema-update.yml
+++ b/.github/workflows/schema-update.yml
@@ -17,7 +17,7 @@ jobs:
with:
go-version-file: go.mod
- run: go run . schema > ./schema.json
- - uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 # v5
+ - uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v5
with:
commit_message: "chore: auto-update generated files"
branch: main
From 9ffa58723de56761baef45b595f0a9e48266aa6d Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Tue, 14 Oct 2025 16:15:51 -0300
Subject: [PATCH 218/236] fix(ls): properly handle limits (#1230)
Signed-off-by: Carlos Alexandro Becker
---
internal/config/config.go | 4 ++--
internal/llm/tools/ls.go | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/internal/config/config.go b/internal/config/config.go
index b37b98cad717e789ad16237b3ca250a2f1555ba9..ff948b874ea1613ca126053547dcf9b7d4cc3297 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -143,7 +143,7 @@ type Completions struct {
}
func (c Completions) Limits() (depth, items int) {
- return ptrValOr(c.MaxDepth, -1), ptrValOr(c.MaxItems, -1)
+ return ptrValOr(c.MaxDepth, 0), ptrValOr(c.MaxItems, 0)
}
type Permissions struct {
@@ -269,7 +269,7 @@ type ToolLs struct {
}
func (t ToolLs) Limits() (depth, items int) {
- return ptrValOr(t.MaxDepth, -1), ptrValOr(t.MaxItems, -1)
+ return ptrValOr(t.MaxDepth, 0), ptrValOr(t.MaxItems, 0)
}
// Config holds the configuration for crush.
diff --git a/internal/llm/tools/ls.go b/internal/llm/tools/ls.go
index 305f7f10249594ff06ac008a8bf81145d7d834de..af25259dd8c69ff8d52d467e20532612681b51b1 100644
--- a/internal/llm/tools/ls.go
+++ b/internal/llm/tools/ls.go
@@ -157,7 +157,7 @@ func ListDirectoryTree(searchPath string, params LSParams) (string, LSResponseMe
ls := config.Get().Tools.Ls
depth, limit := ls.Limits()
- maxFiles := min(limit, maxLSFiles)
+ maxFiles := cmp.Or(limit, maxLSFiles)
files, truncated, err := fsext.ListDirectory(
searchPath,
params.Ignore,
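A minimal sketch of why `cmp.Or` fits here better than `min`; the cap value below is illustrative, not necessarily the one crush uses. With 0 now meaning "unset", cmp.Or falls back to the cap, whereas min against the old -1 sentinel selected the sentinel itself:

package main

import (
    "cmp"
    "fmt"
)

// maxLSFiles is an illustrative cap for the sketch.
const maxLSFiles = 1000

func effectiveLimit(configured int) int {
    // cmp.Or returns the first non-zero argument: an unset (0) limit falls
    // back to the cap, while an explicit limit is used as-is.
    return cmp.Or(configured, maxLSFiles)
}

func main() {
    fmt.Println(effectiveLimit(0))   // 1000
    fmt.Println(effectiveLimit(250)) // 250
}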
From 02cd9ab3861d611cf20870b645983ee9fe073874 Mon Sep 17 00:00:00 2001
From: Christian Rocha
Date: Tue, 14 Oct 2025 15:25:18 -0400
Subject: [PATCH 219/236] chore(task): also push empty named commit in release
(#1231)
Co-authored-by: Andrey Nering
---
Taskfile.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Taskfile.yaml b/Taskfile.yaml
index 1c4225158fc21508e8dccac8d6f47610f7d81faf..540cb24bff123664a42eb86afcea593513c4f25f 100644
--- a/Taskfile.yaml
+++ b/Taskfile.yaml
@@ -101,7 +101,7 @@ tasks:
- git commit --allow-empty -m "{{.NEXT}}"
- git tag --annotate --sign {{.NEXT}} {{.CLI_ARGS}}
- echo "Pushing {{.NEXT}}..."
- - git push origin --tags
+ - git push origin main --follow-tags
fetch-tags:
cmds:
From 595c9401bb70818601dda9a483bc96402408d950 Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Tue, 14 Oct 2025 16:28:55 -0300
Subject: [PATCH 220/236] chore(task): set commit desc automatically
---
Taskfile.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Taskfile.yaml b/Taskfile.yaml
index 540cb24bff123664a42eb86afcea593513c4f25f..92b162dfbb847356e09eb17ea5996e6093a305b2 100644
--- a/Taskfile.yaml
+++ b/Taskfile.yaml
@@ -99,7 +99,7 @@ tasks:
cmds:
- task: fetch-tags
- git commit --allow-empty -m "{{.NEXT}}"
- - git tag --annotate --sign {{.NEXT}} {{.CLI_ARGS}}
+ - git tag --annotate -m "{{.NEXT}}" {{.NEXT}} {{.CLI_ARGS}}
- echo "Pushing {{.NEXT}}..."
- git push origin main --follow-tags
From 8c9ce8e765d0b0db1e6cf8065983db19d2997a03 Mon Sep 17 00:00:00 2001
From: Amolith
Date: Wed, 15 Oct 2025 02:26:30 -0600
Subject: [PATCH 222/236] feat: paste/close bindings in user cmd dialog (#1221)
Co-authored-by: Crush
---
.../tui/components/dialogs/commands/arguments.go | 9 ++++++++-
internal/tui/components/dialogs/commands/keys.go | 14 ++++++++++++++
2 files changed, 22 insertions(+), 1 deletion(-)
diff --git a/internal/tui/components/dialogs/commands/arguments.go b/internal/tui/components/dialogs/commands/arguments.go
index 03110eeaf2b8fbb909f1f9e4fbd57344699732e3..b1a274319719b9f550179b35aa98fd8310e0bb7b 100644
--- a/internal/tui/components/dialogs/commands/arguments.go
+++ b/internal/tui/components/dialogs/commands/arguments.go
@@ -128,12 +128,19 @@ func (c *commandArgumentsDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
c.inputs[c.focusIndex].Blur()
c.focusIndex = (c.focusIndex - 1 + len(c.inputs)) % len(c.inputs)
c.inputs[c.focusIndex].Focus()
-
+ case key.Matches(msg, c.keys.Paste):
+ return c, textinput.Paste
+ case key.Matches(msg, c.keys.Close):
+ return c, util.CmdHandler(dialogs.CloseDialogMsg{})
default:
var cmd tea.Cmd
c.inputs[c.focusIndex], cmd = c.inputs[c.focusIndex].Update(msg)
return c, cmd
}
+ case tea.PasteMsg:
+ var cmd tea.Cmd
+ c.inputs[c.focusIndex], cmd = c.inputs[c.focusIndex].Update(msg)
+ return c, cmd
}
return c, nil
}
diff --git a/internal/tui/components/dialogs/commands/keys.go b/internal/tui/components/dialogs/commands/keys.go
index 7b79a29c28a024154a3b4d8c763969585409fd00..b704f227fe6f183a430bd25d3af62f4ef50b3365 100644
--- a/internal/tui/components/dialogs/commands/keys.go
+++ b/internal/tui/components/dialogs/commands/keys.go
@@ -76,6 +76,8 @@ type ArgumentsDialogKeyMap struct {
Confirm key.Binding
Next key.Binding
Previous key.Binding
+ Paste key.Binding
+ Close key.Binding
}
func DefaultArgumentsDialogKeyMap() ArgumentsDialogKeyMap {
@@ -93,6 +95,14 @@ func DefaultArgumentsDialogKeyMap() ArgumentsDialogKeyMap {
key.WithKeys("shift+tab", "up"),
key.WithHelp("shift+tab/↑", "previous"),
),
+ Paste: key.NewBinding(
+ key.WithKeys("ctrl+v"),
+ key.WithHelp("ctrl+v", "paste"),
+ ),
+ Close: key.NewBinding(
+ key.WithKeys("esc", "alt+esc"),
+ key.WithHelp("esc", "cancel"),
+ ),
}
}
@@ -102,6 +112,8 @@ func (k ArgumentsDialogKeyMap) KeyBindings() []key.Binding {
k.Confirm,
k.Next,
k.Previous,
+ k.Paste,
+ k.Close,
}
}
@@ -122,5 +134,7 @@ func (k ArgumentsDialogKeyMap) ShortHelp() []key.Binding {
k.Confirm,
k.Next,
k.Previous,
+ k.Paste,
+ k.Close,
}
}
From 69be8c20e2152d3a0c053d005013286d8bfe57c6 Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Wed, 15 Oct 2025 06:57:21 -0300
Subject: [PATCH 223/236] fix(bedrock): detect credentials set by `aws
configure` (#1232)
---
internal/config/load.go | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/internal/config/load.go b/internal/config/load.go
index 9fb45028d6936a652f2657f51707b6cde73f4084..c63a9663613bdfdea6a9c9ccef9f53d375e35c74 100644
--- a/internal/config/load.go
+++ b/internal/config/load.go
@@ -605,6 +605,11 @@ func hasAWSCredentials(env env.Env) bool {
env.Get("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
return true
}
+
+ if _, err := os.Stat(filepath.Join(home.Dir(), ".aws/credentials")); err == nil {
+ return true
+ }
+
return false
}
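A self-contained sketch of the same check using only the standard library; os.UserHomeDir stands in for crush's home.Dir() helper, so treat this as an approximation rather than the exact code path:

package main

import (
    "fmt"
    "os"
    "path/filepath"
)

// hasSharedCredentialsFile reports whether the default shared credentials
// file written by `aws configure` exists.
func hasSharedCredentialsFile() bool {
    home, err := os.UserHomeDir()
    if err != nil {
        return false
    }
    _, err = os.Stat(filepath.Join(home, ".aws", "credentials"))
    return err == nil
}

func main() {
    fmt.Println(hasSharedCredentialsFile())
}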
From 1a40fbabbd8cfce50824072aa3b52bafc9a56a1c Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 15 Oct 2025 09:18:43 -0300
Subject: [PATCH 224/236] fix(grep): check mime type (#1228)
Signed-off-by: Carlos Alexandro Becker
---
internal/llm/tools/grep.go | 44 +++-----
internal/llm/tools/grep_test.go | 192 ++++++++++++++++++++++++++++++++
2 files changed, 207 insertions(+), 29 deletions(-)
diff --git a/internal/llm/tools/grep.go b/internal/llm/tools/grep.go
index cbf50360b9355c05797690678a99d1310b19556f..237d4e18dab0bc518b9d4b6e2c73ef5035d2b348 100644
--- a/internal/llm/tools/grep.go
+++ b/internal/llm/tools/grep.go
@@ -7,6 +7,7 @@ import (
"encoding/json"
"fmt"
"io"
+ "net/http"
"os"
"os/exec"
"path/filepath"
@@ -390,8 +391,8 @@ func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error
}
func fileContainsPattern(filePath string, pattern *regexp.Regexp) (bool, int, string, error) {
- // Quick binary file detection
- if isBinaryFile(filePath) {
+ // Only search text files.
+ if !isTextFile(filePath) {
return false, 0, "", nil
}
@@ -414,45 +415,30 @@ func fileContainsPattern(filePath string, pattern *regexp.Regexp) (bool, int, st
return false, 0, "", scanner.Err()
}
-var binaryExts = map[string]struct{}{
- ".exe": {}, ".dll": {}, ".so": {}, ".dylib": {},
- ".bin": {}, ".obj": {}, ".o": {}, ".a": {},
- ".zip": {}, ".tar": {}, ".gz": {}, ".bz2": {},
- ".jpg": {}, ".jpeg": {}, ".png": {}, ".gif": {},
- ".pdf": {}, ".doc": {}, ".docx": {}, ".xls": {},
- ".mp3": {}, ".mp4": {}, ".avi": {}, ".mov": {},
-}
-
-// isBinaryFile performs a quick check to determine if a file is binary
-func isBinaryFile(filePath string) bool {
- // Check file extension first (fastest)
- ext := strings.ToLower(filepath.Ext(filePath))
- if _, isBinary := binaryExts[ext]; isBinary {
- return true
- }
-
- // Quick content check for files without clear extensions
+// isTextFile checks if a file is a text file by examining its MIME type.
+func isTextFile(filePath string) bool {
file, err := os.Open(filePath)
if err != nil {
- return false // If we can't open it, let the caller handle the error
+ return false
}
defer file.Close()
- // Read first 512 bytes to check for null bytes
+ // Read first 512 bytes for MIME type detection.
buffer := make([]byte, 512)
n, err := file.Read(buffer)
if err != nil && err != io.EOF {
return false
}
- // Check for null bytes (common in binary files)
- for i := range n {
- if buffer[i] == 0 {
- return true
- }
- }
+ // Detect content type.
+ contentType := http.DetectContentType(buffer[:n])
- return false
+ // Check if it's a text MIME type.
+ return strings.HasPrefix(contentType, "text/") ||
+ contentType == "application/json" ||
+ contentType == "application/xml" ||
+ contentType == "application/javascript" ||
+ contentType == "application/x-sh"
}
func globToRegex(glob string) string {
diff --git a/internal/llm/tools/grep_test.go b/internal/llm/tools/grep_test.go
index 53c96b22df444adfba59c6b13995a104411a57be..435b3045b93a8e1297ff2aaeff9ee8977b974b56 100644
--- a/internal/llm/tools/grep_test.go
+++ b/internal/llm/tools/grep_test.go
@@ -198,3 +198,195 @@ func BenchmarkRegexCacheVsCompile(b *testing.B) {
}
})
}
+
+func TestIsTextFile(t *testing.T) {
+ t.Parallel()
+ tempDir := t.TempDir()
+
+ tests := []struct {
+ name string
+ filename string
+ content []byte
+ wantText bool
+ }{
+ {
+ name: "go file",
+ filename: "test.go",
+ content: []byte("package main\n\nfunc main() {}\n"),
+ wantText: true,
+ },
+ {
+ name: "yaml file",
+ filename: "config.yaml",
+ content: []byte("key: value\nlist:\n - item1\n - item2\n"),
+ wantText: true,
+ },
+ {
+ name: "yml file",
+ filename: "config.yml",
+ content: []byte("key: value\n"),
+ wantText: true,
+ },
+ {
+ name: "json file",
+ filename: "data.json",
+ content: []byte(`{"key": "value"}`),
+ wantText: true,
+ },
+ {
+ name: "javascript file",
+ filename: "script.js",
+ content: []byte("console.log('hello');\n"),
+ wantText: true,
+ },
+ {
+ name: "typescript file",
+ filename: "script.ts",
+ content: []byte("const x: string = 'hello';\n"),
+ wantText: true,
+ },
+ {
+ name: "markdown file",
+ filename: "README.md",
+ content: []byte("# Title\n\nSome content\n"),
+ wantText: true,
+ },
+ {
+ name: "shell script",
+ filename: "script.sh",
+ content: []byte("#!/bin/bash\necho 'hello'\n"),
+ wantText: true,
+ },
+ {
+ name: "python file",
+ filename: "script.py",
+ content: []byte("print('hello')\n"),
+ wantText: true,
+ },
+ {
+ name: "xml file",
+ filename: "data.xml",
+ content: []byte("\n\n"),
+ wantText: true,
+ },
+ {
+ name: "plain text",
+ filename: "file.txt",
+ content: []byte("plain text content\n"),
+ wantText: true,
+ },
+ {
+ name: "css file",
+ filename: "style.css",
+ content: []byte("body { color: red; }\n"),
+ wantText: true,
+ },
+ {
+ name: "scss file",
+ filename: "style.scss",
+ content: []byte("$primary: blue;\nbody { color: $primary; }\n"),
+ wantText: true,
+ },
+ {
+ name: "sass file",
+ filename: "style.sass",
+ content: []byte("$primary: blue\nbody\n color: $primary\n"),
+ wantText: true,
+ },
+ {
+ name: "rust file",
+ filename: "main.rs",
+ content: []byte("fn main() {\n println!(\"Hello, world!\");\n}\n"),
+ wantText: true,
+ },
+ {
+ name: "zig file",
+ filename: "main.zig",
+ content: []byte("const std = @import(\"std\");\npub fn main() void {}\n"),
+ wantText: true,
+ },
+ {
+ name: "java file",
+ filename: "Main.java",
+ content: []byte("public class Main {\n public static void main(String[] args) {}\n}\n"),
+ wantText: true,
+ },
+ {
+ name: "c file",
+ filename: "main.c",
+ content: []byte("#include \nint main() { return 0; }\n"),
+ wantText: true,
+ },
+ {
+ name: "cpp file",
+ filename: "main.cpp",
+ content: []byte("#include \nint main() { return 0; }\n"),
+ wantText: true,
+ },
+ {
+ name: "fish shell",
+ filename: "script.fish",
+ content: []byte("#!/usr/bin/env fish\necho 'hello'\n"),
+ wantText: true,
+ },
+ {
+ name: "powershell file",
+ filename: "script.ps1",
+ content: []byte("Write-Host 'Hello, World!'\n"),
+ wantText: true,
+ },
+ {
+ name: "cmd batch file",
+ filename: "script.bat",
+ content: []byte("@echo off\necho Hello, World!\n"),
+ wantText: true,
+ },
+ {
+ name: "cmd file",
+ filename: "script.cmd",
+ content: []byte("@echo off\necho Hello, World!\n"),
+ wantText: true,
+ },
+ {
+ name: "binary exe",
+ filename: "binary.exe",
+ content: []byte{0x4D, 0x5A, 0x90, 0x00, 0x03, 0x00, 0x00, 0x00},
+ wantText: false,
+ },
+ {
+ name: "png image",
+ filename: "image.png",
+ content: []byte{0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A},
+ wantText: false,
+ },
+ {
+ name: "jpeg image",
+ filename: "image.jpg",
+ content: []byte{0xFF, 0xD8, 0xFF, 0xE0, 0x00, 0x10, 0x4A, 0x46},
+ wantText: false,
+ },
+ {
+ name: "zip archive",
+ filename: "archive.zip",
+ content: []byte{0x50, 0x4B, 0x03, 0x04, 0x14, 0x00, 0x00, 0x00},
+ wantText: false,
+ },
+ {
+ name: "pdf file",
+ filename: "document.pdf",
+ content: []byte("%PDF-1.4\n%âãÏÓ\n"),
+ wantText: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ filePath := filepath.Join(tempDir, tt.filename)
+ require.NoError(t, os.WriteFile(filePath, tt.content, 0o644))
+
+ got := isTextFile(filePath)
+ require.Equal(t, tt.wantText, got, "isTextFile(%s) = %v, want %v", tt.filename, got, tt.wantText)
+ })
+ }
+}
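The new isTextFile builds on net/http's content sniffing; a minimal sketch of that primitive, with expected outputs following the standard library's sniffing rules noted in comments:

package main

import (
    "fmt"
    "net/http"
)

func main() {
    // DetectContentType looks at no more than the first 512 bytes and always
    // returns a MIME type, falling back to application/octet-stream.
    fmt.Println(http.DetectContentType([]byte("package main\n\nfunc main() {}\n")))             // text/plain; charset=utf-8
    fmt.Println(http.DetectContentType([]byte{0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A})) // image/png
    fmt.Println(http.DetectContentType([]byte{0x00, 0x01, 0x02}))                               // application/octet-stream
}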
From 23e0fd441709aa040dda95d00de19ff2770ab037 Mon Sep 17 00:00:00 2001
From: Evan
Date: Wed, 15 Oct 2025 15:58:56 -0400
Subject: [PATCH 225/236] fix(mcp): add type assertion guards (#1239)
Signed-off-by: Evan Wies
---
internal/llm/agent/mcp-tools.go | 23 ++++++++++++++++++++---
1 file changed, 20 insertions(+), 3 deletions(-)
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index 67f0b39ccfb6eb8aad3abd337e7545a59766d872..038cd43f4469953779799b70850355ef5dcda45f 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -98,9 +98,26 @@ func (b *McpTool) Name() string {
}
func (b *McpTool) Info() tools.ToolInfo {
- input := b.tool.InputSchema.(map[string]any)
- required, _ := input["required"].([]string)
- parameters, _ := input["properties"].(map[string]any)
+ var parameters map[string]any
+ var required []string
+
+ if input, ok := b.tool.InputSchema.(map[string]any); ok {
+ if props, ok := input["properties"].(map[string]any); ok {
+ parameters = props
+ }
+ if req, ok := input["required"].([]any); ok {
+ // Convert []any -> []string when elements are strings
+ for _, v := range req {
+ if s, ok := v.(string); ok {
+ required = append(required, s)
+ }
+ }
+ } else if reqStr, ok := input["required"].([]string); ok {
+ // Handle case where it's already []string
+ required = reqStr
+ }
+ }
+
return tools.ToolInfo{
Name: fmt.Sprintf("mcp_%s_%s", b.mcpName, b.tool.Name),
Description: b.tool.Description,
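The reason for the two-path handling above: encoding/json always decodes a JSON array into []any, so a plain []string assertion yields nothing. A small sketch, with a made-up schema literal for illustration:

package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    var input map[string]any
    raw := `{"properties": {"path": {"type": "string"}}, "required": ["path", "pattern"]}`
    if err := json.Unmarshal([]byte(raw), &input); err != nil {
        panic(err)
    }

    // input["required"] is []any here, never []string.
    var required []string
    if req, ok := input["required"].([]any); ok {
        for _, v := range req {
            if s, ok := v.(string); ok {
                required = append(required, s)
            }
        }
    }
    fmt.Println(required) // [path pattern]
}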
From 05457d52a5467a1168676b8bdff802bac9cddd54 Mon Sep 17 00:00:00 2001
From: Christian Rocha
Date: Wed, 15 Oct 2025 16:00:50 -0400
Subject: [PATCH 226/236] refactor: use clamp from /x/exp/ordered (#1236)
---
go.mod | 2 ++
go.sum | 2 ++
internal/tui/components/chat/messages/messages.go | 5 +++--
internal/tui/exp/list/list.go | 5 +++--
internal/tui/util/util.go | 7 -------
5 files changed, 10 insertions(+), 11 deletions(-)
diff --git a/go.mod b/go.mod
index 5f32e148b92ac8e6c456157465061c759d267dd9..15ad5f22d82ad649decda39907f0911650a8b5f5 100644
--- a/go.mod
+++ b/go.mod
@@ -46,6 +46,8 @@ require (
mvdan.cc/sh/v3 v3.12.1-0.20250902163504-3cf4fd5717a5
)
+require github.com/charmbracelet/x/exp/ordered v0.1.0
+
require (
cloud.google.com/go v0.116.0 // indirect
cloud.google.com/go/auth v0.13.0 // indirect
diff --git a/go.sum b/go.sum
index 2d53e85a40001ea9241e4c7ee728baa734a889d9..d1c0349a66d5e8c0e9bf6968d849cb0cbf6d26c5 100644
--- a/go.sum
+++ b/go.sum
@@ -102,6 +102,8 @@ github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3 h1:1
github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3/go.mod h1:T9jr8CzFpjhFVHjNjKwbAD7KwBNyFnj2pntAO7F2zw0=
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a h1:FsHEJ52OC4VuTzU8t+n5frMjLvpYWEznSr/u8tnkCYw=
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
+github.com/charmbracelet/x/exp/ordered v0.1.0 h1:55/qLwjIh0gL0Vni+QAWk7T/qRVP6sBf+2agPBgnOFE=
+github.com/charmbracelet/x/exp/ordered v0.1.0/go.mod h1:5UHwmG+is5THxMyCJHNPCn2/ecI07aKNrW+LcResjJ8=
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d h1:H2oh4WlSsXy8qwLd7I3eAvPd/X3S40aM9l+h47WF1eA=
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d/go.mod h1:vI5nDVMWi6veaYH+0Fmvpbe/+cv/iJfMntdh+N0+Tms=
github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4 h1:ZhDGU688EHQXslD9KphRpXwK0pKP03egUoZAATUDlV0=
diff --git a/internal/tui/components/chat/messages/messages.go b/internal/tui/components/chat/messages/messages.go
index 296b02478a7d0738fef2f60ae6b2211d44424a2f..d931ba7e179255d6639db78ebea5e82b57af1504 100644
--- a/internal/tui/components/chat/messages/messages.go
+++ b/internal/tui/components/chat/messages/messages.go
@@ -12,6 +12,7 @@ import (
"github.com/charmbracelet/catwalk/pkg/catwalk"
"github.com/charmbracelet/lipgloss/v2"
"github.com/charmbracelet/x/ansi"
+ "github.com/charmbracelet/x/exp/ordered"
"github.com/google/uuid"
"github.com/atotto/clipboard"
@@ -271,7 +272,7 @@ func (m *messageCmp) renderThinkingContent() string {
}
}
fullContent := content.String()
- height := util.Clamp(lipgloss.Height(fullContent), 1, 10)
+ height := ordered.Clamp(lipgloss.Height(fullContent), 1, 10)
m.thinkingViewport.SetHeight(height)
m.thinkingViewport.SetWidth(m.textWidth())
m.thinkingViewport.SetContent(fullContent)
@@ -344,7 +345,7 @@ func (m *messageCmp) GetSize() (int, int) {
// SetSize updates the width of the message component for text wrapping
func (m *messageCmp) SetSize(width int, height int) tea.Cmd {
- m.width = util.Clamp(width, 1, 120)
+ m.width = ordered.Clamp(width, 1, 120)
m.thinkingViewport.SetWidth(m.width - 4)
return nil
}
diff --git a/internal/tui/exp/list/list.go b/internal/tui/exp/list/list.go
index fd789f90b89b016abb9b9fb5c79227da7ef30fd9..e18b88348959c59190f1741698f76c33f04571db 100644
--- a/internal/tui/exp/list/list.go
+++ b/internal/tui/exp/list/list.go
@@ -15,6 +15,7 @@ import (
"github.com/charmbracelet/lipgloss/v2"
uv "github.com/charmbracelet/ultraviolet"
"github.com/charmbracelet/x/ansi"
+ "github.com/charmbracelet/x/exp/ordered"
"github.com/rivo/uniseg"
)
@@ -1283,14 +1284,14 @@ func (l *list[T]) UpdateItem(id string, item T) tea.Cmd {
newItem, ok := l.renderedItems.Get(item.ID())
if ok {
newLines := newItem.height - oldItem.height
- l.offset = util.Clamp(l.offset+newLines, 0, lipgloss.Height(l.rendered)-1)
+ l.offset = ordered.Clamp(l.offset+newLines, 0, lipgloss.Height(l.rendered)-1)
}
}
} else if hasOldItem && l.offset > oldItem.start {
newItem, ok := l.renderedItems.Get(item.ID())
if ok {
newLines := newItem.height - oldItem.height
- l.offset = util.Clamp(l.offset+newLines, 0, lipgloss.Height(l.rendered)-1)
+ l.offset = ordered.Clamp(l.offset+newLines, 0, lipgloss.Height(l.rendered)-1)
}
}
}
diff --git a/internal/tui/util/util.go b/internal/tui/util/util.go
index 1f4ea30c49c8fb0517a5068d3b7f05970638743a..eb19ad89544b281af2e836f667ac63aaa6414e01 100644
--- a/internal/tui/util/util.go
+++ b/internal/tui/util/util.go
@@ -60,10 +60,3 @@ type (
}
ClearStatusMsg struct{}
)
-
-func Clamp(v, low, high int) int {
- if high < low {
- low, high = high, low
- }
- return min(high, max(low, v))
-}
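Usage of the replacement is visible in the diff above; here is a tiny standalone sketch, assuming the module added to go.mod is available on the module path:

package main

import (
    "fmt"

    "github.com/charmbracelet/x/exp/ordered"
)

func main() {
    // Same call shape as the TUI code: Clamp(value, low, high).
    fmt.Println(ordered.Clamp(15, 1, 10)) // 10
    fmt.Println(ordered.Clamp(-3, 1, 10)) // 1
    fmt.Println(ordered.Clamp(7, 1, 10))  // 7
}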
From 1932bcebd41811e8605dc626158d6ec2e3e9118b Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 15 Oct 2025 17:01:29 -0300
Subject: [PATCH 227/236] chore: go mod tidy
Signed-off-by: Carlos Alexandro Becker
---
go.mod | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/go.mod b/go.mod
index 15ad5f22d82ad649decda39907f0911650a8b5f5..e0b92a9380af54233306de80c826a5191878298a 100644
--- a/go.mod
+++ b/go.mod
@@ -22,6 +22,7 @@ require (
github.com/charmbracelet/x/ansi v0.10.2
github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a
+ github.com/charmbracelet/x/exp/ordered v0.1.0
github.com/disintegration/imageorient v0.0.0-20180920195336-8147d86e83ec
github.com/google/uuid v1.6.0
github.com/invopop/jsonschema v0.13.0
@@ -46,8 +47,6 @@ require (
mvdan.cc/sh/v3 v3.12.1-0.20250902163504-3cf4fd5717a5
)
-require github.com/charmbracelet/x/exp/ordered v0.1.0
-
require (
cloud.google.com/go v0.116.0 // indirect
cloud.google.com/go/auth v0.13.0 // indirect
From 4b1001cf3391473faf43233a10cc4240ec147eb9 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Wed, 15 Oct 2025 17:13:14 -0300
Subject: [PATCH 228/236] fix(tui): paste on arguments input (#1240)
Signed-off-by: Carlos Alexandro Becker
---
internal/tui/components/dialogs/commands/arguments.go | 2 --
internal/tui/components/dialogs/commands/keys.go | 7 -------
2 files changed, 9 deletions(-)
diff --git a/internal/tui/components/dialogs/commands/arguments.go b/internal/tui/components/dialogs/commands/arguments.go
index b1a274319719b9f550179b35aa98fd8310e0bb7b..72677bc934864970c2cbded87b31853ad702a6ed 100644
--- a/internal/tui/components/dialogs/commands/arguments.go
+++ b/internal/tui/components/dialogs/commands/arguments.go
@@ -128,8 +128,6 @@ func (c *commandArgumentsDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
c.inputs[c.focusIndex].Blur()
c.focusIndex = (c.focusIndex - 1 + len(c.inputs)) % len(c.inputs)
c.inputs[c.focusIndex].Focus()
- case key.Matches(msg, c.keys.Paste):
- return c, textinput.Paste
case key.Matches(msg, c.keys.Close):
return c, util.CmdHandler(dialogs.CloseDialogMsg{})
default:
diff --git a/internal/tui/components/dialogs/commands/keys.go b/internal/tui/components/dialogs/commands/keys.go
index b704f227fe6f183a430bd25d3af62f4ef50b3365..65d4af84c22c87117bf5a08427027da5ee0e244f 100644
--- a/internal/tui/components/dialogs/commands/keys.go
+++ b/internal/tui/components/dialogs/commands/keys.go
@@ -76,7 +76,6 @@ type ArgumentsDialogKeyMap struct {
Confirm key.Binding
Next key.Binding
Previous key.Binding
- Paste key.Binding
Close key.Binding
}
@@ -95,10 +94,6 @@ func DefaultArgumentsDialogKeyMap() ArgumentsDialogKeyMap {
key.WithKeys("shift+tab", "up"),
key.WithHelp("shift+tab/↑", "previous"),
),
- Paste: key.NewBinding(
- key.WithKeys("ctrl+v"),
- key.WithHelp("ctrl+v", "paste"),
- ),
Close: key.NewBinding(
key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
@@ -112,7 +107,6 @@ func (k ArgumentsDialogKeyMap) KeyBindings() []key.Binding {
k.Confirm,
k.Next,
k.Previous,
- k.Paste,
k.Close,
}
}
@@ -134,7 +128,6 @@ func (k ArgumentsDialogKeyMap) ShortHelp() []key.Binding {
k.Confirm,
k.Next,
k.Previous,
- k.Paste,
k.Close,
}
}
From ce72a48378780d5fc76da7333353348aa05502fe Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Thu, 16 Oct 2025 08:58:56 -0300
Subject: [PATCH 229/236] fix(mcp): append to os.Environ() (#1242)
Signed-off-by: Carlos Alexandro Becker
---
internal/llm/agent/mcp-tools.go | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index 038cd43f4469953779799b70850355ef5dcda45f..6209efa29fb7d9a3a488ca730f6f8175e3b08a60 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -10,6 +10,7 @@ import (
"log/slog"
"maps"
"net/http"
+ "os"
"os/exec"
"strings"
"sync"
@@ -415,7 +416,7 @@ func createMCPTransport(ctx context.Context, m config.MCPConfig, resolver config
return nil, fmt.Errorf("mcp stdio config requires a non-empty 'command' field")
}
cmd := exec.CommandContext(ctx, home.Long(command), m.Args...)
- cmd.Env = m.ResolvedEnv()
+ cmd.Env = append(os.Environ(), m.ResolvedEnv()...)
return &mcp.CommandTransport{
Command: cmd,
}, nil
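Why the append matters, as a hedged sketch: assigning cmd.Env replaces the child's environment outright, so a config that only lists MCP-specific variables would drop PATH, HOME, and friends. The stand-in variable below is illustrative:

package main

import (
    "fmt"
    "os"
    "os/exec"
)

func main() {
    extra := []string{"MCP_EXAMPLE_TOKEN=example"} // stand-in for m.ResolvedEnv()

    // Appending to os.Environ() keeps the inherited environment and layers
    // the MCP-specific entries on top of it.
    cmd := exec.Command("env") // Unix-only example command
    cmd.Env = append(os.Environ(), extra...)

    out, err := cmd.Output()
    if err != nil {
        fmt.Println("error:", err)
        return
    }
    fmt.Print(string(out))
}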
From 2aa53614d2970fba5674d439a0fec0e531fd6d93 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Thu, 16 Oct 2025 10:11:52 -0300
Subject: [PATCH 230/236] test: add tests for the dirs cmd (#1243)
Signed-off-by: Carlos Alexandro Becker
---
internal/cmd/dirs_test.go | 46 +++++++++++++++++++++++++++++++++++++++
1 file changed, 46 insertions(+)
create mode 100644 internal/cmd/dirs_test.go
diff --git a/internal/cmd/dirs_test.go b/internal/cmd/dirs_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..2d68f45481a61b4ee9cf9ddc31b8d86d8a69a51f
--- /dev/null
+++ b/internal/cmd/dirs_test.go
@@ -0,0 +1,46 @@
+package cmd
+
+import (
+ "bytes"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func init() {
+ os.Setenv("XDG_CONFIG_HOME", "/tmp/fakeconfig")
+ os.Setenv("XDG_DATA_HOME", "/tmp/fakedata")
+}
+
+func TestDirs(t *testing.T) {
+ var b bytes.Buffer
+ dirsCmd.SetOut(&b)
+ dirsCmd.SetErr(&b)
+ dirsCmd.SetIn(bytes.NewReader(nil))
+ dirsCmd.Run(dirsCmd, nil)
+ expected := filepath.FromSlash("/tmp/fakeconfig/crush") + "\n" +
+ filepath.FromSlash("/tmp/fakedata/crush") + "\n"
+ require.Equal(t, expected, b.String())
+}
+
+func TestConfigDir(t *testing.T) {
+ var b bytes.Buffer
+ configDirCmd.SetOut(&b)
+ configDirCmd.SetErr(&b)
+ configDirCmd.SetIn(bytes.NewReader(nil))
+ configDirCmd.Run(configDirCmd, nil)
+ expected := filepath.FromSlash("/tmp/fakeconfig/crush") + "\n"
+ require.Equal(t, expected, b.String())
+}
+
+func TestDataDir(t *testing.T) {
+ var b bytes.Buffer
+ dataDirCmd.SetOut(&b)
+ dataDirCmd.SetErr(&b)
+ dataDirCmd.SetIn(bytes.NewReader(nil))
+ dataDirCmd.Run(dataDirCmd, nil)
+ expected := filepath.FromSlash("/tmp/fakedata/crush") + "\n"
+ require.Equal(t, expected, b.String())
+}
From 6166fc6fc9a57b9b69a447560563d61abe7e8d46 Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Thu, 16 Oct 2025 14:30:16 -0300
Subject: [PATCH 231/236] chore(legal): @BrunoKrugel has signed the CLA
---
.github/cla-signatures.json | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index 61a8b03447ae07a5dc775ca59a5eef7aacfe9c2b..12b86fb4c2936e871a1a022a150385b3744b23cf 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -711,6 +711,14 @@
"created_at": "2025-10-13T05:56:20Z",
"repoId": 987670088,
"pullRequestNo": 1223
+ },
+ {
+ "name": "BrunoKrugel",
+ "id": 30608179,
+ "comment_id": 3411978929,
+ "created_at": "2025-10-16T17:30:07Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1245
}
]
}
\ No newline at end of file
From b896a2584775d3b4f4179a2774350fee4b6313cf Mon Sep 17 00:00:00 2001
From: Bruno Krugel
Date: Thu, 16 Oct 2025 14:34:50 -0300
Subject: [PATCH 232/236] fix(mcp): avoid nil errors for tool parameters
(#1245)
---
CRUSH.md | 2 +-
internal/llm/agent/mcp-tools.go | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/CRUSH.md b/CRUSH.md
index 102ad43ca5758beee6515ab9da4054ddc92b9a9f..dee2e7ba62baeb2af691828ed67dddf3446d4525 100644
--- a/CRUSH.md
+++ b/CRUSH.md
@@ -54,7 +54,7 @@ func TestYourFunction(t *testing.T) {
## Formatting
- ALWAYS format any Go code you write.
- - First, try `goftumpt -w .`.
+ - First, try `gofumpt -w .`.
- If `gofumpt` is not available, use `goimports`.
- If `goimports` is not available, use `gofmt`.
- You can also use `task fmt` to run `gofumpt -w .` on the entire project,
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index 6209efa29fb7d9a3a488ca730f6f8175e3b08a60..041cff490a59f1de51505e833cc7ee7866aa7644 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -99,8 +99,8 @@ func (b *McpTool) Name() string {
}
func (b *McpTool) Info() tools.ToolInfo {
- var parameters map[string]any
- var required []string
+ parameters := make(map[string]any)
+ required := make([]string, 0)
if input, ok := b.tool.InputSchema.(map[string]any); ok {
if props, ok := input["properties"].(map[string]any); ok {
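The motivation for make(...) over nil declarations, sketched with plain encoding/json: nil maps and slices marshal to JSON null, while empty ones marshal to {} and [], which downstream consumers of the tool schema are more likely to accept:

package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    var nilProps map[string]any
    var nilRequired []string

    a, _ := json.Marshal(map[string]any{"properties": nilProps, "required": nilRequired})
    b, _ := json.Marshal(map[string]any{"properties": map[string]any{}, "required": []string{}})

    fmt.Println(string(a)) // {"properties":null,"required":null}
    fmt.Println(string(b)) // {"properties":{},"required":[]}
}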
From 3a9954297f6d5c20e53d4a64335d5bccb9323792 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Thu, 16 Oct 2025 14:36:58 -0300
Subject: [PATCH 233/236] fix(mcp): improve STDIO error handling (#1244)
Signed-off-by: Carlos Alexandro Becker
---
internal/llm/agent/mcp-tools.go | 34 +++++++++++++++++++++++++++++++++
1 file changed, 34 insertions(+)
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index 041cff490a59f1de51505e833cc7ee7866aa7644..d2ff6454e9a7135ee9404ef665495772b90ba86c 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -387,6 +387,7 @@ func createMCPSession(ctx context.Context, name string, m config.MCPConfig, reso
session, err := client.Connect(mcpCtx, transport, nil)
if err != nil {
+ err = maybeStdioErr(err, transport)
updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
slog.Error("error starting mcp client", "error", err, "name", name)
cancel()
@@ -398,6 +399,27 @@ func createMCPSession(ctx context.Context, name string, m config.MCPConfig, reso
return session, nil
}
+// maybeStdioErr handles the case where a stdio MCP server prints an error in
+// non-JSON format: parsing fails, the client closes the connection, and we
+// end up with an EOF error. If we got an EOF and the transport is STDIO, we
+// re-run the command with a timeout and collect its output so we can attach
+// details to the error.
+// This happens particularly when starting servers with npx, e.g. when node
+// can't be found or fails in some similar way.
+func maybeStdioErr(err error, transport mcp.Transport) error {
+ if !errors.Is(err, io.EOF) {
+ return err
+ }
+ ct, ok := transport.(*mcp.CommandTransport)
+ if !ok {
+ return err
+ }
+ if err2 := stdioMCPCheck(ct.Command); err2 != nil {
+ err = errors.Join(err, err2)
+ }
+ return err
+}
+
func maybeTimeoutErr(err error, timeout time.Duration) error {
if errors.Is(err, context.Canceled) {
return fmt.Errorf("timed out after %s", timeout)
@@ -465,3 +487,15 @@ func (rt headerRoundTripper) RoundTrip(req *http.Request) (*http.Response, error
func mcpTimeout(m config.MCPConfig) time.Duration {
return time.Duration(cmp.Or(m.Timeout, 15)) * time.Second
}
+
+func stdioMCPCheck(old *exec.Cmd) error {
+ ctx, cancel := context.WithTimeout(context.Background(), time.Second*5)
+ defer cancel()
+ cmd := exec.CommandContext(ctx, old.Path, old.Args...)
+ cmd.Env = old.Env
+ out, err := cmd.CombinedOutput()
+ if err == nil || errors.Is(ctx.Err(), context.DeadlineExceeded) {
+ return nil
+ }
+ return fmt.Errorf("%w: %s", err, string(out))
+}
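The joined error keeps both pieces inspectable, which is what makes the extra detail safe to attach; a minimal sketch with a made-up detail message:

package main

import (
    "errors"
    "fmt"
    "io"
)

func main() {
    base := io.EOF
    detail := fmt.Errorf("npx: command not found") // illustrative captured output
    err := errors.Join(base, detail)

    // errors.Is still matches the original EOF through the join, and the
    // message carries both parts, joined with a newline.
    fmt.Println(errors.Is(err, io.EOF)) // true
    fmt.Println(err)
}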
From 015632a146db8ce0d04c989b4394f67490ad0c21 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Thu, 16 Oct 2025 19:36:29 -0300
Subject: [PATCH 235/236] fix(mcp): make sure to cancel context on error
(#1246)
Signed-off-by: Carlos Alexandro Becker
---
Taskfile.yaml | 2 +-
internal/llm/agent/mcp-tools.go | 3 +++
2 files changed, 4 insertions(+), 1 deletion(-)
diff --git a/Taskfile.yaml b/Taskfile.yaml
index 92b162dfbb847356e09eb17ea5996e6093a305b2..65a7e2d42fe8dcb307bced7c1fa9a0326b35ccc1 100644
--- a/Taskfile.yaml
+++ b/Taskfile.yaml
@@ -99,7 +99,7 @@ tasks:
cmds:
- task: fetch-tags
- git commit --allow-empty -m "{{.NEXT}}"
- - git tag --annotate -m "{{.NEXT}}" {{.NEXT}} {{.CLI_ARGS}}
+ - git tag --annotate --sign -m "{{.NEXT}}" {{.NEXT}} {{.CLI_ARGS}}
- echo "Pushing {{.NEXT}}..."
- git push origin main --follow-tags
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index d2ff6454e9a7135ee9404ef665495772b90ba86c..6838c54ab4dc8cface0eb311e0fb933a5c18aae6 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -365,6 +365,8 @@ func createMCPSession(ctx context.Context, name string, m config.MCPConfig, reso
if err != nil {
updateMCPState(name, MCPStateError, err, nil, 0)
slog.Error("error creating mcp client", "error", err, "name", name)
+ cancel()
+ cancelTimer.Stop()
return nil, err
}
@@ -391,6 +393,7 @@ func createMCPSession(ctx context.Context, name string, m config.MCPConfig, reso
updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
slog.Error("error starting mcp client", "error", err, "name", name)
cancel()
+ cancelTimer.Stop()
return nil, err
}
From a64a4def3ea855ac2c84cd0c12d165fe5098b1a5 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Fri, 17 Oct 2025 09:56:58 -0300
Subject: [PATCH 236/236] feat(lsp): find references tool (#1233)
Signed-off-by: Carlos Alexandro Becker
---
go.mod | 2 +-
go.sum | 4 +-
internal/llm/agent/agent.go | 2 +-
internal/llm/tools/diagnostics.go | 2 +-
internal/llm/tools/grep.go | 108 +++++++-------
internal/llm/tools/grep_test.go | 29 ++++
internal/llm/tools/references.go | 214 +++++++++++++++++++++++++++
internal/llm/tools/references.md | 36 +++++
internal/llm/tools/rg.go | 2 +-
internal/llm/tools/testdata/grep.txt | 3 +
internal/lsp/client.go | 10 ++
11 files changed, 348 insertions(+), 64 deletions(-)
create mode 100644 internal/llm/tools/references.go
create mode 100644 internal/llm/tools/references.md
create mode 100644 internal/llm/tools/testdata/grep.txt
diff --git a/go.mod b/go.mod
index e0b92a9380af54233306de80c826a5191878298a..c0bc32fe29ac100f98c589edf7697f104aa854a5 100644
--- a/go.mod
+++ b/go.mod
@@ -77,7 +77,7 @@ require (
github.com/charmbracelet/ultraviolet v0.0.0-20250915111650-81d4262876ef
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a // indirect
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d
- github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4
+ github.com/charmbracelet/x/powernap v0.0.0-20251015113943-25f979b54ad4
github.com/charmbracelet/x/term v0.2.1
github.com/charmbracelet/x/termios v0.1.1 // indirect
github.com/charmbracelet/x/windows v0.2.2 // indirect
diff --git a/go.sum b/go.sum
index d1c0349a66d5e8c0e9bf6968d849cb0cbf6d26c5..0fa4e9f695cf5d60a60be753aaee9a0b2e14c192 100644
--- a/go.sum
+++ b/go.sum
@@ -106,8 +106,8 @@ github.com/charmbracelet/x/exp/ordered v0.1.0 h1:55/qLwjIh0gL0Vni+QAWk7T/qRVP6sB
github.com/charmbracelet/x/exp/ordered v0.1.0/go.mod h1:5UHwmG+is5THxMyCJHNPCn2/ecI07aKNrW+LcResjJ8=
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d h1:H2oh4WlSsXy8qwLd7I3eAvPd/X3S40aM9l+h47WF1eA=
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d/go.mod h1:vI5nDVMWi6veaYH+0Fmvpbe/+cv/iJfMntdh+N0+Tms=
-github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4 h1:ZhDGU688EHQXslD9KphRpXwK0pKP03egUoZAATUDlV0=
-github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4/go.mod h1:cmdl5zlP5mR8TF2Y68UKc7hdGUDiSJ2+4hk0h04Hsx4=
+github.com/charmbracelet/x/powernap v0.0.0-20251015113943-25f979b54ad4 h1:i/XilBPYK4L1Yo/mc9FPx0SyJzIsN0y4sj1MWq9Sscc=
+github.com/charmbracelet/x/powernap v0.0.0-20251015113943-25f979b54ad4/go.mod h1:cmdl5zlP5mR8TF2Y68UKc7hdGUDiSJ2+4hk0h04Hsx4=
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
github.com/charmbracelet/x/termios v0.1.1 h1:o3Q2bT8eqzGnGPOYheoYS8eEleT5ZVNYNy8JawjaNZY=
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index e338eef782912bdfea48ca72ebfd33c4cd981f62..b2b222db1a481b1eb4c7e945467bd5c74506d5ab 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -525,7 +525,7 @@ func (a *agent) getAllTools() ([]tools.BaseTool, error) {
if a.agentCfg.ID == "coder" {
allTools = slices.AppendSeq(allTools, a.mcpTools.Seq())
if a.lspClients.Len() > 0 {
- allTools = append(allTools, tools.NewDiagnosticsTool(a.lspClients))
+ allTools = append(allTools, tools.NewDiagnosticsTool(a.lspClients), tools.NewReferencesTool(a.lspClients))
}
}
if a.agentToolFn != nil {
diff --git a/internal/llm/tools/diagnostics.go b/internal/llm/tools/diagnostics.go
index 8e0c332cef76e40d5e24e74ed3260b95aab8b04b..c2625e9495963f1de467656b2d74e71e0b3c78fa 100644
--- a/internal/llm/tools/diagnostics.go
+++ b/internal/llm/tools/diagnostics.go
@@ -23,7 +23,7 @@ type diagnosticsTool struct {
lspClients *csync.Map[string, *lsp.Client]
}
-const DiagnosticsToolName = "diagnostics"
+const DiagnosticsToolName = "lsp_diagnostics"
//go:embed diagnostics.md
var diagnosticsDescription []byte
diff --git a/internal/llm/tools/grep.go b/internal/llm/tools/grep.go
index 237d4e18dab0bc518b9d4b6e2c73ef5035d2b348..ed844b6c10081deab6a314f380da72e0893102ca 100644
--- a/internal/llm/tools/grep.go
+++ b/internal/llm/tools/grep.go
@@ -2,6 +2,7 @@ package tools
import (
"bufio"
+ "bytes"
"context"
_ "embed"
"encoding/json"
@@ -13,7 +14,6 @@ import (
"path/filepath"
"regexp"
"sort"
- "strconv"
"strings"
"sync"
"time"
@@ -82,6 +82,7 @@ type grepMatch struct {
path string
modTime time.Time
lineNum int
+ charNum int
lineText string
}
@@ -189,7 +190,11 @@ func (g *grepTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error)
fmt.Fprintf(&output, "%s:\n", match.path)
}
if match.lineNum > 0 {
- fmt.Fprintf(&output, " Line %d: %s\n", match.lineNum, match.lineText)
+ if match.charNum > 0 {
+ fmt.Fprintf(&output, " Line %d, Char %d: %s\n", match.lineNum, match.charNum, match.lineText)
+ } else {
+ fmt.Fprintf(&output, " Line %d: %s\n", match.lineNum, match.lineText)
+ }
} else {
fmt.Fprintf(&output, " %s\n", match.path)
}
@@ -252,66 +257,51 @@ func searchWithRipgrep(ctx context.Context, pattern, path, include string) ([]gr
return nil, err
}
- lines := strings.Split(strings.TrimSpace(string(output)), "\n")
- matches := make([]grepMatch, 0, len(lines))
-
- for _, line := range lines {
- if line == "" {
+ var matches []grepMatch
+ for line := range bytes.SplitSeq(bytes.TrimSpace(output), []byte{'\n'}) {
+ if len(line) == 0 {
continue
}
-
- // Parse ripgrep output using null separation
- filePath, lineNumStr, lineText, ok := parseRipgrepLine(line)
- if !ok {
+ var match ripgrepMatch
+ if err := json.Unmarshal(line, &match); err != nil {
continue
}
-
- lineNum, err := strconv.Atoi(lineNumStr)
- if err != nil {
+ if match.Type != "match" {
continue
}
-
- fileInfo, err := os.Stat(filePath)
- if err != nil {
- continue // Skip files we can't access
+ for _, m := range match.Data.Submatches {
+ fi, err := os.Stat(match.Data.Path.Text)
+ if err != nil {
+ continue // Skip files we can't access
+ }
+ matches = append(matches, grepMatch{
+ path: match.Data.Path.Text,
+ modTime: fi.ModTime(),
+ lineNum: match.Data.LineNumber,
+ charNum: m.Start + 1, // ensure 1-based
+ lineText: strings.TrimSpace(match.Data.Lines.Text),
+ })
+ // only get the first match of each line
+ break
}
-
- matches = append(matches, grepMatch{
- path: filePath,
- modTime: fileInfo.ModTime(),
- lineNum: lineNum,
- lineText: lineText,
- })
}
-
return matches, nil
}
-// parseRipgrepLine parses ripgrep output with null separation to handle Windows paths
-func parseRipgrepLine(line string) (filePath, lineNum, lineText string, ok bool) {
- // Split on null byte first to separate filename from rest
- parts := strings.SplitN(line, "\x00", 2)
- if len(parts) != 2 {
- return "", "", "", false
- }
-
- filePath = parts[0]
- remainder := parts[1]
-
- // Now split the remainder on first colon: "linenum:content"
- colonIndex := strings.Index(remainder, ":")
- if colonIndex == -1 {
- return "", "", "", false
- }
-
- lineNumStr := remainder[:colonIndex]
- lineText = remainder[colonIndex+1:]
-
- if _, err := strconv.Atoi(lineNumStr); err != nil {
- return "", "", "", false
- }
-
- return filePath, lineNumStr, lineText, true
+type ripgrepMatch struct {
+ Type string `json:"type"`
+ Data struct {
+ Path struct {
+ Text string `json:"text"`
+ } `json:"path"`
+ Lines struct {
+ Text string `json:"text"`
+ } `json:"lines"`
+ LineNumber int `json:"line_number"`
+ Submatches []struct {
+ Start int `json:"start"`
+ } `json:"submatches"`
+ } `json:"data"`
}
func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error) {
@@ -363,7 +353,7 @@ func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error
return nil
}
- match, lineNum, lineText, err := fileContainsPattern(path, regex)
+ match, lineNum, charNum, lineText, err := fileContainsPattern(path, regex)
if err != nil {
return nil // Skip files we can't read
}
@@ -373,6 +363,7 @@ func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error
path: path,
modTime: info.ModTime(),
lineNum: lineNum,
+ charNum: charNum,
lineText: lineText,
})
@@ -390,15 +381,15 @@ func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error
return matches, nil
}
-func fileContainsPattern(filePath string, pattern *regexp.Regexp) (bool, int, string, error) {
+func fileContainsPattern(filePath string, pattern *regexp.Regexp) (bool, int, int, string, error) {
// Only search text files.
if !isTextFile(filePath) {
- return false, 0, "", nil
+ return false, 0, 0, "", nil
}
file, err := os.Open(filePath)
if err != nil {
- return false, 0, "", err
+ return false, 0, 0, "", err
}
defer file.Close()
@@ -407,12 +398,13 @@ func fileContainsPattern(filePath string, pattern *regexp.Regexp) (bool, int, st
for scanner.Scan() {
lineNum++
line := scanner.Text()
- if pattern.MatchString(line) {
- return true, lineNum, line, nil
+ if loc := pattern.FindStringIndex(line); loc != nil {
+ charNum := loc[0] + 1
+ return true, lineNum, charNum, line, nil
}
}
- return false, 0, "", scanner.Err()
+ return false, 0, 0, "", scanner.Err()
}
// isTextFile checks if a file is a text file by examining its MIME type.
diff --git a/internal/llm/tools/grep_test.go b/internal/llm/tools/grep_test.go
index 435b3045b93a8e1297ff2aaeff9ee8977b974b56..753ee05942b78578fd2e9170384cac3fd5d9496e 100644
--- a/internal/llm/tools/grep_test.go
+++ b/internal/llm/tools/grep_test.go
@@ -390,3 +390,32 @@ func TestIsTextFile(t *testing.T) {
})
}
}
+
+func TestColumnMatch(t *testing.T) {
+ t.Parallel()
+
+ // Test both implementations
+ for name, fn := range map[string]func(pattern, path, include string) ([]grepMatch, error){
+ "regex": searchFilesWithRegex,
+ "rg": func(pattern, path, include string) ([]grepMatch, error) {
+ return searchWithRipgrep(t.Context(), pattern, path, include)
+ },
+ } {
+ t.Run(name, func(t *testing.T) {
+ t.Parallel()
+
+ if name == "rg" && getRg() == "" {
+ t.Skip("rg is not in $PATH")
+ }
+
+ matches, err := fn("THIS", "./testdata/", "")
+ require.NoError(t, err)
+ require.Len(t, matches, 1)
+ match := matches[0]
+ require.Equal(t, 2, match.lineNum)
+ require.Equal(t, 14, match.charNum)
+ require.Equal(t, "I wanna grep THIS particular word", match.lineText)
+ require.Equal(t, "testdata/grep.txt", filepath.ToSlash(filepath.Clean(match.path)))
+ })
+ }
+}
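For reference, the shape of the `rg --json` events the new ripgrepMatch struct consumes; the sample line below is trimmed to the fields the parser actually reads (real output also carries begin/end/summary events and extra offsets):

package main

import (
    "encoding/json"
    "fmt"
)

// rgMatch mirrors the ripgrepMatch struct from grep.go, trimmed to the fields used.
type rgMatch struct {
    Type string `json:"type"`
    Data struct {
        Path struct {
            Text string `json:"text"`
        } `json:"path"`
        Lines struct {
            Text string `json:"text"`
        } `json:"lines"`
        LineNumber int `json:"line_number"`
        Submatches []struct {
            Start int `json:"start"`
        } `json:"submatches"`
    } `json:"data"`
}

func main() {
    line := `{"type":"match","data":{"path":{"text":"testdata/grep.txt"},"lines":{"text":"I wanna grep THIS particular word\n"},"line_number":2,"submatches":[{"start":13,"end":17}]}}`

    var m rgMatch
    if err := json.Unmarshal([]byte(line), &m); err != nil {
        panic(err)
    }
    // start is 0-based, so this prints column 14, matching the test above.
    fmt.Printf("%s:%d:%d\n", m.Data.Path.Text, m.Data.LineNumber, m.Data.Submatches[0].Start+1)
}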
diff --git a/internal/llm/tools/references.go b/internal/llm/tools/references.go
new file mode 100644
index 0000000000000000000000000000000000000000..a1bc393cd5d28755f5f0b694c1b2df40bee1a39e
--- /dev/null
+++ b/internal/llm/tools/references.go
@@ -0,0 +1,214 @@
+package tools
+
+import (
+ "cmp"
+ "context"
+ _ "embed"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log/slog"
+ "maps"
+ "path/filepath"
+ "regexp"
+ "slices"
+ "sort"
+ "strings"
+
+ "github.com/charmbracelet/crush/internal/csync"
+ "github.com/charmbracelet/crush/internal/lsp"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
+)
+
+type ReferencesParams struct {
+ Symbol string `json:"symbol"`
+ Path string `json:"path"`
+}
+
+type referencesTool struct {
+ lspClients *csync.Map[string, *lsp.Client]
+}
+
+const ReferencesToolName = "lsp_references"
+
+//go:embed references.md
+var referencesDescription []byte
+
+func NewReferencesTool(lspClients *csync.Map[string, *lsp.Client]) BaseTool {
+ return &referencesTool{
+ lspClients,
+ }
+}
+
+func (r *referencesTool) Name() string {
+ return ReferencesToolName
+}
+
+func (r *referencesTool) Info() ToolInfo {
+ return ToolInfo{
+ Name: ReferencesToolName,
+ Description: string(referencesDescription),
+ Parameters: map[string]any{
+ "symbol": map[string]any{
+ "type": "string",
+ "description": "The symbol name to search for (e.g., function name, variable name, type name).",
+ },
+ "path": map[string]any{
+ "type": "string",
+ "description": "The directory to search in. Should be the entire project most of the time. Defaults to the current working directory.",
+ },
+ },
+ Required: []string{"symbol"},
+ }
+}
+
+func (r *referencesTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
+ var params ReferencesParams
+ if err := json.Unmarshal([]byte(call.Input), &params); err != nil {
+ return NewTextErrorResponse(fmt.Sprintf("error parsing parameters: %s", err)), nil
+ }
+
+ if params.Symbol == "" {
+ return NewTextErrorResponse("symbol is required"), nil
+ }
+
+ if r.lspClients.Len() == 0 {
+ return NewTextErrorResponse("no LSP clients available"), nil
+ }
+
+ workingDir := cmp.Or(params.Path, ".")
+
+ matches, _, err := searchFiles(ctx, regexp.QuoteMeta(params.Symbol), workingDir, "", 100)
+ if err != nil {
+ return NewTextErrorResponse(fmt.Sprintf("failed to search for symbol: %s", err)), nil
+ }
+
+ if len(matches) == 0 {
+ return NewTextResponse(fmt.Sprintf("Symbol '%s' not found", params.Symbol)), nil
+ }
+
+ var allLocations []protocol.Location
+ var allErrs error
+ for _, match := range matches {
+ locations, err := r.find(ctx, params.Symbol, match)
+ if err != nil {
+ if strings.Contains(err.Error(), "no identifier found") {
+ // grep probably matched a comment, string value, or something else that's irrelevant
+ continue
+ }
+ slog.Error("Failed to find references", "error", err, "symbol", params.Symbol, "path", match.path, "line", match.lineNum, "char", match.charNum)
+ allErrs = errors.Join(allErrs, err)
+ continue
+ }
+ allLocations = append(allLocations, locations...)
+ // XXX: should we break here or look for all results?
+ }
+
+ if len(allLocations) > 0 {
+ output := formatReferences(cleanupLocations(allLocations))
+ return NewTextResponse(output), nil
+ }
+
+ if allErrs != nil {
+ return NewTextErrorResponse(allErrs.Error()), nil
+ }
+ return NewTextResponse(fmt.Sprintf("No references found for symbol '%s'", params.Symbol)), nil
+}
+
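+// find picks the first LSP client that handles the matched file and queries it
+// for references to the symbol at the match position, offsetting into qualified
+// names such as "pkg.Func".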
+func (r *referencesTool) find(ctx context.Context, symbol string, match grepMatch) ([]protocol.Location, error) {
+ absPath, err := filepath.Abs(match.path)
+ if err != nil {
+ return nil, fmt.Errorf("failed to get absolute path: %s", err)
+ }
+
+ var client *lsp.Client
+ for c := range r.lspClients.Seq() {
+ if c.HandlesFile(absPath) {
+ client = c
+ break
+ }
+ }
+
+ if client == nil {
+ slog.Warn("No LSP client can handle this file", "path", match.path)
+ return nil, nil
+ }
+
+ return client.FindReferences(
+ ctx,
+ absPath,
+ match.lineNum,
+ match.charNum+getSymbolOffset(symbol),
+ true,
+ )
+}
+
+// getSymbolOffset returns the character offset to the actual symbol name
+// in a qualified symbol (e.g., "Bar" in "foo.Bar" or "method" in "Class::method").
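+// For example, getSymbolOffset("pkg.Func") returns 4 and
+// getSymbolOffset("Class::method") returns 7; unqualified names return 0.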
+func getSymbolOffset(symbol string) int {
+ // Check for :: separator (Rust, C++, Ruby modules/classes, PHP static).
+ if idx := strings.LastIndex(symbol, "::"); idx != -1 {
+ return idx + 2
+ }
+ // Check for . separator (Go, Python, JavaScript, Java, C#, Ruby methods).
+ if idx := strings.LastIndex(symbol, "."); idx != -1 {
+ return idx + 1
+ }
+ // Check for \ separator (PHP namespaces).
+ if idx := strings.LastIndex(symbol, "\\"); idx != -1 {
+ return idx + 1
+ }
+ return 0
+}
+
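+// cleanupLocations sorts locations by URI and start position and drops
+// duplicates that share the same URI and start position.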
+func cleanupLocations(locations []protocol.Location) []protocol.Location {
+ slices.SortFunc(locations, func(a, b protocol.Location) int {
+ if a.URI != b.URI {
+ return strings.Compare(string(a.URI), string(b.URI))
+ }
+ if a.Range.Start.Line != b.Range.Start.Line {
+ return cmp.Compare(a.Range.Start.Line, b.Range.Start.Line)
+ }
+ return cmp.Compare(a.Range.Start.Character, b.Range.Start.Character)
+ })
+ return slices.CompactFunc(locations, func(a, b protocol.Location) bool {
+ return a.URI == b.URI &&
+ a.Range.Start.Line == b.Range.Start.Line &&
+ a.Range.Start.Character == b.Range.Start.Character
+ })
+}
+
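+// groupByFilename groups locations by file path, skipping locations whose URI
+// cannot be converted to a path.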
+func groupByFilename(locations []protocol.Location) map[string][]protocol.Location {
+ files := make(map[string][]protocol.Location)
+ for _, loc := range locations {
+ path, err := loc.URI.Path()
+ if err != nil {
+ slog.Error("Failed to convert location URI to path", "uri", loc.URI, "error", err)
+ continue
+ }
+ files[path] = append(files[path], loc)
+ }
+ return files
+}
+
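+// formatReferences renders the locations grouped by file, sorted by file name,
+// with 1-based line and column numbers.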
+func formatReferences(locations []protocol.Location) string {
+ fileRefs := groupByFilename(locations)
+ files := slices.Collect(maps.Keys(fileRefs))
+ sort.Strings(files)
+
+ var output strings.Builder
+ output.WriteString(fmt.Sprintf("Found %d reference(s) in %d file(s):\n\n", len(locations), len(files)))
+
+ for _, file := range files {
+ refs := fileRefs[file]
+ output.WriteString(fmt.Sprintf("%s (%d reference(s)):\n", file, len(refs)))
+ for _, ref := range refs {
+ line := ref.Range.Start.Line + 1
+ char := ref.Range.Start.Character + 1
+ output.WriteString(fmt.Sprintf(" Line %d, Column %d\n", line, char))
+ }
+ output.WriteString("\n")
+ }
+
+ return output.String()
+}
diff --git a/internal/llm/tools/references.md b/internal/llm/tools/references.md
new file mode 100644
index 0000000000000000000000000000000000000000..951ce71a68b9d62060649cda999107ab9243f42a
--- /dev/null
+++ b/internal/llm/tools/references.md
@@ -0,0 +1,36 @@
+Find all references to (usages of) a symbol by name using the Language Server Protocol (LSP).
+
+WHEN TO USE THIS TOOL:
+
+- **ALWAYS USE THIS FIRST** when searching for where a function, method, variable, type, or constant is used
+- **DO NOT use grep/glob for symbol searches** - this tool is semantic-aware and much more accurate
+- Use when you need to find all usages of a specific symbol (function, variable, type, class, method, etc.)
+- More accurate than grep because it understands code semantics and scope
+- Finds only actual references, not string matches in comments or unrelated code
+- Helpful for understanding where a symbol is used throughout the codebase
+- Useful for refactoring or analyzing code dependencies
+- Good for finding all call sites of a function, method, type, package, constant, variable, etc.
+
+HOW TO USE:
+
+- Provide the symbol name (e.g., "MyFunction", "myVariable", "MyType")
+- Optionally specify a path to narrow the search to a specific directory
+- The tool will automatically find the symbol and locate all references
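+- For example, to find every usage of a hypothetical function "ParseConfig" across the project, pass symbol "ParseConfig" and leave path unset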
+
+FEATURES:
+
+- Returns all references grouped by file
+- Shows line and column numbers for each reference
+- Supports multiple programming languages through LSP
+- Automatically finds the symbol without needing exact position
+
+LIMITATIONS:
+
+- May not find references in files that haven't been opened or indexed
+- Results depend on the LSP server's capabilities
+
+TIPS:
+
+- **Use this tool instead of grep when looking for symbol references** - it's more accurate and semantic-aware
+- Simply provide the symbol name and let the tool find it for you
+- This tool understands code structure, so it won't match unrelated strings or comments
diff --git a/internal/llm/tools/rg.go b/internal/llm/tools/rg.go
index 8809b57c8db30b4ac1ed6c070df5a7218c59e233..76dbb5daf2234669ac3d90552cbbc5af5cc003d0 100644
--- a/internal/llm/tools/rg.go
+++ b/internal/llm/tools/rg.go
@@ -43,7 +43,7 @@ func getRgSearchCmd(ctx context.Context, pattern, path, include string) *exec.Cm
return nil
}
// Use -n to show line numbers, -0 for null separation to handle Windows paths
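+ // --json makes rg emit structured, machine-parseable matches (one JSON event per line)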
- args := []string{"-H", "-n", "-0", pattern}
+ args := []string{"--json", "-H", "-n", "-0", pattern}
if include != "" {
args = append(args, "--glob", include)
}
diff --git a/internal/llm/tools/testdata/grep.txt b/internal/llm/tools/testdata/grep.txt
new file mode 100644
index 0000000000000000000000000000000000000000..edac9ec894634e3b924fb9a0928a272ac4f29e7e
--- /dev/null
+++ b/internal/llm/tools/testdata/grep.txt
@@ -0,0 +1,3 @@
+test file for grep
+I wanna grep THIS particular word
+and nothing else
diff --git a/internal/lsp/client.go b/internal/lsp/client.go
index ff9a3ac9b5249663c151fb2df04a4acb168e4de4..afbe95cc2deb1c37b64c9e9b68fb705a4a0a59f9 100644
--- a/internal/lsp/client.go
+++ b/internal/lsp/client.go
@@ -445,6 +445,16 @@ func (c *Client) WaitForDiagnostics(ctx context.Context, d time.Duration) {
}
}
+// FindReferences finds all references to the symbol at the given position.
+func (c *Client) FindReferences(ctx context.Context, filepath string, line, character int, includeDeclaration bool) ([]protocol.Location, error) {
+ if err := c.OpenFileOnDemand(ctx, filepath); err != nil {
+ return nil, err
+ }
+ // NOTE: line and character should be 0-based.
+ // See: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#position
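+ // The caller passes 1-based line and column values, hence the -1 adjustments below.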
+ return c.client.FindReferences(ctx, filepath, line-1, character-1, includeDeclaration)
+}
+
// HasRootMarkers checks if any of the specified root marker patterns exist in the given directory.
// Uses glob patterns to match files, allowing for more flexible matching.
func HasRootMarkers(dir string, rootMarkers []string) bool {