wip: initial setup

Created by kujtimiihoxha

Change summary

Taskfile.yaml                          |   2 
go.mod                                 |  65 +-
go.sum                                 | 123 ++++
internal/agent/agent.go                | 620 ++++++++++++++++++++++++++++
internal/agent/coordinator.go          |  20 
internal/agent/errors.go               |  17 
internal/agent/prompt/prompt.go        | 140 ++++++
internal/agent/templates/coder.gotmpl  | 133 ++++++
internal/agent/templates/initialize.md |   9 
internal/agent/templates/summary.md    |  11 
internal/agent/templates/task.gotmpl   |  15 
internal/agent/templates/title.md      |   8 
internal/agent/tools/bash.go           | 315 ++++++++++++++
internal/agent/tools/bash.gotmpl       | 116 +++++
internal/agent/tools/diagnostics.go    | 185 ++++++++
internal/agent/tools/diagnostics.md    |  24 +
internal/agent/tools/download.go       | 157 +++++++
internal/agent/tools/download.md       |  28 +
internal/agent/tools/edit.go           | 449 ++++++++++++++++++++
internal/agent/tools/edit.md           |  60 ++
internal/agent/tools/fetch.go          | 203 +++++++++
internal/agent/tools/fetch.md          |  28 +
internal/agent/tools/file.go           |  53 ++
internal/agent/tools/glob.go           | 118 +++++
internal/agent/tools/glob.md           |  42 +
internal/agent/tools/grep.go           | 430 +++++++++++++++++++
internal/agent/tools/grep.md           |  52 ++
internal/agent/tools/grep_test.go      | 200 +++++++++
internal/agent/tools/ls.go             | 239 ++++++++++
internal/agent/tools/ls.md             |  35 +
internal/agent/tools/mcp-tools.go      | 432 +++++++++++++++++++
internal/agent/tools/multiedit.go      | 366 ++++++++++++++++
internal/agent/tools/multiedit.md      |  50 ++
internal/agent/tools/rg.go             |  53 ++
internal/agent/tools/safe.go           |  70 +++
internal/agent/tools/sourcegraph.go    | 265 +++++++++++
internal/agent/tools/sourcegraph.md    |  57 ++
internal/agent/tools/tools.go          |  26 +
internal/agent/tools/view.go           | 308 +++++++++++++
internal/agent/tools/view.md           |  36 +
internal/agent/tools/write.go          | 177 +++++++
internal/agent/tools/write.md          |  31 +
internal/app/app.go                    |   1 
internal/config/config.go              |  19 
internal/message/content.go            |  93 +++
45 files changed, 5,822 insertions(+), 59 deletions(-)

Detailed changes

Taskfile.yaml

@@ -21,7 +21,7 @@ tasks:
     env:
       GOEXPERIMENT: null
 
-  lint-fix:
+  lint:fix:
     desc: Run base linters and fix issues
     cmds:
       - golangci-lint run --path-mode=abs --config=".golangci.yml" --timeout=5m --fix

go.mod

@@ -7,7 +7,6 @@ require (
 	github.com/MakeNowJust/heredoc v1.0.0
 	github.com/PuerkitoBio/goquery v1.10.3
 	github.com/alecthomas/chroma/v2 v2.20.0
-	github.com/anthropics/anthropic-sdk-go v1.12.0
 	github.com/atotto/clipboard v0.1.4
 	github.com/aymanbagabas/go-udiff v0.3.1
 	github.com/bmatcuk/doublestar/v4 v4.9.1
@@ -42,18 +41,18 @@ require (
 	github.com/stretchr/testify v1.11.1
 	github.com/tidwall/sjson v1.2.5
 	github.com/zeebo/xxh3 v1.0.2
+	google.golang.org/genai v1.26.0
 	gopkg.in/natefinch/lumberjack.v2 v2.2.1
 	mvdan.cc/sh/v3 v3.12.1-0.20250902163504-3cf4fd5717a5
 )
 
 require (
 	cloud.google.com/go v0.116.0 // indirect
-	cloud.google.com/go/auth v0.13.0 // indirect
-	cloud.google.com/go/auth/oauth2adapt v0.2.6 // indirect
-	cloud.google.com/go/compute/metadata v0.6.0 // indirect
+	cloud.google.com/go/auth v0.9.3 // indirect
+	cloud.google.com/go/auth/oauth2adapt v0.2.4 // indirect
+	cloud.google.com/go/compute/metadata v0.5.0 // indirect
 	github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 // indirect
 	github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
-	github.com/andybalholm/cascadia v1.3.3 // indirect
 	github.com/aws/aws-sdk-go-v2 v1.30.3 // indirect
 	github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.3 // indirect
 	github.com/aws/aws-sdk-go-v2/config v1.27.27 // indirect
@@ -68,6 +67,38 @@ require (
 	github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.4 // indirect
 	github.com/aws/aws-sdk-go-v2/service/sts v1.30.3 // indirect
 	github.com/aws/smithy-go v1.20.3 // indirect
+	github.com/felixge/httpsnoop v1.0.4 // indirect
+	github.com/go-logr/logr v1.4.3 // indirect
+	github.com/go-logr/stdr v1.2.2 // indirect
+	github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
+	github.com/google/go-cmp v0.7.0 // indirect
+	github.com/google/s2a-go v0.1.8 // indirect
+	github.com/googleapis/enterprise-certificate-proxy v0.3.4 // indirect
+	github.com/gorilla/websocket v1.5.3 // indirect
+	go.opencensus.io v0.24.0 // indirect
+	go.opentelemetry.io/auto/sdk v1.1.0 // indirect
+	go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0 // indirect
+	go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 // indirect
+	go.opentelemetry.io/otel v1.37.0 // indirect
+	go.opentelemetry.io/otel/metric v1.37.0 // indirect
+	go.opentelemetry.io/otel/trace v1.37.0 // indirect
+	golang.org/x/crypto v0.41.0 // indirect
+	golang.org/x/oauth2 v0.30.0 // indirect
+	golang.org/x/time v0.6.0 // indirect
+	google.golang.org/api v0.197.0 // indirect
+	google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 // indirect
+	google.golang.org/grpc v1.66.2 // indirect
+	google.golang.org/protobuf v1.36.8 // indirect
+)
+
+require (
+	github.com/anthropics/anthropic-sdk-go v1.12.0
+	github.com/charmbracelet/fantasy v0.0.0-20250930080945-d8adb55feb9a
+	github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
+)
+
+require (
+	github.com/andybalholm/cascadia v1.3.3 // indirect
 	github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
 	github.com/aymerick/douceur v0.2.0 // indirect
 	github.com/bahlo/generic-list-go v0.2.0 // indirect
@@ -75,7 +106,7 @@ require (
 	github.com/charmbracelet/colorprofile v0.3.2 // indirect
 	github.com/charmbracelet/ultraviolet v0.0.0-20250915111650-81d4262876ef
 	github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a // indirect
-	github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d
+	github.com/charmbracelet/x/exp/slice v0.0.0-20250904123553-b4e2667e5ad5
 	github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4
 	github.com/charmbracelet/x/term v0.2.1
 	github.com/charmbracelet/x/termios v0.1.1 // indirect
@@ -85,17 +116,9 @@ require (
 	github.com/disintegration/gift v1.1.2 // indirect
 	github.com/dlclark/regexp2 v1.11.5 // indirect
 	github.com/dustin/go-humanize v1.0.1 // indirect
-	github.com/felixge/httpsnoop v1.0.4 // indirect
 	github.com/fsnotify/fsnotify v1.9.0 // indirect
 	github.com/go-logfmt/logfmt v0.6.0 // indirect
-	github.com/go-logr/logr v1.4.3 // indirect
-	github.com/go-logr/stdr v1.2.2 // indirect
-	github.com/google/go-cmp v0.7.0 // indirect
-	github.com/google/s2a-go v0.1.8 // indirect
-	github.com/googleapis/enterprise-certificate-proxy v0.3.4 // indirect
-	github.com/googleapis/gax-go/v2 v2.14.1 // indirect
 	github.com/gorilla/css v1.0.1 // indirect
-	github.com/gorilla/websocket v1.5.3 // indirect
 	github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
 	github.com/inconshreveable/mousetrap v1.1.0 // indirect
 	github.com/klauspost/compress v1.18.0 // indirect
@@ -134,28 +157,14 @@ require (
 	github.com/yosida95/uritemplate/v3 v3.0.2 // indirect
 	github.com/yuin/goldmark v1.7.8 // indirect
 	github.com/yuin/goldmark-emoji v1.0.5 // indirect
-	go.opentelemetry.io/auto/sdk v1.1.0 // indirect
-	go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0 // indirect
-	go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 // indirect
-	go.opentelemetry.io/otel v1.37.0 // indirect
-	go.opentelemetry.io/otel/metric v1.37.0 // indirect
-	go.opentelemetry.io/otel/trace v1.37.0 // indirect
 	go.uber.org/multierr v1.11.0 // indirect
-	golang.org/x/crypto v0.41.0 // indirect
 	golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect
 	golang.org/x/image v0.26.0 // indirect
 	golang.org/x/net v0.43.0 // indirect
-	golang.org/x/oauth2 v0.30.0 // indirect
 	golang.org/x/sync v0.17.0 // indirect
 	golang.org/x/sys v0.36.0 // indirect
 	golang.org/x/term v0.34.0 // indirect
 	golang.org/x/text v0.29.0
-	golang.org/x/time v0.8.0 // indirect
-	google.golang.org/api v0.211.0 // indirect
-	google.golang.org/genai v1.26.0
-	google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 // indirect
-	google.golang.org/grpc v1.71.0 // indirect
-	google.golang.org/protobuf v1.36.8 // indirect
 	gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
 	mvdan.cc/sh/moreinterp v0.0.0-20250902163504-3cf4fd5717a5
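
For orientation: the newly added charmbracelet/fantasy module supplies the ai and anthropic packages used throughout internal/agent below, replacing the direct anthropic-sdk-go wiring. A minimal sketch of the kind of call it enables, mirroring getCacheControlOptions in agent.go; only the two fantasy packages are implied by this diff, the rest is illustrative.

```go
package main

import (
	"fmt"

	"github.com/charmbracelet/fantasy/ai"
	"github.com/charmbracelet/fantasy/anthropic"
)

// ephemeralCache mirrors getCacheControlOptions in agent.go: it builds the
// provider options that tag a prompt part or tool with Anthropic's
// "ephemeral" prompt-caching control.
func ephemeralCache() ai.ProviderOptions {
	return ai.ProviderOptions{
		anthropic.Name: &anthropic.ProviderCacheControlOptions{
			CacheControl: anthropic.CacheControl{Type: "ephemeral"},
		},
	}
}

func main() {
	fmt.Printf("%#v\n", ephemeralCache())
}
```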

go.sum

@@ -1,11 +1,12 @@
+cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 cloud.google.com/go v0.116.0 h1:B3fRrSDkLRt5qSHWe40ERJvhvnQwdZiHu0bJOpldweE=
 cloud.google.com/go v0.116.0/go.mod h1:cEPSRWPzZEswwdr9BxE6ChEn01dWlTaF05LiC2Xs70U=
-cloud.google.com/go/auth v0.13.0 h1:8Fu8TZy167JkW8Tj3q7dIkr2v4cndv41ouecJx0PAHs=
-cloud.google.com/go/auth v0.13.0/go.mod h1:COOjD9gwfKNKz+IIduatIhYJQIc0mG3H102r/EMxX6Q=
-cloud.google.com/go/auth/oauth2adapt v0.2.6 h1:V6a6XDu2lTwPZWOawrAa9HUK+DB2zfJyTuciBG5hFkU=
-cloud.google.com/go/auth/oauth2adapt v0.2.6/go.mod h1:AlmsELtlEBnaNTL7jCj8VQFLy6mbZv0s4Q7NGBeQ5E8=
-cloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I=
-cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg=
+cloud.google.com/go/auth v0.9.3 h1:VOEUIAADkkLtyfr3BLa3R8Ed/j6w1jTBmARx+wb5w5U=
+cloud.google.com/go/auth v0.9.3/go.mod h1:7z6VY+7h3KUdRov5F1i8NDP5ZzWKYmEPO842BgCsmTk=
+cloud.google.com/go/auth/oauth2adapt v0.2.4 h1:0GWE/FUsXhf6C+jAkWgYm7X9tK8cuEIfy19DBn6B6bY=
+cloud.google.com/go/auth/oauth2adapt v0.2.4/go.mod h1:jC/jOpwFP6JBxhB3P5Rr0a9HLMC/Pe3eaL4NmdvqPtc=
+cloud.google.com/go/compute/metadata v0.5.0 h1:Zr0eK8JbFv6+Wi4ilXAR8FJ3wyNdpxHKJNPos6LTZOY=
+cloud.google.com/go/compute/metadata v0.5.0/go.mod h1:aHnloV2TPI38yx4s9+wAZhHykWvVCfu7hQbF+9CWoiY=
 github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 h1:g0EZJwz7xkXQiZAI5xi9f3WWFYBlX1CPTrR+NDToRkQ=
 github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0/go.mod h1:XCW7KnZet0Opnr7HccfUw1PLc4CjHqpcaxW8DHklNkQ=
 github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0 h1:tfLQ34V6F7tVSwoTf/4lH5sE0o6eCJuNDTmH09nDpbc=
@@ -14,6 +15,7 @@ github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xP
 github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY=
 github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mxXfQidrMEnLlPk9UMeRtyBTnEFtxkV0kU=
 github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
+github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
 github.com/JohannesKaufmann/html-to-markdown v1.6.0 h1:04VXMiE50YYfCfLboJCLcgqF5x+rHJnb1ssNmqpLH/k=
 github.com/JohannesKaufmann/html-to-markdown v1.6.0/go.mod h1:NUI78lGg/a7vpEJTz/0uOcYMaibytE4BUOQS8k78yPQ=
 github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ4pzQ=
@@ -74,6 +76,7 @@ github.com/bmatcuk/doublestar/v4 v4.9.1 h1:X8jg9rRZmJd4yRy7ZeNDRnM+T3ZfHv15JiBJ/
 github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
 github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs=
 github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0=
+github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
 github.com/charlievieth/fastwalk v1.0.14 h1:3Eh5uaFGwHZd8EGwTjJnSpBkfwfsak9h6ICgnWlhAyg=
 github.com/charlievieth/fastwalk v1.0.14/go.mod h1:diVcUreiU1aQ4/Wu3NbxxH4/KYdKpLDojrQ1Bb2KgNY=
 github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2 h1:973OHYuq2Jx9deyuPwe/6lsuQrDCatOsjP8uCd02URE=
@@ -86,6 +89,8 @@ github.com/charmbracelet/colorprofile v0.3.2 h1:9J27WdztfJQVAQKX2WOlSSRB+5gaKqqI
 github.com/charmbracelet/colorprofile v0.3.2/go.mod h1:mTD5XzNeWHj8oqHb+S1bssQb7vIHbepiebQ2kPKVKbI=
 github.com/charmbracelet/fang v0.4.2 h1:nWr7Tb82/TTNNGMGG35aTZ1X68loAOQmpb0qxkKXjas=
 github.com/charmbracelet/fang v0.4.2/go.mod h1:wHJKQYO5ReYsxx+yZl+skDtrlKO/4LLEQ6EXsdHhRhg=
+github.com/charmbracelet/fantasy v0.0.0-20250930080945-d8adb55feb9a h1:pD/ATiDbIRm8i5cKf8k1NR/0HhmSdAejY2kbIpmBiis=
+github.com/charmbracelet/fantasy v0.0.0-20250930080945-d8adb55feb9a/go.mod h1:RZotHpq44tKZDe6Vf0kk1iDqnUgH7Scx+K/7uJ9Qwnw=
 github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018 h1:PU4Zvpagsk5sgaDxn5W4sxHuLp9QRMBZB3bFSk40A4w=
 github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018/go.mod h1:Z/GLmp9fzaqX4ze3nXG7StgWez5uBM5XtlLHK8V/qSk=
 github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250917201909-41ff0bf215ea h1:g1HfUgSMvye8mgecMD1mPscpt+pzJoDEiSA+p2QXzdQ=
@@ -102,8 +107,8 @@ github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3 h1:1
 github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3/go.mod h1:T9jr8CzFpjhFVHjNjKwbAD7KwBNyFnj2pntAO7F2zw0=
 github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a h1:FsHEJ52OC4VuTzU8t+n5frMjLvpYWEznSr/u8tnkCYw=
 github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
-github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d h1:H2oh4WlSsXy8qwLd7I3eAvPd/X3S40aM9l+h47WF1eA=
-github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d/go.mod h1:vI5nDVMWi6veaYH+0Fmvpbe/+cv/iJfMntdh+N0+Tms=
+github.com/charmbracelet/x/exp/slice v0.0.0-20250904123553-b4e2667e5ad5 h1:DTSZxdV9qQagD4iGcAt9RgaRBZtJl01bfKgdLzUzUPI=
+github.com/charmbracelet/x/exp/slice v0.0.0-20250904123553-b4e2667e5ad5/go.mod h1:vI5nDVMWi6veaYH+0Fmvpbe/+cv/iJfMntdh+N0+Tms=
 github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4 h1:ZhDGU688EHQXslD9KphRpXwK0pKP03egUoZAATUDlV0=
 github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4/go.mod h1:cmdl5zlP5mR8TF2Y68UKc7hdGUDiSJ2+4hk0h04Hsx4=
 github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
@@ -112,6 +117,8 @@ github.com/charmbracelet/x/termios v0.1.1 h1:o3Q2bT8eqzGnGPOYheoYS8eEleT5ZVNYNy8
 github.com/charmbracelet/x/termios v0.1.1/go.mod h1:rB7fnv1TgOPOyyKRJ9o+AsTU/vK5WHJ2ivHeut/Pcwo=
 github.com/charmbracelet/x/windows v0.2.2 h1:IofanmuvaxnKHuV04sC0eBy/smG6kIKrWG2/jYn2GuM=
 github.com/charmbracelet/x/windows v0.2.2/go.mod h1:/8XtdKZzedat74NQFn0NGlGL4soHB0YQZrETF96h75k=
+github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
+github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
 github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
 github.com/creack/pty v1.1.24 h1:bJrF4RRfyJnbTJqzRLHzcGaZK1NeM5kTC9jGgovnR1s=
 github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfvcwE=
@@ -128,6 +135,10 @@ github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZ
 github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
 github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
 github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
+github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
+github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
+github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
+github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
 github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
 github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
 github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
@@ -144,21 +155,42 @@ github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
 github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
 github.com/go-quicktest/qt v1.101.0 h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7eI=
 github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow=
+github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs=
+github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
 github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
 github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
+github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
+github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
+github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
+github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
+github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
+github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
+github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
+github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
+github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
+github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
 github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
 github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
+github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
+github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
 github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
 github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
 github.com/google/s2a-go v0.1.8 h1:zZDs9gcbt9ZPLV0ndSyQk6Kacx2g/X+SKYovpnz3SMM=
 github.com/google/s2a-go v0.1.8/go.mod h1:6iNWHTpQ+nfNRN5E00MSdfDwVesa8hhS32PhPO8deJA=
+github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
 github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
 github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
 github.com/googleapis/enterprise-certificate-proxy v0.3.4 h1:XYIDZApgAnrN1c855gTgghdIA6Stxb52D5RnLI1SLyw=
 github.com/googleapis/enterprise-certificate-proxy v0.3.4/go.mod h1:YKe7cfqYXjKGpGvmSg28/fFvhNzinZQm8DGnaburhGA=
-github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrkurSS/Q=
-github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA=
 github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
 github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
 github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
@@ -241,6 +273,7 @@ github.com/posthog/posthog-go v1.6.10 h1:OA6bkiUg89rI7f5cSXbcrH5+wLinyS6hHplnD92
 github.com/posthog/posthog-go v1.6.10/go.mod h1:LcC1Nu4AgvV22EndTtrMXTy+7RGVC0MhChSw7Qk5XkY=
 github.com/pressly/goose/v3 v3.25.0 h1:6WeYhMWGRCzpyd89SpODFnCBCKz41KrVbRT58nVjGng=
 github.com/pressly/goose/v3 v3.25.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY=
+github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
 github.com/qjebbs/go-jsons v1.0.0-alpha.4 h1:Qsb4ohRUHQODIUAsJKdKJ/SIDbsO7oGOzsfy+h1yQZs=
 github.com/qjebbs/go-jsons v1.0.0-alpha.4/go.mod h1:wNJrtinHyC3YSf6giEh4FJN8+yZV7nXBjvmfjhBIcw4=
 github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
@@ -276,9 +309,14 @@ github.com/srwiley/oksvg v0.0.0-20221011165216-be6e8873101c/go.mod h1:cNQ3dwVJtS
 github.com/srwiley/rasterx v0.0.0-20220730225603-2ab79fcdd4ef h1:Ch6Q+AZUxDBCVqdkI8FSpFyZDtCVBc2VmejdNrm5rRQ=
 github.com/srwiley/rasterx v0.0.0-20220730225603-2ab79fcdd4ef/go.mod h1:nXTWP6+gD5+LUJ8krVhhoeHjvHTutPxMYl5SvkcnJNE=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
 github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
 github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
 github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
 github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
 github.com/tetratelabs/wazero v1.9.0 h1:IcZ56OuxrtaEz8UYNRHBrUa9bYeX9oVY93KspZZBf/I=
@@ -313,6 +351,8 @@ github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ=
 github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0=
 github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0=
 github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA=
+go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
+go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
 go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
 go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0 h1:r6I7RJCN86bpD/FQwedZ0vSixDpwuWREjW9oRMsmqDc=
@@ -323,15 +363,12 @@ go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ=
 go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I=
 go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE=
 go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E=
-go.opentelemetry.io/otel/sdk v1.34.0 h1:95zS4k/2GOy069d321O8jWgYsW3MzVV+KuSPKp7Wr1A=
-go.opentelemetry.io/otel/sdk v1.34.0/go.mod h1:0e/pNiaMAqaykJGKbi+tSjWfNNHMTxoC9qANsCzbyxU=
-go.opentelemetry.io/otel/sdk/metric v1.34.0 h1:5CeK9ujjbFVL5c1PhLuStg1wxA7vQv7ce1EK0Gyvahk=
-go.opentelemetry.io/otel/sdk/metric v1.34.0/go.mod h1:jQ/r8Ze28zRKoNRdkjCZxfs6YvBTG1+YIqyFVFYec5w=
 go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4=
 go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0=
 go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
 go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
 golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
 golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
@@ -340,16 +377,26 @@ golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v
 golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
 golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
 golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
+golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o=
 golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8=
 golang.org/x/image v0.26.0 h1:4XjIFEZWQmCZi6Wv8BoxsDhRU3RVnLX04dToTDAEPlY=
 golang.org/x/image v0.26.0/go.mod h1:lcxbMFAovzpnJxzXS3nyL83K27tmqtKzIJpctK8YO5c=
+golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
+golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
+golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
 golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
 golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
 golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
 golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
 golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
+golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
 golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
 golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
 golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
 golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
@@ -362,8 +409,11 @@ golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
 golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
 golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
 golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
+golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
 golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
+golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -373,7 +423,10 @@ golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
 golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
 golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
 golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
+golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -414,23 +467,47 @@ golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
 golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
 golang.org/x/text v0.29.0 h1:1neNs90w9YzJ9BocxfsQNHKuAT4pkghyXc4nhZ6sJvk=
 golang.org/x/text v0.29.0/go.mod h1:7MhJOA9CD2qZyOKYazxdYMF85OwPdEr9jTtBpO7ydH4=
-golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg=
-golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
+golang.org/x/time v0.6.0 h1:eTDhh4ZXt5Qf0augr54TN6suAUudPcawVZeIAPU7D4U=
+golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
+golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
 golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
 golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
 golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
 golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-google.golang.org/api v0.211.0 h1:IUpLjq09jxBSV1lACO33CGY3jsRcbctfGzhj+ZSE/Bg=
-google.golang.org/api v0.211.0/go.mod h1:XOloB4MXFH4UTlQSGuNUxw0UT74qdENK8d6JNsXKLi0=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+google.golang.org/api v0.197.0 h1:x6CwqQLsFiA5JKAiGyGBjc2bNtHtLddhJCE2IKuhhcQ=
+google.golang.org/api v0.197.0/go.mod h1:AuOuo20GoQ331nq7DquGHlU6d+2wN2fZ8O0ta60nRNw=
+google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
+google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
 google.golang.org/genai v1.26.0 h1:r4HGL54kFv/WCRMTAbZg05Ct+vXfhAbTRlXhFyBkEQo=
 google.golang.org/genai v1.26.0/go.mod h1:OClfdf+r5aaD+sCd4aUSkPzJItmg2wD/WON9lQnRPaY=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 h1:e0AIkUUhxyBKh6ssZNrAMeqhA7RKUj42346d1y02i2g=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
-google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg=
-google.golang.org/grpc v1.71.0/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec=
+google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
+google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
+google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 h1:pPJltXNxVzT4pK9yD8vR9X75DaWYYmLGMsEvBfFQZzQ=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU=
+google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
+google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
+google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
+google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
+google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
+google.golang.org/grpc v1.66.2 h1:3QdXkuq3Bkh7w+ywLdLvM56cmGvQHUMZpiCzt6Rqaoo=
+google.golang.org/grpc v1.66.2/go.mod h1:s3/l6xSSCURdVfAnL+TqCNMyTDAGN6+lZeVxnZR128Y=
+google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
+google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
+google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
+google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
+google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
+google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
 google.golang.org/protobuf v1.36.8 h1:xHScyCOEuuwZEc6UtSOvPbAT4zRh0xcNRYekJwfqyMc=
 google.golang.org/protobuf v1.36.8/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -447,6 +524,8 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
 gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
 modernc.org/libc v1.66.3 h1:cfCbjTUcdsKyyZZfEUKfoHcP3S0Wkvz3jgSzByEWVCQ=
 modernc.org/libc v1.66.3/go.mod h1:XD9zO8kt59cANKvHPXpx7yS2ELPheAey0vjIuZOhOU8=
 modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=

internal/agent/agent.go

@@ -0,0 +1,620 @@
+package agent
+
+import (
+	"context"
+	_ "embed"
+	"errors"
+	"fmt"
+	"log/slog"
+	"strings"
+	"time"
+
+	"github.com/charmbracelet/catwalk/pkg/catwalk"
+	"github.com/charmbracelet/crush/internal/csync"
+	"github.com/charmbracelet/crush/internal/llm/tools"
+	"github.com/charmbracelet/crush/internal/message"
+	"github.com/charmbracelet/crush/internal/permission"
+	"github.com/charmbracelet/crush/internal/session"
+	"github.com/charmbracelet/fantasy/ai"
+	"github.com/charmbracelet/fantasy/anthropic"
+)
+
+//go:embed templates/title.md
+var titlePrompt []byte
+
+//go:embed templates/summary.md
+var summaryPrompt []byte
+
+type SessionAgentCall struct {
+	SessionID        string
+	Prompt           string
+	ProviderOptions  ai.ProviderOptions
+	Attachments      []message.Attachment
+	MaxOutputTokens  int64
+	Temperature      *float64
+	TopP             *float64
+	TopK             *int64
+	FrequencyPenalty *float64
+	PresencePenalty  *float64
+}
+
+type SessionAgent interface {
+	Run(context.Context, SessionAgentCall) (*ai.AgentResult, error)
+	SetModels(large Model, small Model)
+	SetTools(tools []ai.AgentTool)
+	Cancel(sessionID string)
+	CancelAll()
+	IsSessionBusy(sessionID string) bool
+	IsBusy() bool
+	QueuedPrompts(sessionID string) int
+	ClearQueue(sessionID string)
+	Summarize(context.Context, string) error
+}
+
+type Model struct {
+	model  ai.LanguageModel
+	config catwalk.Model
+}
+
+type sessionAgent struct {
+	largeModel      Model
+	smallModel      Model
+	systemPrompt    string
+	tools           []ai.AgentTool
+	maxOutputTokens int64
+	sessions        session.Service
+	messages        message.Service
+
+	messageQueue   *csync.Map[string, []SessionAgentCall]
+	activeRequests *csync.Map[string, context.CancelFunc]
+}
+
+type SessionAgentOption func(*sessionAgent)
+
+func NewSessionAgent() SessionAgent {
+	return &sessionAgent{}
+}
+
+func (a *sessionAgent) Run(ctx context.Context, call SessionAgentCall) (*ai.AgentResult, error) {
+	if call.Prompt == "" {
+		return nil, ErrEmptyPrompt
+	}
+	if call.SessionID == "" {
+		return nil, ErrSessionMissing
+	}
+
+	// Queue the message if busy
+	if a.IsSessionBusy(call.SessionID) {
+		existing, ok := a.messageQueue.Get(call.SessionID)
+		if !ok {
+			existing = []SessionAgentCall{}
+		}
+		existing = append(existing, call)
+		a.messageQueue.Set(call.SessionID, existing)
+		return nil, nil
+	}
+
+	if len(a.tools) > 0 {
+		// add anthropic caching to the last tool
+		a.tools[len(a.tools)-1].SetProviderOptions(a.getCacheControlOptions())
+	}
+
+	agent := ai.NewAgent(
+		a.largeModel.model,
+		ai.WithSystemPrompt(a.systemPrompt),
+		ai.WithTools(a.tools...),
+		ai.WithMaxOutputTokens(a.maxOutputTokens),
+	)
+
+	currentSession, err := a.sessions.Get(ctx, call.SessionID)
+	if err != nil {
+		return nil, fmt.Errorf("failed to get session: %w", err)
+	}
+
+	msgs, err := a.getSessionMessages(ctx, currentSession)
+	if err != nil {
+		return nil, fmt.Errorf("failed to get session messages: %w", err)
+	}
+
+	// Generate title if first message
+	if len(msgs) == 0 {
+		go a.generateTitle(ctx, currentSession, call.Prompt)
+	}
+
+	// Add the user message to the session
+	_, err = a.createUserMessage(ctx, call)
+	if err != nil {
+		return nil, err
+	}
+
+	// add the session to the context
+	ctx = context.WithValue(ctx, tools.SessionIDContextKey, call.SessionID)
+
+	genCtx, cancel := context.WithCancel(ctx)
+	a.activeRequests.Set(call.SessionID, cancel)
+
+	defer cancel()
+	defer a.activeRequests.Del(call.SessionID)
+
+	history, files := a.preparePrompt(msgs, call.Attachments...)
+
+	var currentAssistant *message.Message
+	result, err := agent.Stream(genCtx, ai.AgentStreamCall{
+		Prompt:           call.Prompt,
+		Files:            files,
+		Messages:         history,
+		ProviderOptions:  call.ProviderOptions,
+		MaxOutputTokens:  &call.MaxOutputTokens,
+		TopP:             call.TopP,
+		Temperature:      call.Temperature,
+		PresencePenalty:  call.PresencePenalty,
+		TopK:             call.TopK,
+		FrequencyPenalty: call.FrequencyPenalty,
+		// Before each step create the new assistant message
+		PrepareStep: func(options ai.PrepareStepFunctionOptions) (prepared ai.PrepareStepResult, err error) {
+			var assistantMsg message.Message
+			assistantMsg, err = a.messages.Create(genCtx, call.SessionID, message.CreateMessageParams{
+				Role:     message.Assistant,
+				Parts:    []message.ContentPart{},
+				Model:    a.largeModel.model.Model(),
+				Provider: a.largeModel.model.Provider(),
+			})
+			if err != nil {
+				return prepared, err
+			}
+
+			currentAssistant = &assistantMsg
+
+			prepared.Messages = options.Messages
+
+			queuedCalls, _ := a.messageQueue.Get(call.SessionID)
+			a.messageQueue.Del(call.SessionID)
+			for _, queued := range queuedCalls {
+				userMessage, createErr := a.createUserMessage(genCtx, queued)
+				if createErr != nil {
+					return prepared, createErr
+				}
+				prepared.Messages = append(prepared.Messages, userMessage.ToAIMessage()...)
+			}
+
+			lastSystemRoleInx := 0
+			systemMessageUpdated := false
+			for i, msg := range prepared.Messages {
+				// only add cache control to the last message
+				if msg.Role == ai.MessageRoleSystem {
+					lastSystemRoleInx = i
+				} else if !systemMessageUpdated {
+					prepared.Messages[lastSystemRoleInx].ProviderOptions = a.getCacheControlOptions()
+					systemMessageUpdated = true
+				}
+				// then add cache control to the last 2 messages
+				if i > len(msgs)-3 {
+					prepared.Messages[i].ProviderOptions = a.getCacheControlOptions()
+				}
+			}
+			return prepared, err
+		},
+		OnReasoningDelta: func(id string, text string) error {
+			currentAssistant.AppendReasoningContent(text)
+			return a.messages.Update(genCtx, *currentAssistant)
+		},
+		OnReasoningEnd: func(id string, reasoning ai.ReasoningContent) error {
+			// handle anthropic signature
+			if anthropicData, ok := reasoning.ProviderMetadata[anthropic.Name]; ok {
+				if reasoning, ok := anthropicData.(*anthropic.ReasoningOptionMetadata); ok {
+					currentAssistant.AppendReasoningSignature(reasoning.Signature)
+				}
+			}
+			currentAssistant.FinishThinking()
+			return a.messages.Update(genCtx, *currentAssistant)
+		},
+		OnTextDelta: func(id string, text string) error {
+			currentAssistant.AppendContent(text)
+			return a.messages.Update(genCtx, *currentAssistant)
+		},
+		OnToolInputStart: func(id string, toolName string) error {
+			toolCall := message.ToolCall{
+				ID:               id,
+				Name:             toolName,
+				ProviderExecuted: false,
+				Finished:         false,
+			}
+			currentAssistant.AddToolCall(toolCall)
+			return a.messages.Update(genCtx, *currentAssistant)
+		},
+		OnRetry: func(err *ai.APICallError, delay time.Duration) {
+			// TODO: implement
+		},
+		OnToolResult: func(result ai.ToolResultContent) error {
+			var resultContent string
+			isError := false
+			switch result.Result.GetType() {
+			case ai.ToolResultContentTypeText:
+				r, ok := ai.AsToolResultOutputType[ai.ToolResultOutputContentText](result.Result)
+				if ok {
+					resultContent = r.Text
+				}
+			case ai.ToolResultContentTypeError:
+				r, ok := ai.AsToolResultOutputType[ai.ToolResultOutputContentError](result.Result)
+				if ok {
+					isError = true
+					resultContent = r.Error.Error()
+				}
+			case ai.ToolResultContentTypeMedia:
+				// TODO: handle this message type
+			}
+			toolResult := message.ToolResult{
+				ToolCallID: result.ToolCallID,
+				Name:       result.ToolName,
+				Content:    resultContent,
+				IsError:    isError,
+				Metadata:   result.ClientMetadata,
+			}
+			a.messages.Create(context.Background(), currentAssistant.SessionID, message.CreateMessageParams{
+				Role: message.Tool,
+				Parts: []message.ContentPart{
+					toolResult,
+				},
+			})
+			return a.messages.Update(genCtx, *currentAssistant)
+		},
+		OnStepFinish: func(stepResult ai.StepResult) error {
+			finishReason := message.FinishReasonUnknown
+			switch stepResult.FinishReason {
+			case ai.FinishReasonLength:
+				finishReason = message.FinishReasonMaxTokens
+			case ai.FinishReasonStop:
+				finishReason = message.FinishReasonEndTurn
+			case ai.FinishReasonToolCalls:
+				finishReason = message.FinishReasonToolUse
+			}
+			currentAssistant.AddFinish(finishReason, "", "")
+			a.updateSessionUsage(a.largeModel, &currentSession, stepResult.Usage)
+			return a.messages.Update(genCtx, *currentAssistant)
+		},
+	})
+	if err != nil {
+		isCancelErr := errors.Is(err, context.Canceled)
+		isPermissionErr := errors.Is(err, permission.ErrorPermissionDenied)
+		if currentAssistant == nil {
+			return result, err
+		}
+		toolCalls := currentAssistant.ToolCalls()
+		toolResults := currentAssistant.ToolResults()
+		for _, tc := range toolCalls {
+			if !tc.Finished {
+				tc.Finished = true
+				tc.Input = "{}"
+			}
+			currentAssistant.AddToolCall(tc)
+			found := false
+			for _, tr := range toolResults {
+				if tr.ToolCallID == tc.ID {
+					found = true
+					break
+				}
+			}
+			if !found {
+				content := "There was an error while executing the tool"
+				if isCancelErr {
+					content = "Tool execution canceled by user"
+				} else if isPermissionErr {
+					content = "Permission denied"
+				}
+				currentAssistant.AddToolResult(message.ToolResult{
+					ToolCallID: tc.ID,
+					Name:       tc.Name,
+					Content:    content,
+					IsError:    true,
+				})
+			}
+		}
+		if isCancelErr {
+			currentAssistant.AddFinish(message.FinishReasonCanceled, "Request cancelled", "")
+		} else if isPermissionErr {
+			currentAssistant.AddFinish(message.FinishReasonPermissionDenied, "Permission denied", "")
+		} else {
+			currentAssistant.AddFinish(message.FinishReasonError, "API Error", err.Error())
+		}
+		// INFO: we use the parent context here because the genCtx might have been cancelled
+		updateErr := a.messages.Update(ctx, *currentAssistant)
+		if updateErr != nil {
+			return nil, updateErr
+		}
+	}
+	if err != nil {
+		return nil, err
+	}
+
+	queuedMessages, ok := a.messageQueue.Get(call.SessionID)
+	if !ok || len(queuedMessages) == 0 {
+		return result, err
+	}
+	// there are queued messages; restart the loop
+	firstQueuedMessage := queuedMessages[0]
+	a.messageQueue.Set(call.SessionID, queuedMessages[1:])
+	return a.Run(genCtx, firstQueuedMessage)
+}
+
+func (a *sessionAgent) Summarize(ctx context.Context, sessionID string) error {
+	if a.IsSessionBusy(sessionID) {
+		return ErrSessionBusy
+	}
+
+	currentSession, err := a.sessions.Get(ctx, sessionID)
+	if err != nil {
+		return fmt.Errorf("failed to get session: %w", err)
+	}
+	msgs, err := a.getSessionMessages(ctx, currentSession)
+	if err != nil {
+		return err
+	}
+	if len(msgs) == 0 {
+		// nothing to summarize
+		return nil
+	}
+
+	aiMsgs, _ := a.preparePrompt(msgs)
+
+	genCtx, cancel := context.WithCancel(ctx)
+	a.activeRequests.Set(sessionID, cancel)
+	defer a.activeRequests.Del(sessionID)
+	defer cancel()
+
+	agent := ai.NewAgent(a.largeModel.model,
+		ai.WithSystemPrompt(string(summaryPrompt)),
+	)
+	summaryMessage, err := a.messages.Create(ctx, sessionID, message.CreateMessageParams{
+		Role:     message.Assistant,
+		Model:    a.largeModel.model.Model(),
+		Provider: a.largeModel.model.Provider(),
+	})
+	if err != nil {
+		return err
+	}
+
+	resp, err := agent.Stream(ctx, ai.AgentStreamCall{
+		Prompt:   "Provide a detailed summary of our conversation above.",
+		Messages: aiMsgs,
+		OnReasoningDelta: func(id string, text string) error {
+			summaryMessage.AppendReasoningContent(text)
+			return a.messages.Update(ctx, summaryMessage)
+		},
+		OnReasoningEnd: func(id string, reasoning ai.ReasoningContent) error {
+			// handle anthropic signature
+			if anthropicData, ok := reasoning.ProviderMetadata["anthropic"]; ok {
+				if signature, ok := anthropicData.(*anthropic.ReasoningOptionMetadata); ok && signature.Signature != "" {
+					summaryMessage.AppendReasoningSignature(signature.Signature)
+				}
+			}
+			summaryMessage.FinishThinking()
+			return a.messages.Update(ctx, summaryMessage)
+		},
+		OnTextDelta: func(id, text string) error {
+			summaryMessage.AppendContent(text)
+			return a.messages.Update(ctx, summaryMessage)
+		},
+	})
+	if err != nil {
+		return err
+	}
+
+	summaryMessage.AddFinish(message.FinishReasonEndTurn, "", "")
+	err = a.messages.Update(genCtx, summaryMessage)
+	if err != nil {
+		return err
+	}
+
+	a.updateSessionUsage(a.largeModel, &currentSession, resp.TotalUsage)
+
+	// just in case, record only the last response's usage
+	usage := resp.Response.Usage
+	currentSession.SummaryMessageID = summaryMessage.ID
+	currentSession.CompletionTokens = usage.OutputTokens
+	currentSession.PromptTokens = 0
+	_, err = a.sessions.Save(genCtx, currentSession)
+	return err
+}
+
+func (a *sessionAgent) getCacheControlOptions() ai.ProviderOptions {
+	return ai.ProviderOptions{
+		anthropic.Name: &anthropic.ProviderCacheControlOptions{
+			CacheControl: anthropic.CacheControl{Type: "ephemeral"},
+		},
+	}
+}
+
+func (a *sessionAgent) createUserMessage(ctx context.Context, call SessionAgentCall) (message.Message, error) {
+	var attachmentParts []message.ContentPart
+	for _, attachment := range call.Attachments {
+		attachmentParts = append(attachmentParts, message.BinaryContent{Path: attachment.FilePath, MIMEType: attachment.MimeType, Data: attachment.Content})
+	}
+	parts := []message.ContentPart{message.TextContent{Text: call.Prompt}}
+	parts = append(parts, attachmentParts...)
+	msg, err := a.messages.Create(ctx, call.SessionID, message.CreateMessageParams{
+		Role:  message.User,
+		Parts: parts,
+	})
+	if err != nil {
+		return message.Message{}, fmt.Errorf("failed to create user message: %w", err)
+	}
+	return msg, nil
+}
+
+func (a *sessionAgent) preparePrompt(msgs []message.Message, attachments ...message.Attachment) ([]ai.Message, []ai.FilePart) {
+	var history []ai.Message
+	for _, m := range msgs {
+		if len(m.Parts) == 0 {
+			continue
+		}
+		// Assistant message without content or tool calls (cancelled before it returned anything)
+		if m.Role == message.Assistant && len(m.ToolCalls()) == 0 && m.Content().Text == "" && m.ReasoningContent().String() == "" {
+			continue
+		}
+		history = append(history, m.ToAIMessage()...)
+	}
+
+	var files []ai.FilePart
+	for _, attachment := range attachments {
+		files = append(files, ai.FilePart{
+			Filename:  attachment.FileName,
+			Data:      attachment.Content,
+			MediaType: attachment.MimeType,
+		})
+	}
+
+	return history, files
+}
+
+func (a *sessionAgent) getSessionMessages(ctx context.Context, session session.Session) ([]message.Message, error) {
+	msgs, err := a.messages.List(ctx, session.ID)
+	if err != nil {
+		return nil, fmt.Errorf("failed to list messages: %w", err)
+	}
+
+	if session.SummaryMessageID != "" {
+		summaryMsgInex := -1
+		for i, msg := range msgs {
+			if msg.ID == session.SummaryMessageID {
+				summaryMsgInex = i
+				break
+			}
+		}
+		if summaryMsgInex != -1 {
+			msgs = msgs[summaryMsgInex:]
+			msgs[0].Role = message.User
+		}
+	}
+	return msgs, nil
+}
+
+func (a *sessionAgent) generateTitle(ctx context.Context, session session.Session, prompt string) {
+	if prompt == "" {
+		return
+	}
+
+	agent := ai.NewAgent(a.smallModel.model,
+		ai.WithSystemPrompt(string(titlePrompt)),
+		ai.WithMaxOutputTokens(40),
+	)
+
+	resp, err := agent.Stream(ctx, ai.AgentStreamCall{
+		Prompt: fmt.Sprintf("Generate a concise title for the following content:\n\n%s", prompt),
+	})
+	if err != nil {
+		slog.Error("error generating title", "err", err)
+		return
+	}
+
+	title := resp.Response.Content.Text()
+
+	title = strings.ReplaceAll(title, "\n", " ")
+
+	// remove thinking tags if present
+	if idx := strings.Index(title, "</think>"); idx > 0 {
+		title = title[idx+len("</think>"):]
+	}
+
+	title = strings.TrimSpace(title)
+	if title == "" {
+		slog.Warn("failed to generate title", "warn", "empty title")
+		return
+	}
+
+	session.Title = title
+	a.updateSessionUsage(a.smallModel, &session, resp.TotalUsage)
+	_, saveErr := a.sessions.Save(ctx, session)
+	if saveErr != nil {
+		slog.Error("failed to save session title & usage", "error", saveErr)
+		return
+	}
+}
+
+func (a *sessionAgent) updateSessionUsage(model Model, session *session.Session, usage ai.Usage) {
+	modelConfig := model.config
+	cost := modelConfig.CostPer1MInCached/1e6*float64(usage.CacheCreationTokens) +
+		modelConfig.CostPer1MOutCached/1e6*float64(usage.CacheReadTokens) +
+		modelConfig.CostPer1MIn/1e6*float64(usage.InputTokens) +
+		modelConfig.CostPer1MOut/1e6*float64(usage.OutputTokens)
+	session.Cost += cost
+	session.CompletionTokens = usage.OutputTokens + usage.CacheReadTokens
+	session.PromptTokens = usage.InputTokens + usage.CacheCreationTokens
+}
+
+func (a *sessionAgent) Cancel(sessionID string) {
+	// Cancel regular requests
+	if cancel, ok := a.activeRequests.Take(sessionID); ok && cancel != nil {
+		slog.Info("Request cancellation initiated", "session_id", sessionID)
+		cancel()
+	}
+
+	// Also check for summarize requests
+	if cancel, ok := a.activeRequests.Take(sessionID + "-summarize"); ok && cancel != nil {
+		slog.Info("Summarize cancellation initiated", "session_id", sessionID)
+		cancel()
+	}
+
+	if a.QueuedPrompts(sessionID) > 0 {
+		slog.Info("Clearing queued prompts", "session_id", sessionID)
+		a.messageQueue.Del(sessionID)
+	}
+}
+
+func (a *sessionAgent) ClearQueue(sessionID string) {
+	if a.QueuedPrompts(sessionID) > 0 {
+		slog.Info("Clearing queued prompts", "session_id", sessionID)
+		a.messageQueue.Del(sessionID)
+	}
+}
+
+func (a *sessionAgent) CancelAll() {
+	if !a.IsBusy() {
+		return
+	}
+	for key := range a.activeRequests.Seq2() {
+		a.Cancel(key) // key is sessionID
+	}
+
+	timeout := time.After(5 * time.Second)
+	for a.IsBusy() {
+		select {
+		case <-timeout:
+			return
+		default:
+			time.Sleep(200 * time.Millisecond)
+		}
+	}
+}
+
+func (a *sessionAgent) IsBusy() bool {
+	var busy bool
+	for cancelFunc := range a.activeRequests.Seq() {
+		if cancelFunc != nil {
+			busy = true
+			break
+		}
+	}
+	return busy
+}
+
+func (a *sessionAgent) IsSessionBusy(sessionID string) bool {
+	_, busy := a.activeRequests.Get(sessionID)
+	return busy
+}
+
+func (a *sessionAgent) QueuedPrompts(sessionID string) int {
+	l, ok := a.messageQueue.Get(sessionID)
+	if !ok {
+		return 0
+	}
+	return len(l)
+}
+
+func (a *sessionAgent) SetModels(large Model, small Model) {
+	a.largeModel = large
+	a.smallModel = small
+}
+
+func (a *sessionAgent) SetTools(tools []ai.AgentTool) {
+	a.tools = tools
+}
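
As a rough caller-side sketch (not part of this commit), the new SessionAgent API might be driven as below; it assumes the two models and the tool set are resolved elsewhere, and it only uses methods and fields that appear in agent.go above. The package path and helper name are hypothetical.

```go
package example // hypothetical caller package, not part of this commit

import (
	"context"
	"fmt"

	"github.com/charmbracelet/crush/internal/agent"
	"github.com/charmbracelet/fantasy/ai"
)

// runPrompt assumes the large/small models and the tool set were built
// elsewhere; it only exercises the SessionAgent surface defined in agent.go.
func runPrompt(ctx context.Context, sa agent.SessionAgent, large, small agent.Model, tools []ai.AgentTool, sessionID, prompt string) error {
	sa.SetModels(large, small)
	sa.SetTools(tools)

	// Per Run's queueing logic, a nil result with a nil error means the
	// session was busy and the prompt was queued instead of executed.
	result, err := sa.Run(ctx, agent.SessionAgentCall{
		SessionID:       sessionID,
		Prompt:          prompt,
		MaxOutputTokens: 4096,
	})
	if err != nil {
		return err
	}
	if result == nil {
		fmt.Printf("session busy, prompt queued (%d waiting)\n", sa.QueuedPrompts(sessionID))
		return nil
	}
	fmt.Println(result.Response.Content.Text())
	return nil
}
```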

internal/agent/coordinator.go

@@ -0,0 +1,20 @@
+package agent
+
+import (
+	"context"
+
+	"github.com/charmbracelet/crush/internal/config"
+	"github.com/charmbracelet/crush/internal/message"
+	"github.com/charmbracelet/fantasy/ai"
+)
+
+type Coordinator interface {
+	// INFO: (kujtim) this is not used yet; we will use it once we have multiple agents
+	SetMainAgent(string)
+	Run(ctx context.Context, sessionID, prompt string, attachments ...message.Attachment) (*ai.AgentResult, error)
+}
+
+type coordinator struct {
+	cfg          *config.Config
+	currentAgent SessionAgent
+}
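
The coordinator is only a data-holding stub in this commit. Purely as a sketch of where it appears to be headed, its Run could delegate to the current session agent roughly like this; none of the code below exists in the diff.

```go
// Hypothetical continuation of coordinator.go (same package): delegate Run to
// the currently selected SessionAgent. Not implemented in this commit.
func (c *coordinator) Run(ctx context.Context, sessionID, prompt string, attachments ...message.Attachment) (*ai.AgentResult, error) {
	return c.currentAgent.Run(ctx, SessionAgentCall{
		SessionID:   sessionID,
		Prompt:      prompt,
		Attachments: attachments,
	})
}
```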

internal/agent/errors.go

@@ -0,0 +1,17 @@
+package agent
+
+import (
+	"context"
+	"errors"
+)
+
+var (
+	ErrRequestCancelled = errors.New("request canceled by user")
+	ErrSessionBusy      = errors.New("session is currently processing another request")
+	ErrEmptyPrompt      = errors.New("prompt is empty")
+	ErrSessionMissing   = errors.New("session id is missing")
+)
+
+func isCancelledErr(err error) bool {
+	return errors.Is(err, context.Canceled) || errors.Is(err, ErrRequestCancelled)
+}
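
A small illustration of how a caller might branch on the new sentinel errors; the helper name and messages are hypothetical, while the Err* values and the errors.Is pattern come from the diff.

```go
package example // hypothetical caller package, not part of this commit

import (
	"context"
	"errors"

	"github.com/charmbracelet/crush/internal/agent"
)

// describeRunError maps the sentinel errors exported by errors.go to
// user-facing strings; the messages themselves are illustrative.
func describeRunError(err error) string {
	switch {
	case err == nil:
		return "ok"
	case errors.Is(err, agent.ErrSessionBusy):
		return "session is busy processing another request"
	case errors.Is(err, agent.ErrEmptyPrompt), errors.Is(err, agent.ErrSessionMissing):
		return "invalid request: " + err.Error()
	case errors.Is(err, context.Canceled), errors.Is(err, agent.ErrRequestCancelled):
		return "request canceled by user"
	default:
		return "unexpected error: " + err.Error()
	}
}
```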

internal/agent/prompt/prompt.go

@@ -0,0 +1,140 @@
+package prompt
+
+import (
+	"fmt"
+	"os"
+	"path/filepath"
+	"runtime"
+	"strings"
+	"text/template"
+	"time"
+
+	"github.com/charmbracelet/crush/internal/config"
+	"github.com/charmbracelet/crush/internal/home"
+)
+
+// Prompt represents a template-based prompt generator.
+type Prompt struct {
+	name     string
+	template string
+}
+
+type PromptDat struct {
+	Provider   string
+	Model      string
+	Config     config.Config
+	WorkingDir string
+	IsGitRepo  bool
+	Platform   string
+	Date       string
+}
+
+type ContextFile struct {
+	Path    string
+	Content string
+}
+
+func NewPrompt(name, promptTemplate string) (*Prompt, error) {
+	return &Prompt{
+		name:     name,
+		template: promptTemplate,
+	}, nil
+}
+
+func (p *Prompt) Build(provider, model string, cfg config.Config) (string, error) {
+	t, err := template.New(p.name).Funcs(p.funcMap(cfg)).Parse(p.template)
+	if err != nil {
+		return "", fmt.Errorf("parsing template: %w", err)
+	}
+	var sb strings.Builder
+	if err := t.Execute(&sb, promptData(provider, model, cfg)); err != nil {
+		return "", fmt.Errorf("executing template: %w", err)
+	}
+
+	return sb.String(), nil
+}
+
+func (p *Prompt) funcMap(cfg config.Config) template.FuncMap {
+	return template.FuncMap{
+		"contextFiles": func(path string) []ContextFile {
+			path = expandPath(path, cfg)
+			return processContextPath(path, cfg)
+		},
+	}
+}
+
+func processFile(filePath string) *ContextFile {
+	content, err := os.ReadFile(filePath)
+	if err != nil {
+		return nil
+	}
+	return &ContextFile{
+		Path:    filePath,
+		Content: string(content),
+	}
+}
+
+func processContextPath(p string, cfg config.Config) []ContextFile {
+	var contexts []ContextFile
+	fullPath := p
+	if !filepath.IsAbs(p) {
+		fullPath = filepath.Join(cfg.WorkingDir(), p)
+	}
+	info, err := os.Stat(fullPath)
+	if err != nil {
+		return contexts
+	}
+	if info.IsDir() {
+		filepath.WalkDir(fullPath, func(path string, d os.DirEntry, err error) error {
+			if err != nil {
+				return err
+			}
+			if !d.IsDir() {
+				if result := processFile(path); result != nil {
+					contexts = append(contexts, *result)
+				}
+			}
+			return nil
+		})
+	} else {
+		result := processFile(fullPath)
+		if result != nil {
+			contexts = append(contexts, *result)
+		}
+	}
+	return contexts
+}
+
+// expandPath expands ~ and environment variables in file paths
+func expandPath(path string, cfg config.Config) string {
+	path = home.Long(path)
+	// Handle environment variable expansion using the same pattern as config
+	if strings.HasPrefix(path, "$") {
+		if expanded, err := cfg.Resolver().ResolveValue(path); err == nil {
+			path = expanded
+		}
+	}
+
+	return path
+}
+
+func promptData(provider, model string, cfg config.Config) PromptData {
+	return PromptData{
+		Provider:   provider,
+		Model:      model,
+		Config:     cfg,
+		WorkingDir: cfg.WorkingDir(),
+		IsGitRepo:  isGitRepo(cfg.WorkingDir()),
+		Platform:   runtime.GOOS,
+		Date:       time.Now().Format("1/2/2006"),
+	}
+}
+
+func isGitRepo(dir string) bool {
+	_, err := os.Stat(filepath.Join(dir, ".git"))
+	return err == nil
+}
+
+func (p *Prompt) Name() string {
+	return p.name
+}
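
A small usage sketch of `NewPrompt` and `Build`; the inline template string and the provider/model names are placeholders, not one of the embedded `.gotmpl` files.

```go
// Sketch, assuming the crush config and prompt packages are imported and a
// config.Config is available.
func buildExamplePrompt(cfg config.Config) (string, error) {
	p, err := prompt.NewPrompt("example", "Platform: {{.Platform}}, dir: {{.WorkingDir}}")
	if err != nil {
		return "", err
	}
	// Provider and model strings are placeholders.
	return p.Build("some-provider", "some-model", cfg)
}
```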

internal/agent/templates/coder.gotmpl 🔗

@@ -0,0 +1,133 @@
+You are Crush, a powerful AI Assistant that runs in the CLI.
+Use the instructions below and the tools available to you to assist the user.
+
+<memory_instructions>
+If the current working directory contains a file called CRUSH.md, it will be automatically added to your context.
+
+This file serves multiple purposes:
+
+- Storing frequently used bash commands (build, test, lint, etc.) so you can use them without searching each time
+- Recording the user's code style preferences (naming conventions, preferred libraries, etc.)
+- Maintaining useful information about the codebase structure and organization
+
+When you discover important information that could be useful in the future, add it to CRUSH.md.
+
+Memory might be added to you during a task if there are nested memory files that relate to the work you are doing.
+</memory_instructions>
+<communication_style>
+- Be concise and direct
+- Keep responses under 4 lines unless details requested
+- Answer without preamble/postamble ("Here is...", "The answer is...")
+- One-word answers preferred when possible
+- Never use emojis in your responses
+- You MUST answer concisely with fewer than 4 lines of text (not including tool use or code generation), unless user asks for detail
+- Use markdown formatting for responses when appropriate
+
+<example>
+user: 2 + 2
+assistant: 4
+</example>
+
+<example>
+user: what is 2+2?
+assistant: 4
+</example>
+
+<example>
+user: is 11 a prime number?
+assistant: true
+</example>
+
+<example>
+user: what command should I run to list files in the current directory?
+assistant: ls
+</example>
+
+<example>
+user: what command should I run to watch files in the current directory?
+assistant: [use the ls tool to list the files in the current directory, then read docs/commands in the relevant file to find out how to watch files]
+npm run dev
+</example>
+
+<example>
+user: How many golf balls fit inside a jetta?
+assistant: 150000
+</example>
+
+<example>
+user: what files are in the directory src/?
+assistant: [runs ls and sees foo.c, bar.c, baz.c]
+user: which file contains the implementation of foo?
+assistant: src/foo.c
+</example>
+
+<example>
+user: write tests for new feature
+assistant: [uses grep and glob search tools to find where similar tests are defined, uses concurrent read file tool use blocks in one tool call to read relevant files at the same time, uses edit file tool to write new tests]
+</example>
+</communication_style>
+
+<proactiveness>
+You are allowed to be proactive, but only when the user asks you to do something. You should strive to strike a balance between:
+
+- Doing the right thing when asked, including taking actions and follow-up actions
+- Not surprising the user with actions you take without asking
+  - For example, if the user asks you how to approach something, you should do your best to answer their question first, and not immediately jump into taking actions.
+- Do not add additional code explanation summary unless requested by the user. After working on a file, just stop, rather than providing an explanation of what you did.
+</proactiveness>
+
+<following_conventions>
+When making changes to files, first understand the file's code conventions. Mimic code style, use existing libraries and utilities, and follow existing patterns.
+
+- NEVER assume that a given library is available, even if it is well known. Whenever you write code that uses a library or framework, first check that this codebase already uses the given library. For example, you might look at neighboring files, or check the package.json (or cargo.toml, and so on depending on the language).
+- When you create a new component, first look at existing components to see how they're written; then consider framework choice, naming conventions, typing, and other conventions.
+- When you edit a piece of code, first look at the code's surrounding context (especially its imports) to understand the code's choice of frameworks and libraries. Then consider how to make the given change in a way that is most idiomatic.
+- Always follow security best practices. Never introduce code that exposes or logs secrets and keys. Never commit secrets or keys to the repository.
+</following_conventions>
+
+<code_style>
+- Follow existing code style and patterns
+- Do not add any comments to code you write unless asked to do so
+</code_style>
+
+<doing_tasks>
+The user will primarily request you perform software engineering tasks. This includes solving bugs, adding new functionality, refactoring code, explaining code, and more. For these tasks the following steps are recommended:
+
+- Use the available search tools to understand the codebase and the user's query.
+- Plan out the implementation
+- Implement the solution using all tools available to you
+- Verify the solution if possible with tests. NEVER assume specific test framework or test script. Check the README or search codebase to determine the testing approach.
+- When you have completed a task, you MUST run the lint and typecheck commands (e.g. npm run lint, npm run typecheck, ruff, etc.) if they were provided to you to ensure your code is correct. If you are unable to find the correct command, ask the user for the command to run and if they supply it, proactively suggest writing it to CRUSH.md so that you will know to run it next time.
+
+NEVER commit changes unless the user explicitly asks you to. It is VERY IMPORTANT to only commit when explicitly asked.
+</doing_tasks>
+
+<tool_use>
+- When doing file search, prefer to use the Agent tool in order to reduce context usage.
+- All tools are executed in parallel when multiple tool calls are sent in a single message. Only send multiple tool calls when they are safe to run in parallel (no dependencies between them).
+- The user does not see the full output of the tool responses, so if you need the output of the tool for the response make sure to summarize it for the user.
+</tool_use>
+
+<env>
+Working directory: {{.WorkingDir}}
+Is directory a git repo: {{if .IsGitRepo}} yes {{else}} no {{end}}
+Platform: {{.Platform}}
+Today's date: {{.Date}}
+</env>{{if gt (len .Config.LSP) 0}}
+<lsp>
+Tools that support LSP will also include useful diagnostics such as linting and typechecking.
+- These diagnostics will be automatically enabled when you run the tool, and will be displayed in the output at the bottom within the <file_diagnostics></file_diagnostics> and <project_diagnostics></project_diagnostics> tags.
+- Take necessary actions to fix the issues.
+- You should ignore diagnostics of files that you did not change or are not related or caused by your changes unless the user explicitly asks you to fix them.
+</lsp>
+{{end}}{{if gt (len .Config.Options.ContextPaths) 0}}
+<memory>
+{{range  .Config.Options.ContextPaths}}
+{{range  contextFiles .}}
+<file path="{{.Path}}">
+{{.Content}}
+</file>
+{{end}}
+{{end}}
+</memory>
+{{end}}

internal/agent/templates/initialize.md 🔗

@@ -0,0 +1,9 @@
+Please analyze this codebase and create a **CRUSH.md** file containing:
+
+- Build/lint/test commands - especially for running a single test
+- Code style guidelines including imports, formatting, types, naming conventions, error handling, etc.
+
+The file you create will be given to agentic coding agents (such as yourself) that operate in this repository. Make it about 20-30 lines long.
+If there's already a **CRUSH.md**, improve it.
+
+If there are Cursor rules (in `.cursor/rules/` or `.cursorrules`) or Copilot rules (in `.github/copilot-instructions.md`), make sure to include them.

internal/agent/templates/summary.md 🔗

@@ -0,0 +1,11 @@
+You are a helpful AI assistant tasked with summarizing conversations.
+
+When asked to summarize, provide a detailed but concise summary of the conversation.
+Focus on information that would be helpful for continuing the conversation, including:
+
+- What was done
+- What is currently being worked on
+- Which files are being modified
+- What needs to be done next
+
+Your summary should be comprehensive enough to provide context but concise enough to be quickly understood.

internal/agent/templates/task.gotmpl 🔗

@@ -0,0 +1,15 @@
+You are an agent for Crush. Given the user's prompt, you should use the tools available to you to answer the user's question.
+
+<rules>
+1. You should be concise, direct, and to the point, since your responses will be displayed on a command line interface. Answer the user's question directly, without elaboration, explanation, or details. One word answers are best. Avoid introductions, conclusions, and explanations. You MUST avoid text before/after your response, such as "The answer is <answer>.", "Here is the content of the file..." or "Based on the information provided, the answer is..." or "Here is what I will do next...".
+2. When relevant, share file names and code snippets relevant to the query
+3. Any file paths you return in your final response MUST be absolute. DO NOT use relative paths.
+</rules>
+
+<env>
+Working directory: {{.WorkingDir}}
+Is directory a git repo: {{if .IsGitRepo}} yes {{else}} no {{end}}
+Platform: {{.Platform}}
+Today's date: {{.Date}}
+</env>
+

internal/agent/templates/title.md 🔗

@@ -0,0 +1,8 @@
+you will generate a short title based on the first message a user begins a conversation with
+
+- ensure it is not more than 50 characters long
+- the title should be a summary of the user's message
+- it should be one line long
+- do not use quotes or colons
+- the entire text you return will be used as the title
+- never return anything that is more than one sentence (one line) long

internal/agent/tools/bash.go 🔗

@@ -0,0 +1,315 @@
+package tools
+
+import (
+	"bytes"
+	"context"
+	_ "embed"
+	"fmt"
+	"strings"
+	"text/template"
+	"time"
+
+	"github.com/charmbracelet/crush/internal/config"
+	"github.com/charmbracelet/crush/internal/permission"
+	"github.com/charmbracelet/crush/internal/shell"
+	"github.com/charmbracelet/fantasy/ai"
+)
+
+type BashParams struct {
+	Command     string `json:"command" description:"The command to execute"`
+	Description string `json:"description,omitempty" description:"A brief description of what the command does"`
+	Timeout     int    `json:"timeout,omitempty" description:"Optional timeout in milliseconds (max 600000)"`
+}
+
+type BashPermissionsParams struct {
+	Command     string `json:"command"`
+	Description string `json:"description"`
+	Timeout     int    `json:"timeout"`
+}
+
+type BashResponseMetadata struct {
+	StartTime        int64  `json:"start_time"`
+	EndTime          int64  `json:"end_time"`
+	Output           string `json:"output"`
+	Description      string `json:"description"`
+	WorkingDirectory string `json:"working_directory"`
+}
+
+const (
+	BashToolName = "bash"
+
+	DefaultTimeout  = 1 * 60 * 1000  // 1 minute in milliseconds
+	MaxTimeout      = 10 * 60 * 1000 // 10 minutes in milliseconds
+	MaxOutputLength = 30000
+	BashNoOutput    = "no output"
+)
+
+//go:embed bash.gotmpl
+var bashDescriptionTmpl []byte
+
+var bashDescriptionTpl = template.Must(
+	template.New("bashDescription").
+		Parse(string(bashDescriptionTmpl)),
+)
+
+type bashDescriptionData struct {
+	BannedCommands  string
+	MaxOutputLength int
+	Attribution     config.Attribution
+}
+
+var bannedCommands = []string{
+	// Network/Download tools
+	"alias",
+	"aria2c",
+	"axel",
+	"chrome",
+	"curl",
+	"curlie",
+	"firefox",
+	"http-prompt",
+	"httpie",
+	"links",
+	"lynx",
+	"nc",
+	"safari",
+	"scp",
+	"ssh",
+	"telnet",
+	"w3m",
+	"wget",
+	"xh",
+
+	// System administration
+	"doas",
+	"su",
+	"sudo",
+
+	// Package managers
+	"apk",
+	"apt",
+	"apt-cache",
+	"apt-get",
+	"dnf",
+	"dpkg",
+	"emerge",
+	"home-manager",
+	"makepkg",
+	"opkg",
+	"pacman",
+	"paru",
+	"pkg",
+	"pkg_add",
+	"pkg_delete",
+	"portage",
+	"rpm",
+	"yay",
+	"yum",
+	"zypper",
+
+	// System modification
+	"at",
+	"batch",
+	"chkconfig",
+	"crontab",
+	"fdisk",
+	"mkfs",
+	"mount",
+	"parted",
+	"service",
+	"systemctl",
+	"umount",
+
+	// Network configuration
+	"firewall-cmd",
+	"ifconfig",
+	"ip",
+	"iptables",
+	"netstat",
+	"pfctl",
+	"route",
+	"ufw",
+}
+
+func bashDescription(attribution *config.Attribution) string {
+	bannedCommandsStr := strings.Join(bannedCommands, ", ")
+	var out bytes.Buffer
+	if err := bashDescriptionTpl.Execute(&out, bashDescriptionData{
+		BannedCommands:  bannedCommandsStr,
+		MaxOutputLength: MaxOutputLength,
+		Attribution:     *attribution,
+	}); err != nil {
+		// this should never happen.
+		panic("failed to execute bash description template: " + err.Error())
+	}
+	return out.String()
+}
+
+func blockFuncs() []shell.BlockFunc {
+	return []shell.BlockFunc{
+		shell.CommandsBlocker(bannedCommands),
+
+		// System package managers
+		shell.ArgumentsBlocker("apk", []string{"add"}, nil),
+		shell.ArgumentsBlocker("apt", []string{"install"}, nil),
+		shell.ArgumentsBlocker("apt-get", []string{"install"}, nil),
+		shell.ArgumentsBlocker("dnf", []string{"install"}, nil),
+		shell.ArgumentsBlocker("pacman", nil, []string{"-S"}),
+		shell.ArgumentsBlocker("pkg", []string{"install"}, nil),
+		shell.ArgumentsBlocker("yum", []string{"install"}, nil),
+		shell.ArgumentsBlocker("zypper", []string{"install"}, nil),
+
+		// Language-specific package managers
+		shell.ArgumentsBlocker("brew", []string{"install"}, nil),
+		shell.ArgumentsBlocker("cargo", []string{"install"}, nil),
+		shell.ArgumentsBlocker("gem", []string{"install"}, nil),
+		shell.ArgumentsBlocker("go", []string{"install"}, nil),
+		shell.ArgumentsBlocker("npm", []string{"install"}, []string{"--global"}),
+		shell.ArgumentsBlocker("npm", []string{"install"}, []string{"-g"}),
+		shell.ArgumentsBlocker("pip", []string{"install"}, []string{"--user"}),
+		shell.ArgumentsBlocker("pip3", []string{"install"}, []string{"--user"}),
+		shell.ArgumentsBlocker("pnpm", []string{"add"}, []string{"--global"}),
+		shell.ArgumentsBlocker("pnpm", []string{"add"}, []string{"-g"}),
+		shell.ArgumentsBlocker("yarn", []string{"global", "add"}, nil),
+
+		// `go test -exec` can run arbitrary commands
+		shell.ArgumentsBlocker("go", []string{"test"}, []string{"-exec"}),
+	}
+}
+
+func NewBashTool(permissions permission.Service, workingDir string, attribution *config.Attribution) ai.AgentTool {
+	// Set up command blocking on the persistent shell
+	persistentShell := shell.GetPersistentShell(workingDir)
+	persistentShell.SetBlockFuncs(blockFuncs())
+	return ai.NewAgentTool(
+		BashToolName,
+		bashDescription(attribution),
+		func(ctx context.Context, params BashParams, call ai.ToolCall) (ai.ToolResponse, error) {
+			if params.Timeout > MaxTimeout {
+				params.Timeout = MaxTimeout
+			} else if params.Timeout <= 0 {
+				params.Timeout = DefaultTimeout
+			}
+
+			if params.Command == "" {
+				return ai.NewTextErrorResponse("missing command"), nil
+			}
+
+			isSafeReadOnly := false
+			cmdLower := strings.ToLower(params.Command)
+
+			for _, safe := range safeCommands {
+				if strings.HasPrefix(cmdLower, safe) {
+					if len(cmdLower) == len(safe) || cmdLower[len(safe)] == ' ' || cmdLower[len(safe)] == '-' {
+						isSafeReadOnly = true
+						break
+					}
+				}
+			}
+
+			sessionID := GetSessionFromContext(ctx)
+			if sessionID == "" {
+				return ai.ToolResponse{}, fmt.Errorf("session ID is required for executing shell command")
+			}
+			if !isSafeReadOnly {
+				sh := shell.GetPersistentShell(workingDir)
+				p := permissions.Request(
+					permission.CreatePermissionRequest{
+						SessionID:   sessionID,
+						Path:        sh.GetWorkingDir(),
+						ToolCallID:  call.ID,
+						ToolName:    BashToolName,
+						Action:      "execute",
+						Description: fmt.Sprintf("Execute command: %s", params.Command),
+						Params: BashPermissionsParams{
+							Command:     params.Command,
+							Description: params.Description,
+						},
+					},
+				)
+				if !p {
+					return ai.ToolResponse{}, permission.ErrorPermissionDenied
+				}
+			}
+			startTime := time.Now()
+			if params.Timeout > 0 {
+				var cancel context.CancelFunc
+				ctx, cancel = context.WithTimeout(ctx, time.Duration(params.Timeout)*time.Millisecond)
+				defer cancel()
+			}
+
+			persistentShell := shell.GetPersistentShell(workingDir)
+			stdout, stderr, err := persistentShell.Exec(ctx, params.Command)
+
+			// Get the current working directory after command execution
+			currentWorkingDir := persistentShell.GetWorkingDir()
+			interrupted := shell.IsInterrupt(err)
+			exitCode := shell.ExitCode(err)
+			if exitCode == 0 && !interrupted && err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("error executing command: %w", err)
+			}
+
+			stdout = truncateOutput(stdout)
+			stderr = truncateOutput(stderr)
+
+			errorMessage := stderr
+			if errorMessage == "" && err != nil {
+				errorMessage = err.Error()
+			}
+
+			if interrupted {
+				if errorMessage != "" {
+					errorMessage += "\n"
+				}
+				errorMessage += "Command was aborted before completion"
+			} else if exitCode != 0 {
+				if errorMessage != "" {
+					errorMessage += "\n"
+				}
+				errorMessage += fmt.Sprintf("Exit code %d", exitCode)
+			}
+
+			hasBothOutputs := stdout != "" && stderr != ""
+
+			if hasBothOutputs {
+				stdout += "\n"
+			}
+
+			if errorMessage != "" {
+				stdout += "\n" + errorMessage
+			}
+
+			metadata := BashResponseMetadata{
+				StartTime:        startTime.UnixMilli(),
+				EndTime:          time.Now().UnixMilli(),
+				Output:           stdout,
+				Description:      params.Description,
+				WorkingDirectory: currentWorkingDir,
+			}
+			if stdout == "" {
+				return ai.WithResponseMetadata(ai.NewTextResponse(BashNoOutput), metadata), nil
+			}
+			stdout += fmt.Sprintf("\n\n<cwd>%s</cwd>", currentWorkingDir)
+			return ai.WithResponseMetadata(ai.NewTextResponse(stdout), metadata), nil
+		})
+}
+
+func truncateOutput(content string) string {
+	if len(content) <= MaxOutputLength {
+		return content
+	}
+
+	halfLength := MaxOutputLength / 2
+	start := content[:halfLength]
+	end := content[len(content)-halfLength:]
+
+	truncatedLinesCount := countLines(content[halfLength : len(content)-halfLength])
+	return fmt.Sprintf("%s\n\n... [%d lines truncated] ...\n\n%s", start, truncatedLinesCount, end)
+}
+
+func countLines(s string) int {
+	if s == "" {
+		return 0
+	}
+	return len(strings.Split(s, "\n"))
+}
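
A quick test-style sketch of the middle-truncation behavior of `truncateOutput`; it assumes the test sits in the same `tools` package and is not part of this change.

```go
func TestTruncateOutputKeepsHeadAndTail(t *testing.T) {
	long := strings.Repeat("line\n", 20000) // ~100k chars, well over MaxOutputLength
	got := truncateOutput(long)
	if !strings.Contains(got, "lines truncated") {
		t.Fatal("expected a truncation marker in the middle of the output")
	}
	if len(got) > MaxOutputLength+100 { // head + tail + marker stays near the cap
		t.Fatalf("truncated output too large: %d chars", len(got))
	}
}
```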

internal/agent/tools/bash.gotmpl 🔗

@@ -0,0 +1,116 @@
+Executes bash commands in a persistent shell session with timeout and security measures.
+
+<cross_platform>
+Uses mvdan/sh interpreter (Bash-compatible on all platforms including Windows).
+Use forward slashes for paths: "ls C:/foo/bar" not "ls C:\foo\bar".
+Common shell builtins and core utils available on Windows.
+</cross_platform>
+
+<execution_steps>
+1. Directory Verification: If creating directories/files, use LS tool to verify parent exists
+2. Security Check: Banned commands ({{ .BannedCommands }}) return error - explain to user. Safe read-only commands execute without prompts
+3. Command Execution: Execute with proper quoting, capture output
+4. Output Processing: Truncate if exceeds {{ .MaxOutputLength }} characters
+5. Return Result: Include errors, metadata with <cwd></cwd> tags
+</execution_steps>
+
+<usage_notes>
+- Command required, timeout optional (max 600000ms/10min, default 1min if unspecified)
+- IMPORTANT: Use Grep/Glob/Agent tools instead of 'find'/'grep'. Use View/LS tools instead of 'cat'/'head'/'tail'/'ls'
+- Chain with ';' or '&&', avoid newlines except in quoted strings
+- Shell state persists (env vars, virtual envs, cwd, etc.)
+- Prefer absolute paths over 'cd' (use 'cd' only if user explicitly requests)
+</usage_notes>
+
+<git_commits>
+When user asks to create git commit:
+
+1. Single message with three tool_use blocks (IMPORTANT for speed):
+   - git status (untracked files)
+   - git diff (staged/unstaged changes)
+   - git log (recent commit message style)
+
+2. Add relevant untracked files to staging. Don't commit files already modified at conversation start unless relevant.
+
+3. Analyze staged changes in <commit_analysis> tags:
+   - List changed/added files, summarize nature (feature/enhancement/bug fix/refactoring/test/docs)
+   - Brainstorm purpose/motivation, assess project impact, check for sensitive info
+   - Don't use tools beyond git context
+   - Draft concise (1-2 sentences) message focusing on "why" not "what"
+   - Use clear language, accurate reflection ("add"=new feature, "update"=enhancement, "fix"=bug fix)
+   - Avoid generic messages, review draft
+
+4. Create commit with Crush signature using HEREDOC:
+   git commit -m "$(cat <<'EOF'
+   Commit message here.
+{{ if .Attribution.GeneratedWith}}
+   💘 Generated with Crush
+{{ end }}
+{{ if .Attribution.CoAuthoredBy}}
+   Co-Authored-By: Crush <crush@charm.land>
+{{ end }}
+   EOF
+   )"
+
+5. If pre-commit hook fails, retry ONCE. If fails again, hook preventing commit. If succeeds but files modified, MUST amend.
+
+6. Run git status to verify.
+
+Notes: Use "git commit -am" when possible, don't stage unrelated files, NEVER update config, don't push, no -i flags, no empty commits, return empty response.
+</git_commits>
+
+<pull_requests>
+Use gh command for ALL GitHub tasks. When user asks to create PR:
+
+1. Single message with multiple tool_use blocks (VERY IMPORTANT for speed):
+   - git status (untracked files)
+   - git diff (staged/unstaged changes)
+   - Check if branch tracks remote and is up to date
+   - git log and 'git diff main...HEAD' (full commit history from main divergence)
+
+2. Create new branch if needed
+3. Commit changes if needed
+4. Push to remote with -u flag if needed
+
+5. Analyze changes in <pr_analysis> tags:
+   - List commits since diverging from main
+   - Summarize nature of changes
+   - Brainstorm purpose/motivation
+   - Assess project impact
+   - Don't use tools beyond git context
+   - Check for sensitive information
+   - Draft concise (1-2 bullet points) PR summary focusing on "why"
+   - Ensure summary reflects ALL changes since main divergence
+   - Clear, concise language
+   - Accurate reflection of changes and purpose
+   - Avoid generic summaries
+   - Review draft
+
+6. Create PR with gh pr create using HEREDOC:
+   gh pr create --title "title" --body "$(cat <<'EOF'
+
+   ## Summary
+
+   <1-3 bullet points>
+
+   ## Test plan
+
+   [Checklist of TODOs...]
+
+{{ if .Attribution.GeneratedWith}}
+   💘 Generated with Crush
+{{ end }}
+
+   EOF
+   )"
+
+Important:
+
+- Return empty response - user sees gh output
+- Never update git config
+  </pull_requests>
+
+<examples>
+Good: pytest /foo/bar/tests
+Bad: cd /foo/bar && pytest tests
+</examples>

internal/agent/tools/diagnostics.go 🔗

@@ -0,0 +1,185 @@
+package tools
+
+import (
+	"context"
+	_ "embed"
+	"fmt"
+	"log/slog"
+	"sort"
+	"strings"
+	"time"
+
+	"github.com/charmbracelet/crush/internal/csync"
+	"github.com/charmbracelet/crush/internal/lsp"
+	"github.com/charmbracelet/fantasy/ai"
+	"github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
+)
+
+type DiagnosticsParams struct {
+	FilePath string `json:"file_path,omitempty" description:"The path to the file to get diagnostics for (leave empty for project diagnostics)"`
+}
+
+const DiagnosticsToolName = "diagnostics"
+
+//go:embed diagnostics.md
+var diagnosticsDescription []byte
+
+func NewDiagnosticsTool(lspClients *csync.Map[string, *lsp.Client]) ai.AgentTool {
+	return ai.NewAgentTool(
+		DiagnosticsToolName,
+		string(diagnosticsDescription),
+		func(ctx context.Context, params DiagnosticsParams, call ai.ToolCall) (ai.ToolResponse, error) {
+			if lspClients.Len() == 0 {
+				return ai.NewTextErrorResponse("no LSP clients available"), nil
+			}
+			notifyLSPs(ctx, lspClients, params.FilePath)
+			output := getDiagnostics(params.FilePath, lspClients)
+			return ai.NewTextResponse(output), nil
+		})
+}
+
+func notifyLSPs(ctx context.Context, lsps *csync.Map[string, *lsp.Client], filepath string) {
+	if filepath == "" {
+		return
+	}
+	for client := range lsps.Seq() {
+		if !client.HandlesFile(filepath) {
+			continue
+		}
+		_ = client.OpenFileOnDemand(ctx, filepath)
+		_ = client.NotifyChange(ctx, filepath)
+		client.WaitForDiagnostics(ctx, 5*time.Second)
+	}
+}
+
+func getDiagnostics(filePath string, lsps *csync.Map[string, *lsp.Client]) string {
+	fileDiagnostics := []string{}
+	projectDiagnostics := []string{}
+
+	for lspName, client := range lsps.Seq2() {
+		for location, diags := range client.GetDiagnostics() {
+			path, err := location.Path()
+			if err != nil {
+				slog.Error("Failed to convert diagnostic location URI to path", "uri", location, "error", err)
+				continue
+			}
+			isCurrentFile := path == filePath
+			for _, diag := range diags {
+				formattedDiag := formatDiagnostic(path, diag, lspName)
+				if isCurrentFile {
+					fileDiagnostics = append(fileDiagnostics, formattedDiag)
+				} else {
+					projectDiagnostics = append(projectDiagnostics, formattedDiag)
+				}
+			}
+		}
+	}
+
+	sortDiagnostics(fileDiagnostics)
+	sortDiagnostics(projectDiagnostics)
+
+	var output strings.Builder
+	writeDiagnostics(&output, "file_diagnostics", fileDiagnostics)
+	writeDiagnostics(&output, "project_diagnostics", projectDiagnostics)
+
+	if len(fileDiagnostics) > 0 || len(projectDiagnostics) > 0 {
+		fileErrors := countSeverity(fileDiagnostics, "Error")
+		fileWarnings := countSeverity(fileDiagnostics, "Warn")
+		projectErrors := countSeverity(projectDiagnostics, "Error")
+		projectWarnings := countSeverity(projectDiagnostics, "Warn")
+		output.WriteString("\n<diagnostic_summary>\n")
+		fmt.Fprintf(&output, "Current file: %d errors, %d warnings\n", fileErrors, fileWarnings)
+		fmt.Fprintf(&output, "Project: %d errors, %d warnings\n", projectErrors, projectWarnings)
+		output.WriteString("</diagnostic_summary>\n")
+	}
+
+	out := output.String()
+	slog.Info("Diagnostics", "output", fmt.Sprintf("%q", out))
+	return out
+}
+
+func writeDiagnostics(output *strings.Builder, tag string, in []string) {
+	if len(in) == 0 {
+		return
+	}
+	output.WriteString("\n<" + tag + ">\n")
+	if len(in) > 10 {
+		output.WriteString(strings.Join(in[:10], "\n"))
+		fmt.Fprintf(output, "\n... and %d more diagnostics", len(in)-10)
+	} else {
+		output.WriteString(strings.Join(in, "\n"))
+	}
+	output.WriteString("\n</" + tag + ">\n")
+}
+
+func sortDiagnostics(in []string) []string {
+	sort.Slice(in, func(i, j int) bool {
+		iIsError := strings.HasPrefix(in[i], "Error")
+		jIsError := strings.HasPrefix(in[j], "Error")
+		if iIsError != jIsError {
+			return iIsError // Errors come first
+		}
+		return in[i] < in[j] // Then alphabetically
+	})
+	return in
+}
+
+func formatDiagnostic(pth string, diagnostic protocol.Diagnostic, source string) string {
+	severity := "Info"
+	switch diagnostic.Severity {
+	case protocol.SeverityError:
+		severity = "Error"
+	case protocol.SeverityWarning:
+		severity = "Warn"
+	case protocol.SeverityHint:
+		severity = "Hint"
+	}
+
+	location := fmt.Sprintf("%s:%d:%d", pth, diagnostic.Range.Start.Line+1, diagnostic.Range.Start.Character+1)
+
+	sourceInfo := ""
+	if diagnostic.Source != "" {
+		sourceInfo = diagnostic.Source
+	} else if source != "" {
+		sourceInfo = source
+	}
+
+	codeInfo := ""
+	if diagnostic.Code != nil {
+		codeInfo = fmt.Sprintf("[%v]", diagnostic.Code)
+	}
+
+	tagsInfo := ""
+	if len(diagnostic.Tags) > 0 {
+		tags := []string{}
+		for _, tag := range diagnostic.Tags {
+			switch tag {
+			case protocol.Unnecessary:
+				tags = append(tags, "unnecessary")
+			case protocol.Deprecated:
+				tags = append(tags, "deprecated")
+			}
+		}
+		if len(tags) > 0 {
+			tagsInfo = fmt.Sprintf(" (%s)", strings.Join(tags, ", "))
+		}
+	}
+
+	return fmt.Sprintf("%s: %s [%s]%s%s %s",
+		severity,
+		location,
+		sourceInfo,
+		codeInfo,
+		tagsInfo,
+		diagnostic.Message)
+}
+
+func countSeverity(diagnostics []string, severity string) int {
+	count := 0
+	for _, diag := range diagnostics {
+		if strings.HasPrefix(diag, severity) {
+			count++
+		}
+	}
+	return count
+}
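
A test-style sketch of the ordering and counting helpers above, using strings in the `formatDiagnostic` layout; it assumes the same package and is not part of this change.

```go
func TestDiagnosticOrderingAndCounts(t *testing.T) {
	diags := []string{
		"Warn: main.go:10:2 [gopls] x declared and not used",
		"Error: util.go:7:5 [gopls] type mismatch",
		"Error: main.go:3:1 [gopls] undefined: foo",
	}
	sortDiagnostics(diags)
	if !strings.HasPrefix(diags[0], "Error: main.go") { // errors first, then alphabetical
		t.Fatalf("unexpected first diagnostic: %s", diags[0])
	}
	if countSeverity(diags, "Error") != 2 || countSeverity(diags, "Warn") != 1 {
		t.Fatal("unexpected severity counts")
	}
}
```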

internal/agent/tools/diagnostics.md 🔗

@@ -0,0 +1,24 @@
+Get diagnostics for file and/or project.
+
+<usage>
+- Provide file path to get diagnostics for that file
+- Leave path empty to get diagnostics for entire project
+- Results displayed in structured format with severity levels
+</usage>
+
+<features>
+- Displays errors, warnings, and hints
+- Groups diagnostics by severity
+- Provides detailed information about each diagnostic
+</features>
+
+<limitations>
+- Results limited to diagnostics provided by LSP clients
+- May not cover all possible code issues
+- Does not provide suggestions for fixing issues
+</limitations>
+
+<tips>
+- Use with other tools for comprehensive code review
+- Combine with LSP client for real-time diagnostics
+</tips>

internal/agent/tools/download.go 🔗

@@ -0,0 +1,157 @@
+package tools
+
+import (
+	"context"
+	_ "embed"
+	"fmt"
+	"io"
+	"net/http"
+	"os"
+	"path/filepath"
+	"strings"
+	"time"
+
+	"github.com/charmbracelet/crush/internal/permission"
+	"github.com/charmbracelet/fantasy/ai"
+)
+
+type DownloadParams struct {
+	URL      string `json:"url" description:"The URL to download from"`
+	FilePath string `json:"file_path" description:"The local file path where the downloaded content should be saved"`
+	Timeout  int    `json:"timeout,omitempty" description:"Optional timeout in seconds (max 600)"`
+}
+
+type DownloadPermissionsParams struct {
+	URL      string `json:"url"`
+	FilePath string `json:"file_path"`
+	Timeout  int    `json:"timeout,omitempty"`
+}
+
+const DownloadToolName = "download"
+
+//go:embed download.md
+var downloadDescription []byte
+
+func NewDownloadTool(permissions permission.Service, workingDir string) ai.AgentTool {
+	client := &http.Client{
+		Timeout: 5 * time.Minute, // Default 5 minute timeout for downloads
+		Transport: &http.Transport{
+			MaxIdleConns:        100,
+			MaxIdleConnsPerHost: 10,
+			IdleConnTimeout:     90 * time.Second,
+		},
+	}
+	return ai.NewAgentTool(
+		DownloadToolName,
+		string(downloadDescription),
+		func(ctx context.Context, params DownloadParams, call ai.ToolCall) (ai.ToolResponse, error) {
+			if params.URL == "" {
+				return ai.NewTextErrorResponse("URL parameter is required"), nil
+			}
+
+			if params.FilePath == "" {
+				return ai.NewTextErrorResponse("file_path parameter is required"), nil
+			}
+
+			if !strings.HasPrefix(params.URL, "http://") && !strings.HasPrefix(params.URL, "https://") {
+				return ai.NewTextErrorResponse("URL must start with http:// or https://"), nil
+			}
+
+			// Convert relative path to absolute path
+			var filePath string
+			if filepath.IsAbs(params.FilePath) {
+				filePath = params.FilePath
+			} else {
+				filePath = filepath.Join(workingDir, params.FilePath)
+			}
+
+			sessionID := GetSessionFromContext(ctx)
+			if sessionID == "" {
+				return ai.ToolResponse{}, fmt.Errorf("session ID is required for downloading files")
+			}
+
+			p := permissions.Request(
+				permission.CreatePermissionRequest{
+					SessionID:   sessionID,
+					Path:        filePath,
+					ToolName:    DownloadToolName,
+					Action:      "download",
+					Description: fmt.Sprintf("Download file from URL: %s to %s", params.URL, filePath),
+					Params:      DownloadPermissionsParams(params),
+				},
+			)
+
+			if !p {
+				return ai.ToolResponse{}, permission.ErrorPermissionDenied
+			}
+
+			// Handle timeout with context
+			requestCtx := ctx
+			if params.Timeout > 0 {
+				maxTimeout := 600 // 10 minutes
+				if params.Timeout > maxTimeout {
+					params.Timeout = maxTimeout
+				}
+				var cancel context.CancelFunc
+				requestCtx, cancel = context.WithTimeout(ctx, time.Duration(params.Timeout)*time.Second)
+				defer cancel()
+			}
+
+			req, err := http.NewRequestWithContext(requestCtx, "GET", params.URL, nil)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("failed to create request: %w", err)
+			}
+
+			req.Header.Set("User-Agent", "crush/1.0")
+
+			resp, err := client.Do(req)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("failed to download from URL: %w", err)
+			}
+			defer resp.Body.Close()
+
+			if resp.StatusCode != http.StatusOK {
+				return ai.NewTextErrorResponse(fmt.Sprintf("Request failed with status code: %d", resp.StatusCode)), nil
+			}
+
+			// Check content length if available
+			maxSize := int64(100 * 1024 * 1024) // 100MB
+			if resp.ContentLength > maxSize {
+				return ai.NewTextErrorResponse(fmt.Sprintf("File too large: %d bytes (max %d bytes)", resp.ContentLength, maxSize)), nil
+			}
+
+			// Create parent directories if they don't exist
+			if err := os.MkdirAll(filepath.Dir(filePath), 0o755); err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("failed to create parent directories: %w", err)
+			}
+
+			// Create the output file
+			outFile, err := os.Create(filePath)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("failed to create output file: %w", err)
+			}
+			defer outFile.Close()
+
+			// Copy data with size limit
+			limitedReader := io.LimitReader(resp.Body, maxSize)
+			bytesWritten, err := io.Copy(outFile, limitedReader)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("failed to write file: %w", err)
+			}
+
+			// Check if we hit the size limit
+			if bytesWritten == maxSize {
+				// Clean up the file since it might be incomplete
+				os.Remove(filePath)
+				return ai.NewTextErrorResponse(fmt.Sprintf("File too large: exceeded %d bytes limit", maxSize)), nil
+			}
+
+			contentType := resp.Header.Get("Content-Type")
+			responseMsg := fmt.Sprintf("Successfully downloaded %d bytes to %s", bytesWritten, filePath)
+			if contentType != "" {
+				responseMsg += fmt.Sprintf(" (Content-Type: %s)", contentType)
+			}
+
+			return ai.NewTextResponse(responseMsg), nil
+		})
+}

internal/agent/tools/download.md 🔗

@@ -0,0 +1,28 @@
+Downloads binary data from URL and saves to local file.
+
+<usage>
+- Provide URL to download from
+- Specify local file path where content should be saved
+- Optional timeout for request
+</usage>
+
+<features>
+- Downloads any file type (binary or text)
+- Auto-creates parent directories if missing
+- Handles large files efficiently with streaming
+- Sets reasonable timeouts to prevent hanging
+- Validates input parameters before requests
+</features>
+
+<limitations>
+- Max file size: 100MB
+- Only supports HTTP and HTTPS protocols
+- Cannot handle authentication or cookies
+- Some websites may block automated requests
+- Will overwrite existing files without warning
+</limitations>
+
+<tips>
+- Use absolute paths or paths relative to working directory
+- Set appropriate timeouts for large files or slow connections
+</tips>

internal/agent/tools/edit.go 🔗

@@ -0,0 +1,449 @@
+package tools
+
+import (
+	"context"
+	_ "embed"
+	"fmt"
+	"log/slog"
+	"os"
+	"path/filepath"
+	"strings"
+	"time"
+
+	"github.com/charmbracelet/crush/internal/csync"
+	"github.com/charmbracelet/crush/internal/diff"
+	"github.com/charmbracelet/crush/internal/fsext"
+	"github.com/charmbracelet/crush/internal/history"
+	"github.com/charmbracelet/fantasy/ai"
+
+	"github.com/charmbracelet/crush/internal/lsp"
+	"github.com/charmbracelet/crush/internal/permission"
+)
+
+type EditParams struct {
+	FilePath   string `json:"file_path" description:"The absolute path to the file to modify"`
+	OldString  string `json:"old_string" description:"The text to replace"`
+	NewString  string `json:"new_string" description:"The text to replace it with"`
+	ReplaceAll bool   `json:"replace_all,omitempty" description:"Replace all occurrences of old_string (default false)"`
+}
+
+type EditPermissionsParams struct {
+	FilePath   string `json:"file_path"`
+	OldContent string `json:"old_content,omitempty"`
+	NewContent string `json:"new_content,omitempty"`
+}
+
+type EditResponseMetadata struct {
+	Additions  int    `json:"additions"`
+	Removals   int    `json:"removals"`
+	OldContent string `json:"old_content,omitempty"`
+	NewContent string `json:"new_content,omitempty"`
+}
+
+const EditToolName = "edit"
+
+//go:embed edit.md
+var editDescription []byte
+
+type editContext struct {
+	ctx         context.Context
+	permissions permission.Service
+	files       history.Service
+	workingDir  string
+}
+
+func NewEditTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, files history.Service, workingDir string) ai.AgentTool {
+	return ai.NewAgentTool(
+		EditToolName,
+		string(editDescription),
+		func(ctx context.Context, params EditParams, call ai.ToolCall) (ai.ToolResponse, error) {
+			if params.FilePath == "" {
+				return ai.NewTextErrorResponse("file_path is required"), nil
+			}
+
+			if !filepath.IsAbs(params.FilePath) {
+				params.FilePath = filepath.Join(workingDir, params.FilePath)
+			}
+
+			var response ai.ToolResponse
+			var err error
+
+			editCtx := editContext{ctx, permissions, files, workingDir}
+
+			// Dispatch exactly one operation: create (empty old_string), delete
+			// (empty new_string), or replace. Falling through to replaceContent
+			// after a create/delete would turn a successful edit into an error.
+			switch {
+			case params.OldString == "":
+				response, err = createNewFile(editCtx, params.FilePath, params.NewString, call)
+			case params.NewString == "":
+				response, err = deleteContent(editCtx, params.FilePath, params.OldString, params.ReplaceAll, call)
+			default:
+				response, err = replaceContent(editCtx, params.FilePath, params.OldString, params.NewString, params.ReplaceAll, call)
+			}
+			if err != nil {
+				return response, err
+			}
+			if response.IsError {
+				// Return early on a tool-level error; skip LSP diagnostics processing.
+				return response, nil
+			}
+
+			notifyLSPs(ctx, lspClients, params.FilePath)
+
+			text := fmt.Sprintf("<result>\n%s\n</result>\n", response.Content)
+			text += getDiagnostics(params.FilePath, lspClients)
+			response.Content = text
+			return response, nil
+		})
+}
+
+func createNewFile(edit editContext, filePath, content string, call ai.ToolCall) (ai.ToolResponse, error) {
+	fileInfo, err := os.Stat(filePath)
+	if err == nil {
+		if fileInfo.IsDir() {
+			return ai.NewTextErrorResponse(fmt.Sprintf("path is a directory, not a file: %s", filePath)), nil
+		}
+		return ai.NewTextErrorResponse(fmt.Sprintf("file already exists: %s", filePath)), nil
+	} else if !os.IsNotExist(err) {
+		return ai.ToolResponse{}, fmt.Errorf("failed to access file: %w", err)
+	}
+
+	dir := filepath.Dir(filePath)
+	if err = os.MkdirAll(dir, 0o755); err != nil {
+		return ai.ToolResponse{}, fmt.Errorf("failed to create parent directories: %w", err)
+	}
+
+	sessionID := GetSessionFromContext(edit.ctx)
+	if sessionID == "" {
+		return ai.ToolResponse{}, fmt.Errorf("session ID is required for creating a new file")
+	}
+
+	_, additions, removals := diff.GenerateDiff(
+		"",
+		content,
+		strings.TrimPrefix(filePath, edit.workingDir),
+	)
+	p := edit.permissions.Request(
+		permission.CreatePermissionRequest{
+			SessionID:   sessionID,
+			Path:        fsext.PathOrPrefix(filePath, edit.workingDir),
+			ToolCallID:  call.ID,
+			ToolName:    EditToolName,
+			Action:      "write",
+			Description: fmt.Sprintf("Create file %s", filePath),
+			Params: EditPermissionsParams{
+				FilePath:   filePath,
+				OldContent: "",
+				NewContent: content,
+			},
+		},
+	)
+	if !p {
+		return ai.ToolResponse{}, permission.ErrorPermissionDenied
+	}
+
+	err = os.WriteFile(filePath, []byte(content), 0o644)
+	if err != nil {
+		return ai.ToolResponse{}, fmt.Errorf("failed to write file: %w", err)
+	}
+
+	// File can't be in the history so we create a new file history
+	_, err = edit.files.Create(edit.ctx, sessionID, filePath, "")
+	if err != nil {
+		// Log error but don't fail the operation
+		return ai.ToolResponse{}, fmt.Errorf("error creating file history: %w", err)
+	}
+
+	// Add the new content to the file history
+	_, err = edit.files.CreateVersion(edit.ctx, sessionID, filePath, content)
+	if err != nil {
+		// Log error but don't fail the operation
+		slog.Debug("Error creating file history version", "error", err)
+	}
+
+	recordFileWrite(filePath)
+	recordFileRead(filePath)
+
+	return ai.WithResponseMetadata(
+		ai.NewTextResponse("File created: "+filePath),
+		EditResponseMetadata{
+			OldContent: "",
+			NewContent: content,
+			Additions:  additions,
+			Removals:   removals,
+		},
+	), nil
+}
+
+func deleteContent(edit editContext, filePath, oldString string, replaceAll bool, call ai.ToolCall) (ai.ToolResponse, error) {
+	fileInfo, err := os.Stat(filePath)
+	if err != nil {
+		if os.IsNotExist(err) {
+			return ai.NewTextErrorResponse(fmt.Sprintf("file not found: %s", filePath)), nil
+		}
+		return ai.ToolResponse{}, fmt.Errorf("failed to access file: %w", err)
+	}
+
+	if fileInfo.IsDir() {
+		return ai.NewTextErrorResponse(fmt.Sprintf("path is a directory, not a file: %s", filePath)), nil
+	}
+
+	if getLastReadTime(filePath).IsZero() {
+		return ai.NewTextErrorResponse("you must read the file before editing it. Use the View tool first"), nil
+	}
+
+	modTime := fileInfo.ModTime()
+	lastRead := getLastReadTime(filePath)
+	if modTime.After(lastRead) {
+		return ai.NewTextErrorResponse(
+			fmt.Sprintf("file %s has been modified since it was last read (mod time: %s, last read: %s)",
+				filePath, modTime.Format(time.RFC3339), lastRead.Format(time.RFC3339),
+			)), nil
+	}
+
+	content, err := os.ReadFile(filePath)
+	if err != nil {
+		return ai.ToolResponse{}, fmt.Errorf("failed to read file: %w", err)
+	}
+
+	oldContent, isCrlf := fsext.ToUnixLineEndings(string(content))
+
+	var newContent string
+	var deletionCount int
+
+	if replaceAll {
+		newContent = strings.ReplaceAll(oldContent, oldString, "")
+		deletionCount = strings.Count(oldContent, oldString)
+		if deletionCount == 0 {
+			return ai.NewTextErrorResponse("old_string not found in file. Make sure it matches exactly, including whitespace and line breaks"), nil
+		}
+	} else {
+		index := strings.Index(oldContent, oldString)
+		if index == -1 {
+			return ai.NewTextErrorResponse("old_string not found in file. Make sure it matches exactly, including whitespace and line breaks"), nil
+		}
+
+		lastIndex := strings.LastIndex(oldContent, oldString)
+		if index != lastIndex {
+			return ai.NewTextErrorResponse("old_string appears multiple times in the file. Please provide more context to ensure a unique match, or set replace_all to true"), nil
+		}
+
+		newContent = oldContent[:index] + oldContent[index+len(oldString):]
+		deletionCount = 1
+	}
+
+	sessionID := GetSessionFromContext(edit.ctx)
+
+	if sessionID == "" {
+		return ai.ToolResponse{}, fmt.Errorf("session ID is required for deleting content")
+	}
+
+	_, additions, removals := diff.GenerateDiff(
+		oldContent,
+		newContent,
+		strings.TrimPrefix(filePath, edit.workingDir),
+	)
+
+	p := edit.permissions.Request(
+		permission.CreatePermissionRequest{
+			SessionID:   sessionID,
+			Path:        fsext.PathOrPrefix(filePath, edit.workingDir),
+			ToolCallID:  call.ID,
+			ToolName:    EditToolName,
+			Action:      "write",
+			Description: fmt.Sprintf("Delete content from file %s", filePath),
+			Params: EditPermissionsParams{
+				FilePath:   filePath,
+				OldContent: oldContent,
+				NewContent: newContent,
+			},
+		},
+	)
+	if !p {
+		return ai.ToolResponse{}, permission.ErrorPermissionDenied
+	}
+
+	if isCrlf {
+		newContent, _ = fsext.ToWindowsLineEndings(newContent)
+	}
+
+	err = os.WriteFile(filePath, []byte(newContent), 0o644)
+	if err != nil {
+		return ai.ToolResponse{}, fmt.Errorf("failed to write file: %w", err)
+	}
+
+	// Check if file exists in history
+	file, err := edit.files.GetByPathAndSession(edit.ctx, filePath, sessionID)
+	if err != nil {
+		_, err = edit.files.Create(edit.ctx, sessionID, filePath, oldContent)
+		if err != nil {
+			// Log error but don't fail the operation
+			return ai.ToolResponse{}, fmt.Errorf("error creating file history: %w", err)
+		}
+	}
+	if file.Content != oldContent {
+		// User Manually changed the content store an intermediate version
+		_, err = edit.files.CreateVersion(edit.ctx, sessionID, filePath, oldContent)
+		if err != nil {
+			slog.Debug("Error creating file history version", "error", err)
+		}
+	}
+	// Store the new version
+	_, err = edit.files.CreateVersion(edit.ctx, sessionID, filePath, newContent)
+	if err != nil {
+		slog.Debug("Error creating file history version", "error", err)
+	}
+
+	recordFileWrite(filePath)
+	recordFileRead(filePath)
+
+	return ai.WithResponseMetadata(
+		ai.NewTextResponse("Content deleted from file: "+filePath),
+		EditResponseMetadata{
+			OldContent: oldContent,
+			NewContent: newContent,
+			Additions:  additions,
+			Removals:   removals,
+		},
+	), nil
+}
+
+func replaceContent(edit editContext, filePath, oldString, newString string, replaceAll bool, call ai.ToolCall) (ai.ToolResponse, error) {
+	fileInfo, err := os.Stat(filePath)
+	if err != nil {
+		if os.IsNotExist(err) {
+			return ai.NewTextErrorResponse(fmt.Sprintf("file not found: %s", filePath)), nil
+		}
+		return ai.ToolResponse{}, fmt.Errorf("failed to access file: %w", err)
+	}
+
+	if fileInfo.IsDir() {
+		return ai.NewTextErrorResponse(fmt.Sprintf("path is a directory, not a file: %s", filePath)), nil
+	}
+
+	if getLastReadTime(filePath).IsZero() {
+		return ai.NewTextErrorResponse("you must read the file before editing it. Use the View tool first"), nil
+	}
+
+	modTime := fileInfo.ModTime()
+	lastRead := getLastReadTime(filePath)
+	if modTime.After(lastRead) {
+		return ai.NewTextErrorResponse(
+			fmt.Sprintf("file %s has been modified since it was last read (mod time: %s, last read: %s)",
+				filePath, modTime.Format(time.RFC3339), lastRead.Format(time.RFC3339),
+			)), nil
+	}
+
+	content, err := os.ReadFile(filePath)
+	if err != nil {
+		return ai.ToolResponse{}, fmt.Errorf("failed to read file: %w", err)
+	}
+
+	oldContent, isCrlf := fsext.ToUnixLineEndings(string(content))
+
+	var newContent string
+	var replacementCount int
+
+	if replaceAll {
+		newContent = strings.ReplaceAll(oldContent, oldString, newString)
+		replacementCount = strings.Count(oldContent, oldString)
+		if replacementCount == 0 {
+			return ai.NewTextErrorResponse("old_string not found in file. Make sure it matches exactly, including whitespace and line breaks"), nil
+		}
+	} else {
+		index := strings.Index(oldContent, oldString)
+		if index == -1 {
+			return ai.NewTextErrorResponse("old_string not found in file. Make sure it matches exactly, including whitespace and line breaks"), nil
+		}
+
+		lastIndex := strings.LastIndex(oldContent, oldString)
+		if index != lastIndex {
+			return ai.NewTextErrorResponse("old_string appears multiple times in the file. Please provide more context to ensure a unique match, or set replace_all to true"), nil
+		}
+
+		newContent = oldContent[:index] + newString + oldContent[index+len(oldString):]
+		replacementCount = 1
+	}
+
+	if oldContent == newContent {
+		return ai.NewTextErrorResponse("new content is the same as old content. No changes made."), nil
+	}
+	sessionID := GetSessionFromContext(edit.ctx)
+
+	if sessionID == "" {
+		return ai.ToolResponse{}, fmt.Errorf("session ID is required for replacing content")
+	}
+	_, additions, removals := diff.GenerateDiff(
+		oldContent,
+		newContent,
+		strings.TrimPrefix(filePath, edit.workingDir),
+	)
+
+	p := edit.permissions.Request(
+		permission.CreatePermissionRequest{
+			SessionID:   sessionID,
+			Path:        fsext.PathOrPrefix(filePath, edit.workingDir),
+			ToolCallID:  call.ID,
+			ToolName:    EditToolName,
+			Action:      "write",
+			Description: fmt.Sprintf("Replace content in file %s", filePath),
+			Params: EditPermissionsParams{
+				FilePath:   filePath,
+				OldContent: oldContent,
+				NewContent: newContent,
+			},
+		},
+	)
+	if !p {
+		return ai.ToolResponse{}, permission.ErrorPermissionDenied
+	}
+
+	if isCrlf {
+		newContent, _ = fsext.ToWindowsLineEndings(newContent)
+	}
+
+	err = os.WriteFile(filePath, []byte(newContent), 0o644)
+	if err != nil {
+		return ai.ToolResponse{}, fmt.Errorf("failed to write file: %w", err)
+	}
+
+	// Check if file exists in history
+	file, err := edit.files.GetByPathAndSession(edit.ctx, filePath, sessionID)
+	if err != nil {
+		_, err = edit.files.Create(edit.ctx, sessionID, filePath, oldContent)
+		if err != nil {
+			// Log error but don't fail the operation
+			return ai.ToolResponse{}, fmt.Errorf("error creating file history: %w", err)
+		}
+	}
+	if file.Content != oldContent {
+		// User Manually changed the content store an intermediate version
+		_, err = edit.files.CreateVersion(edit.ctx, sessionID, filePath, oldContent)
+		if err != nil {
+			slog.Debug("Error creating file history version", "error", err)
+		}
+	}
+	// Store the new version
+	_, err = edit.files.CreateVersion(edit.ctx, sessionID, filePath, newContent)
+	if err != nil {
+		slog.Debug("Error creating file history version", "error", err)
+	}
+
+	recordFileWrite(filePath)
+	recordFileRead(filePath)
+
+	return ai.WithResponseMetadata(
+		ai.NewTextResponse("Content replaced in file: "+filePath),
+		EditResponseMetadata{
+			OldContent: oldContent,
+			NewContent: newContent,
+			Additions:  additions,
+			Removals:   removals,
+		}), nil
+}
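
For clarity, the three parameter shapes the edit tool dispatches on; the path and contents are hypothetical.

```go
// Illustrative EditParams values for the three modes (same package assumed).
var (
	createFile = EditParams{FilePath: "/repo/NOTES.md", NewString: "hello\n"}                // empty old_string => create
	deleteText = EditParams{FilePath: "/repo/NOTES.md", OldString: "hello\n"}                // empty new_string => delete
	replaceOne = EditParams{FilePath: "/repo/NOTES.md", OldString: "hello", NewString: "hi"} // unique match required
)
```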

internal/agent/tools/edit.md 🔗

@@ -0,0 +1,60 @@
+Edits files by replacing text, creating new files, or deleting content. For moving/renaming use Bash 'mv'. For large edits use Write tool.
+
+<prerequisites>
+1. Use View tool to understand file contents and context
+2. For new files: Use LS tool to verify parent directory exists
+</prerequisites>
+
+<parameters>
+1. file_path: Absolute path to file (required)
+2. old_string: Text to replace (must match exactly including whitespace/indentation)
+3. new_string: Replacement text
+4. replace_all: Replace all occurrences (default false)
+</parameters>
+
+<special_cases>
+
+- Create file: provide file_path + new_string, leave old_string empty
+- Delete content: provide file_path + old_string, leave new_string empty
+  </special_cases>
+
+<critical_requirements>
+UNIQUENESS (when replace_all=false): old_string MUST uniquely identify target instance
+
+- Include 3-5 lines context BEFORE and AFTER change point
+- Include exact whitespace, indentation, surrounding code
+
+SINGLE INSTANCE: Tool changes ONE instance when replace_all=false
+
+- For multiple instances: set replace_all=true OR make separate calls with unique context
+- Plan calls carefully to avoid conflicts
+
+VERIFICATION: Before using
+
+- Check how many instances of target text exist
+- Gather sufficient context for unique identification
+- Plan separate calls or use replace_all
+  </critical_requirements>
+
+<warnings>
+Tool fails if:
+- old_string matches multiple locations and replace_all=false
+- old_string doesn't match exactly (including whitespace)
+- Insufficient context causes wrong instance change
+</warnings>
+
+<best_practices>
+
+- Ensure edits result in correct, idiomatic code
+- Don't leave code in broken state
+- Use absolute file paths (starting with /)
+- Use forward slashes (/) for cross-platform compatibility
+- Multiple edits to same file: send all in single message with multiple tool calls
+  </best_practices>
+
+<windows_notes>
+
+- Forward slashes work throughout (C:/path/file)
+- File permissions handled automatically
+- Line endings converted automatically (\n ↔ \r\n)
+  </windows_notes>

internal/agent/tools/fetch.go 🔗

@@ -0,0 +1,203 @@
+package tools
+
+import (
+	"context"
+	_ "embed"
+	"fmt"
+	"io"
+	"net/http"
+	"strings"
+	"time"
+	"unicode/utf8"
+
+	md "github.com/JohannesKaufmann/html-to-markdown"
+	"github.com/PuerkitoBio/goquery"
+	"github.com/charmbracelet/crush/internal/permission"
+	"github.com/charmbracelet/fantasy/ai"
+)
+
+type FetchParams struct {
+	URL     string `json:"url" description:"The URL to fetch content from"`
+	Format  string `json:"format" description:"The format to return the content in (text, markdown, or html)"`
+	Timeout int    `json:"timeout,omitempty" description:"Optional timeout in seconds (max 120)"`
+}
+
+type FetchPermissionsParams struct {
+	URL     string `json:"url"`
+	Format  string `json:"format"`
+	Timeout int    `json:"timeout,omitempty"`
+}
+
+type fetchTool struct {
+	client      *http.Client
+	permissions permission.Service
+	workingDir  string
+}
+
+const FetchToolName = "fetch"
+
+//go:embed fetch.md
+var fetchDescription []byte
+
+func NewFetchTool(permissions permission.Service, workingDir string) ai.AgentTool {
+	client := &http.Client{
+		Timeout: 30 * time.Second,
+		Transport: &http.Transport{
+			MaxIdleConns:        100,
+			MaxIdleConnsPerHost: 10,
+			IdleConnTimeout:     90 * time.Second,
+		},
+	}
+
+	return ai.NewAgentTool(
+		FetchToolName,
+		string(fetchDescription),
+		func(ctx context.Context, params FetchParams, call ai.ToolCall) (ai.ToolResponse, error) {
+			if params.URL == "" {
+				return ai.NewTextErrorResponse("URL parameter is required"), nil
+			}
+
+			format := strings.ToLower(params.Format)
+			if format != "text" && format != "markdown" && format != "html" {
+				return ai.NewTextErrorResponse("Format must be one of: text, markdown, html"), nil
+			}
+
+			if !strings.HasPrefix(params.URL, "http://") && !strings.HasPrefix(params.URL, "https://") {
+				return ai.NewTextErrorResponse("URL must start with http:// or https://"), nil
+			}
+
+			sessionID := GetSessionFromContext(ctx)
+			if sessionID == "" {
+				return ai.ToolResponse{}, fmt.Errorf("session ID is required for fetching URL content")
+			}
+
+			p := permissions.Request(
+				permission.CreatePermissionRequest{
+					SessionID:   sessionID,
+					Path:        workingDir,
+					ToolCallID:  call.ID,
+					ToolName:    FetchToolName,
+					Action:      "fetch",
+					Description: fmt.Sprintf("Fetch content from URL: %s", params.URL),
+					Params:      FetchPermissionsParams(params),
+				},
+			)
+
+			if !p {
+				return ai.ToolResponse{}, permission.ErrorPermissionDenied
+			}
+
+			// Handle timeout with context
+			requestCtx := ctx
+			if params.Timeout > 0 {
+				maxTimeout := 120 // 2 minutes
+				if params.Timeout > maxTimeout {
+					params.Timeout = maxTimeout
+				}
+				var cancel context.CancelFunc
+				requestCtx, cancel = context.WithTimeout(ctx, time.Duration(params.Timeout)*time.Second)
+				defer cancel()
+			}
+
+			req, err := http.NewRequestWithContext(requestCtx, "GET", params.URL, nil)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("failed to create request: %w", err)
+			}
+
+			req.Header.Set("User-Agent", "crush/1.0")
+
+			resp, err := client.Do(req)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("failed to fetch URL: %w", err)
+			}
+			defer resp.Body.Close()
+
+			if resp.StatusCode != http.StatusOK {
+				return ai.NewTextErrorResponse(fmt.Sprintf("Request failed with status code: %d", resp.StatusCode)), nil
+			}
+
+			maxSize := int64(5 * 1024 * 1024) // 5MB
+			body, err := io.ReadAll(io.LimitReader(resp.Body, maxSize))
+			if err != nil {
+				return ai.NewTextErrorResponse("Failed to read response body: " + err.Error()), nil
+			}
+
+			content := string(body)
+
+			isValidUTF8 := utf8.ValidString(content)
+			if !isValidUTF8 {
+				return ai.NewTextErrorResponse("Response content is not valid UTF-8"), nil
+			}
+			contentType := resp.Header.Get("Content-Type")
+
+			switch format {
+			case "text":
+				if strings.Contains(contentType, "text/html") {
+					text, err := extractTextFromHTML(content)
+					if err != nil {
+						return ai.NewTextErrorResponse("Failed to extract text from HTML: " + err.Error()), nil
+					}
+					content = text
+				}
+
+			case "markdown":
+				if strings.Contains(contentType, "text/html") {
+					markdown, err := convertHTMLToMarkdown(content)
+					if err != nil {
+						return ai.NewTextErrorResponse("Failed to convert HTML to Markdown: " + err.Error()), nil
+					}
+					content = markdown
+				}
+
+				content = "```\n" + content + "\n```"
+
+			case "html":
+				// return only the body of the HTML document
+				if strings.Contains(contentType, "text/html") {
+					doc, err := goquery.NewDocumentFromReader(strings.NewReader(content))
+					if err != nil {
+						return ai.NewTextErrorResponse("Failed to parse HTML: " + err.Error()), nil
+					}
+					body, err := doc.Find("body").Html()
+					if err != nil {
+						return ai.NewTextErrorResponse("Failed to extract body from HTML: " + err.Error()), nil
+					}
+					if body == "" {
+						return ai.NewTextErrorResponse("No body content found in HTML"), nil
+					}
+					content = "<html>\n<body>\n" + body + "\n</body>\n</html>"
+				}
+			}
+			// calculate byte size of content
+			contentSize := int64(len(content))
+			if contentSize > MaxReadSize {
+				content = content[:MaxReadSize]
+				content += fmt.Sprintf("\n\n[Content truncated to %d bytes]", MaxReadSize)
+			}
+
+			return ai.NewTextResponse(content), nil
+		})
+}
+
+func extractTextFromHTML(html string) (string, error) {
+	doc, err := goquery.NewDocumentFromReader(strings.NewReader(html))
+	if err != nil {
+		return "", err
+	}
+
+	text := doc.Find("body").Text()
+	text = strings.Join(strings.Fields(text), " ")
+
+	return text, nil
+}
+
+func convertHTMLToMarkdown(html string) (string, error) {
+	converter := md.NewConverter("", true, nil)
+
+	markdown, err := converter.ConvertString(html)
+	if err != nil {
+		return "", err
+	}
+
+	return markdown, nil
+}

internal/agent/tools/fetch.md 🔗

@@ -0,0 +1,28 @@
+Fetches content from a URL and returns it in the specified format.
+
+<usage>
+- Provide URL to fetch content from
+- Specify desired output format (text, markdown, or html)
+- Optional timeout for request
+</usage>
+
+<features>
+- Supports three output formats: text, markdown, html
+- Auto-handles HTTP redirects
+- Sets reasonable timeouts to prevent hanging
+- Validates input parameters before requests
+</features>
+
+<limitations>
+- Max response size: 5MB
+- Only supports HTTP and HTTPS protocols
+- Cannot handle authentication or cookies
+- Some websites may block automated requests
+</limitations>
+
+<tips>
+- Use text format for plain text content or simple API responses
+- Use markdown format for content that should be rendered with formatting
+- Use html format when you need raw HTML structure
+- Set appropriate timeouts for potentially slow websites
+</tips>
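
A hedged sketch of the JSON arguments a fetch call serializes to, mirroring the FetchParams struct in fetch.go above (the struct below is an illustrative copy, since the real one lives in the internal tools package):

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // Illustrative copy of FetchParams from fetch.go above.
    type fetchParams struct {
        URL     string `json:"url"`
        Format  string `json:"format"`
        Timeout int    `json:"timeout,omitempty"`
    }

    func main() {
        p := fetchParams{URL: "https://example.com", Format: "markdown", Timeout: 30}
        b, _ := json.Marshal(p)
        fmt.Println(string(b)) // {"url":"https://example.com","format":"markdown","timeout":30}
    }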

internal/agent/tools/file.go 🔗

@@ -0,0 +1,53 @@
+package tools
+
+import (
+	"sync"
+	"time"
+)
+
+// File record to track when files were read/written
+type fileRecord struct {
+	path      string
+	readTime  time.Time
+	writeTime time.Time
+}
+
+var (
+	fileRecords     = make(map[string]fileRecord)
+	fileRecordMutex sync.RWMutex
+)
+
+func recordFileRead(path string) {
+	fileRecordMutex.Lock()
+	defer fileRecordMutex.Unlock()
+
+	record, exists := fileRecords[path]
+	if !exists {
+		record = fileRecord{path: path}
+	}
+	record.readTime = time.Now()
+	fileRecords[path] = record
+}
+
+func getLastReadTime(path string) time.Time {
+	fileRecordMutex.RLock()
+	defer fileRecordMutex.RUnlock()
+
+	record, exists := fileRecords[path]
+	if !exists {
+		return time.Time{}
+	}
+	return record.readTime
+}
+
+func recordFileWrite(path string) {
+	fileRecordMutex.Lock()
+	defer fileRecordMutex.Unlock()
+
+	record, exists := fileRecords[path]
+	if !exists {
+		record = fileRecord{path: path}
+	}
+	record.writeTime = time.Now()
+	fileRecords[path] = record
+}
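
These read/write timestamps are what the edit tools consult to refuse edits against files that changed after they were last read (processMultiEditExistingFile below does exactly this). A sketch of that check, assuming it sits in the same tools package (so it can call the unexported helpers) with "fmt" and "os" imported:

    // checkFreshness is illustrative only; it mirrors the staleness check
    // performed in processMultiEditExistingFile further down.
    func checkFreshness(path string) error {
        lastRead := getLastReadTime(path)
        if lastRead.IsZero() {
            return fmt.Errorf("%s must be read before it can be edited", path)
        }
        info, err := os.Stat(path)
        if err != nil {
            return err
        }
        if info.ModTime().After(lastRead) {
            return fmt.Errorf("%s was modified after it was last read", path)
        }
        return nil
    }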

internal/agent/tools/glob.go 🔗

@@ -0,0 +1,118 @@
+package tools
+
+import (
+	"bytes"
+	"context"
+	_ "embed"
+	"fmt"
+	"log/slog"
+	"os/exec"
+	"path/filepath"
+	"sort"
+	"strings"
+
+	"github.com/charmbracelet/crush/internal/fsext"
+	"github.com/charmbracelet/fantasy/ai"
+)
+
+const GlobToolName = "glob"
+
+//go:embed glob.md
+var globDescription []byte
+
+type GlobParams struct {
+	Pattern string `json:"pattern" description:"The glob pattern to match files against"`
+	Path    string `json:"path" description:"The directory to search in. Defaults to the current working directory."`
+}
+
+type GlobResponseMetadata struct {
+	NumberOfFiles int  `json:"number_of_files"`
+	Truncated     bool `json:"truncated"`
+}
+
+func NewGlobTool(workingDir string) ai.AgentTool {
+	return ai.NewAgentTool(
+		GlobToolName,
+		string(globDescription),
+		func(ctx context.Context, params GlobParams, call ai.ToolCall) (ai.ToolResponse, error) {
+			if params.Pattern == "" {
+				return ai.NewTextErrorResponse("pattern is required"), nil
+			}
+
+			searchPath := params.Path
+			if searchPath == "" {
+				searchPath = workingDir
+			}
+
+			files, truncated, err := globFiles(ctx, params.Pattern, searchPath, 100)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("error finding files: %w", err)
+			}
+
+			var output string
+			if len(files) == 0 {
+				output = "No files found"
+			} else {
+				output = strings.Join(files, "\n")
+				if truncated {
+					output += "\n\n(Results are truncated. Consider using a more specific path or pattern.)"
+				}
+			}
+
+			return ai.WithResponseMetadata(
+				ai.NewTextResponse(output),
+				GlobResponseMetadata{
+					NumberOfFiles: len(files),
+					Truncated:     truncated,
+				},
+			), nil
+		})
+}
+
+func globFiles(ctx context.Context, pattern, searchPath string, limit int) ([]string, bool, error) {
+	cmdRg := getRgCmd(ctx, pattern)
+	if cmdRg != nil {
+		cmdRg.Dir = searchPath
+		matches, err := runRipgrep(cmdRg, searchPath, limit)
+		if err == nil {
+			return matches, len(matches) >= limit && limit > 0, nil
+		}
+		slog.Warn("Ripgrep execution failed, falling back to doublestar", "error", err)
+	}
+
+	return fsext.GlobWithDoubleStar(pattern, searchPath, limit)
+}
+
+func runRipgrep(cmd *exec.Cmd, searchRoot string, limit int) ([]string, error) {
+	out, err := cmd.CombinedOutput()
+	if err != nil {
+		if ee, ok := err.(*exec.ExitError); ok && ee.ExitCode() == 1 {
+			return nil, nil
+		}
+		return nil, fmt.Errorf("ripgrep: %w\n%s", err, out)
+	}
+
+	var matches []string
+	for p := range bytes.SplitSeq(out, []byte{0}) {
+		if len(p) == 0 {
+			continue
+		}
+		absPath := string(p)
+		if !filepath.IsAbs(absPath) {
+			absPath = filepath.Join(searchRoot, absPath)
+		}
+		if fsext.SkipHidden(absPath) {
+			continue
+		}
+		matches = append(matches, absPath)
+	}
+
+	sort.SliceStable(matches, func(i, j int) bool {
+		return len(matches[i]) < len(matches[j])
+	})
+
+	if limit > 0 && len(matches) > limit {
+		matches = matches[:limit]
+	}
+	return matches, nil
+}

internal/agent/tools/glob.md 🔗

@@ -0,0 +1,42 @@
+Fast file pattern matching tool that finds files by name/pattern, returning paths sorted by modification time (newest first).
+
+<usage>
+- Provide glob pattern to match against file paths
+- Optional starting directory (defaults to current working directory)
+- Results sorted with most recently modified files first
+</usage>
+
+<pattern_syntax>
+
+- '\*' matches any sequence of non-separator characters
+- '\*\*' matches any sequence including separators
+- '?' matches any single non-separator character
+- '[...]' matches any character in brackets
+- '[!...]' matches any character not in brackets
+  </pattern_syntax>
+
+<examples>
+- '*.js' - JavaScript files in current directory
+- '**/*.js' - JavaScript files in any subdirectory
+- 'src/**/*.{ts,tsx}' - TypeScript files in src directory
+- '*.{html,css,js}' - HTML, CSS, and JS files
+</examples>
+
+<limitations>
+- Results limited to 100 files (newest first)
+- Does not search file contents (use Grep for that)
+- Hidden files (starting with '.') skipped
+</limitations>
+
+<cross_platform>
+
+- Path separators handled automatically (/ and \ work)
+- Uses ripgrep (rg) if available, otherwise Go implementation
+- Patterns should use forward slashes (/) for compatibility
+  </cross_platform>
+
+<tips>
+- Combine with Grep: find files with Glob, search contents with Grep
+- For iterative exploration requiring multiple searches, consider Agent tool
+- Check if results truncated and refine pattern if needed
+</tips>
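
The pattern syntax above follows the doublestar semantics used by the Go fallback (ripgrep is only used when rg is installed). A quick sketch of those semantics, assuming the bmatcuk/doublestar/v4 package that fsext.GlobWithDoubleStar appears to wrap:

    package main

    import (
        "fmt"

        "github.com/bmatcuk/doublestar/v4"
    )

    func main() {
        pattern := "src/**/*.{ts,tsx}"
        for _, name := range []string{"src/app.ts", "src/ui/button.tsx", "docs/readme.md"} {
            ok, _ := doublestar.Match(pattern, name)
            fmt.Printf("%-20s %v\n", name, ok) // true, true, false
        }
    }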

internal/agent/tools/grep.go 🔗

@@ -0,0 +1,430 @@
+package tools
+
+import (
+	"bufio"
+	"context"
+	_ "embed"
+	"fmt"
+	"io"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"regexp"
+	"sort"
+	"strconv"
+	"strings"
+	"sync"
+	"time"
+
+	"github.com/charmbracelet/crush/internal/fsext"
+	"github.com/charmbracelet/fantasy/ai"
+)
+
+// regexCache provides thread-safe caching of compiled regex patterns
+type regexCache struct {
+	cache map[string]*regexp.Regexp
+	mu    sync.RWMutex
+}
+
+// newRegexCache creates a new regex cache
+func newRegexCache() *regexCache {
+	return &regexCache{
+		cache: make(map[string]*regexp.Regexp),
+	}
+}
+
+// get retrieves a compiled regex from cache or compiles and caches it
+func (rc *regexCache) get(pattern string) (*regexp.Regexp, error) {
+	// Try to get from cache first (read lock)
+	rc.mu.RLock()
+	if regex, exists := rc.cache[pattern]; exists {
+		rc.mu.RUnlock()
+		return regex, nil
+	}
+	rc.mu.RUnlock()
+
+	// Compile the regex (write lock)
+	rc.mu.Lock()
+	defer rc.mu.Unlock()
+
+	// Double-check in case another goroutine compiled it while we waited
+	if regex, exists := rc.cache[pattern]; exists {
+		return regex, nil
+	}
+
+	// Compile and cache the regex
+	regex, err := regexp.Compile(pattern)
+	if err != nil {
+		return nil, err
+	}
+
+	rc.cache[pattern] = regex
+	return regex, nil
+}
+
+// Global regex cache instances
+var (
+	searchRegexCache = newRegexCache()
+	globRegexCache   = newRegexCache()
+	// Pre-compiled regex for glob conversion (used frequently)
+	globBraceRegex = regexp.MustCompile(`\{([^}]+)\}`)
+)
+
+type GrepParams struct {
+	Pattern     string `json:"pattern" description:"The regex pattern to search for in file contents"`
+	Path        string `json:"path" description:"The directory to search in. Defaults to the current working directory."`
+	Include     string `json:"include" description:"File pattern to include in the search (e.g. \"*.js\", \"*.{ts,tsx}\")"`
+	LiteralText bool   `json:"literal_text" description:"If true, the pattern will be treated as literal text with special regex characters escaped. Default is false."`
+}
+
+type grepMatch struct {
+	path     string
+	modTime  time.Time
+	lineNum  int
+	lineText string
+}
+
+type GrepResponseMetadata struct {
+	NumberOfMatches int  `json:"number_of_matches"`
+	Truncated       bool `json:"truncated"`
+}
+
+const GrepToolName = "grep"
+
+//go:embed grep.md
+var grepDescription []byte
+
+func NewGrepTool(workingDir string) ai.AgentTool {
+	return ai.NewAgentTool(
+		GrepToolName,
+		string(grepDescription),
+		func(ctx context.Context, params GrepParams, call ai.ToolCall) (ai.ToolResponse, error) {
+			if params.Pattern == "" {
+				return ai.NewTextErrorResponse("pattern is required"), nil
+			}
+
+			// If literal_text is true, escape the pattern
+			searchPattern := params.Pattern
+			if params.LiteralText {
+				searchPattern = escapeRegexPattern(params.Pattern)
+			}
+
+			searchPath := params.Path
+			if searchPath == "" {
+				searchPath = workingDir
+			}
+
+			matches, truncated, err := searchFiles(ctx, searchPattern, searchPath, params.Include, 100)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("error searching files: %w", err)
+			}
+
+			var output strings.Builder
+			if len(matches) == 0 {
+				output.WriteString("No files found")
+			} else {
+				fmt.Fprintf(&output, "Found %d matches\n", len(matches))
+
+				currentFile := ""
+				for _, match := range matches {
+					if currentFile != match.path {
+						if currentFile != "" {
+							output.WriteString("\n")
+						}
+						currentFile = match.path
+						fmt.Fprintf(&output, "%s:\n", match.path)
+					}
+					if match.lineNum > 0 {
+						fmt.Fprintf(&output, "  Line %d: %s\n", match.lineNum, match.lineText)
+					} else {
+						fmt.Fprintf(&output, "  %s\n", match.path)
+					}
+				}
+
+				if truncated {
+					output.WriteString("\n(Results are truncated. Consider using a more specific path or pattern.)")
+				}
+			}
+
+			return ai.WithResponseMetadata(
+				ai.NewTextResponse(output.String()),
+				GrepResponseMetadata{
+					NumberOfMatches: len(matches),
+					Truncated:       truncated,
+				},
+			), nil
+		})
+}
+
+// escapeRegexPattern escapes special regex characters so they're treated as literal characters
+func escapeRegexPattern(pattern string) string {
+	specialChars := []string{"\\", ".", "+", "*", "?", "(", ")", "[", "]", "{", "}", "^", "$", "|"}
+	escaped := pattern
+
+	for _, char := range specialChars {
+		escaped = strings.ReplaceAll(escaped, char, "\\"+char)
+	}
+
+	return escaped
+}
+
+func searchFiles(ctx context.Context, pattern, rootPath, include string, limit int) ([]grepMatch, bool, error) {
+	matches, err := searchWithRipgrep(ctx, pattern, rootPath, include)
+	if err != nil {
+		matches, err = searchFilesWithRegex(pattern, rootPath, include)
+		if err != nil {
+			return nil, false, err
+		}
+	}
+
+	sort.Slice(matches, func(i, j int) bool {
+		return matches[i].modTime.After(matches[j].modTime)
+	})
+
+	truncated := len(matches) > limit
+	if truncated {
+		matches = matches[:limit]
+	}
+
+	return matches, truncated, nil
+}
+
+func searchWithRipgrep(ctx context.Context, pattern, path, include string) ([]grepMatch, error) {
+	cmd := getRgSearchCmd(ctx, pattern, path, include)
+	if cmd == nil {
+		return nil, fmt.Errorf("ripgrep not found in $PATH")
+	}
+
+	// Only add ignore files if they exist
+	for _, ignoreFile := range []string{".gitignore", ".crushignore"} {
+		ignorePath := filepath.Join(path, ignoreFile)
+		if _, err := os.Stat(ignorePath); err == nil {
+			cmd.Args = append(cmd.Args, "--ignore-file", ignorePath)
+		}
+	}
+
+	output, err := cmd.Output()
+	if err != nil {
+		if exitErr, ok := err.(*exec.ExitError); ok && exitErr.ExitCode() == 1 {
+			return []grepMatch{}, nil
+		}
+		return nil, err
+	}
+
+	lines := strings.Split(strings.TrimSpace(string(output)), "\n")
+	matches := make([]grepMatch, 0, len(lines))
+
+	for _, line := range lines {
+		if line == "" {
+			continue
+		}
+
+		// Parse ripgrep output using null separation
+		filePath, lineNumStr, lineText, ok := parseRipgrepLine(line)
+		if !ok {
+			continue
+		}
+
+		lineNum, err := strconv.Atoi(lineNumStr)
+		if err != nil {
+			continue
+		}
+
+		fileInfo, err := os.Stat(filePath)
+		if err != nil {
+			continue // Skip files we can't access
+		}
+
+		matches = append(matches, grepMatch{
+			path:     filePath,
+			modTime:  fileInfo.ModTime(),
+			lineNum:  lineNum,
+			lineText: lineText,
+		})
+	}
+
+	return matches, nil
+}
+
+// parseRipgrepLine parses ripgrep output with null separation to handle Windows paths
+func parseRipgrepLine(line string) (filePath, lineNum, lineText string, ok bool) {
+	// Split on null byte first to separate filename from rest
+	parts := strings.SplitN(line, "\x00", 2)
+	if len(parts) != 2 {
+		return "", "", "", false
+	}
+
+	filePath = parts[0]
+	remainder := parts[1]
+
+	// Now split the remainder on first colon: "linenum:content"
+	colonIndex := strings.Index(remainder, ":")
+	if colonIndex == -1 {
+		return "", "", "", false
+	}
+
+	lineNumStr := remainder[:colonIndex]
+	lineText = remainder[colonIndex+1:]
+
+	if _, err := strconv.Atoi(lineNumStr); err != nil {
+		return "", "", "", false
+	}
+
+	return filePath, lineNumStr, lineText, true
+}
+
+func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error) {
+	matches := []grepMatch{}
+
+	// Use cached regex compilation
+	regex, err := searchRegexCache.get(pattern)
+	if err != nil {
+		return nil, fmt.Errorf("invalid regex pattern: %w", err)
+	}
+
+	var includePattern *regexp.Regexp
+	if include != "" {
+		regexPattern := globToRegex(include)
+		includePattern, err = globRegexCache.get(regexPattern)
+		if err != nil {
+			return nil, fmt.Errorf("invalid include pattern: %w", err)
+		}
+	}
+
+	// Create walker with gitignore and crushignore support
+	walker := fsext.NewFastGlobWalker(rootPath)
+
+	err = filepath.Walk(rootPath, func(path string, info os.FileInfo, err error) error {
+		if err != nil {
+			return nil // Skip errors
+		}
+
+		if info.IsDir() {
+			// Check if directory should be skipped
+			if walker.ShouldSkip(path) {
+				return filepath.SkipDir
+			}
+			return nil // Continue into directory
+		}
+
+		// Use walker's shouldSkip method for files
+		if walker.ShouldSkip(path) {
+			return nil
+		}
+
+		// Skip hidden files (starting with a dot) to match ripgrep's default behavior
+		base := filepath.Base(path)
+		if base != "." && strings.HasPrefix(base, ".") {
+			return nil
+		}
+
+		if includePattern != nil && !includePattern.MatchString(path) {
+			return nil
+		}
+
+		match, lineNum, lineText, err := fileContainsPattern(path, regex)
+		if err != nil {
+			return nil // Skip files we can't read
+		}
+
+		if match {
+			matches = append(matches, grepMatch{
+				path:     path,
+				modTime:  info.ModTime(),
+				lineNum:  lineNum,
+				lineText: lineText,
+			})
+
+			if len(matches) >= 200 {
+				return filepath.SkipAll
+			}
+		}
+
+		return nil
+	})
+	if err != nil {
+		return nil, err
+	}
+
+	return matches, nil
+}
+
+func fileContainsPattern(filePath string, pattern *regexp.Regexp) (bool, int, string, error) {
+	// Quick binary file detection
+	if isBinaryFile(filePath) {
+		return false, 0, "", nil
+	}
+
+	file, err := os.Open(filePath)
+	if err != nil {
+		return false, 0, "", err
+	}
+	defer file.Close()
+
+	scanner := bufio.NewScanner(file)
+	lineNum := 0
+	for scanner.Scan() {
+		lineNum++
+		line := scanner.Text()
+		if pattern.MatchString(line) {
+			return true, lineNum, line, nil
+		}
+	}
+
+	return false, 0, "", scanner.Err()
+}
+
+var binaryExts = map[string]struct{}{
+	".exe": {}, ".dll": {}, ".so": {}, ".dylib": {},
+	".bin": {}, ".obj": {}, ".o": {}, ".a": {},
+	".zip": {}, ".tar": {}, ".gz": {}, ".bz2": {},
+	".jpg": {}, ".jpeg": {}, ".png": {}, ".gif": {},
+	".pdf": {}, ".doc": {}, ".docx": {}, ".xls": {},
+	".mp3": {}, ".mp4": {}, ".avi": {}, ".mov": {},
+}
+
+// isBinaryFile performs a quick check to determine if a file is binary
+func isBinaryFile(filePath string) bool {
+	// Check file extension first (fastest)
+	ext := strings.ToLower(filepath.Ext(filePath))
+	if _, isBinary := binaryExts[ext]; isBinary {
+		return true
+	}
+
+	// Quick content check for files without clear extensions
+	file, err := os.Open(filePath)
+	if err != nil {
+		return false // If we can't open it, let the caller handle the error
+	}
+	defer file.Close()
+
+	// Read first 512 bytes to check for null bytes
+	buffer := make([]byte, 512)
+	n, err := file.Read(buffer)
+	if err != nil && err != io.EOF {
+		return false
+	}
+
+	// Check for null bytes (common in binary files)
+	for i := range n {
+		if buffer[i] == 0 {
+			return true
+		}
+	}
+
+	return false
+}
+
+func globToRegex(glob string) string {
+	regexPattern := strings.ReplaceAll(glob, ".", "\\.")
+	regexPattern = strings.ReplaceAll(regexPattern, "*", ".*")
+	regexPattern = strings.ReplaceAll(regexPattern, "?", ".")
+
+	// Use pre-compiled regex instead of compiling each time
+	regexPattern = globBraceRegex.ReplaceAllStringFunc(regexPattern, func(match string) string {
+		inner := match[1 : len(match)-1]
+		return "(" + strings.ReplaceAll(inner, ",", "|") + ")"
+	})
+
+	return regexPattern
+}

internal/agent/tools/grep.md 🔗

@@ -0,0 +1,52 @@
+Fast content search tool that finds files containing specific text/patterns, returning matching paths sorted by modification time (newest first).
+
+<usage>
+- Provide regex pattern to search within file contents
+- Set literal_text=true for exact text with special characters (recommended for non-regex users)
+- Optional starting directory (defaults to current working directory)
+- Optional include pattern to filter which files to search
+- Results sorted with most recently modified files first
+</usage>
+
+<regex_syntax>
+When literal_text=false (supports standard regex):
+
+- 'function' searches for literal text "function"
+- 'log\..\*Error' finds text starting with "log." and ending with "Error"
+- 'import\s+.\*\s+from' finds import statements in JavaScript/TypeScript
+  </regex_syntax>
+
+<include_patterns>
+
+- '\*.js' - Only search JavaScript files
+- '\*.{ts,tsx}' - Only search TypeScript files
+- '\*.go' - Only search Go files
+  </include_patterns>
+
+<limitations>
+- Results limited to 100 files (newest first)
+- Performance depends on number of files searched
+- Very large binary files may be skipped
+- Hidden files (starting with '.') skipped
+</limitations>
+
+<ignore_support>
+
+- Respects .gitignore patterns to skip ignored files/directories
+- Respects .crushignore patterns for additional ignore rules
+- Both ignore files auto-detected in search root directory
+  </ignore_support>
+
+<cross_platform>
+
+- Uses ripgrep (rg) if available for better performance
+- Falls back to Go implementation if ripgrep unavailable
+- File paths normalized automatically for compatibility
+  </cross_platform>
+
+<tips>
+- For faster searches: use Glob to find relevant files first, then Grep
+- For iterative exploration requiring multiple searches, consider Agent tool
+- Check if results truncated and refine search pattern if needed
+- Use literal_text=true for exact text with special characters (dots, parentheses, etc.)
+</tips>
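
On literal_text: the escaping done by escapeRegexPattern above is comparable to the standard library's regexp.QuoteMeta, which is a convenient way to reason about what a literal search will match. A small sketch (illustrative, using QuoteMeta as a stand-in):

    package main

    import (
        "fmt"
        "regexp"
    )

    func main() {
        // literal_text=true escapes metacharacters before the search runs.
        pattern := regexp.QuoteMeta("logger.Error(")
        fmt.Println(pattern) // logger\.Error\(
        re := regexp.MustCompile(pattern)
        fmt.Println(re.MatchString(`logger.Error("boom")`)) // true
        fmt.Println(re.MatchString("loggerXError("))        // false: the dot is literal
    }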

internal/agent/tools/grep_test.go 🔗

@@ -0,0 +1,200 @@
+package tools
+
+import (
+	"os"
+	"path/filepath"
+	"regexp"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestRegexCache(t *testing.T) {
+	cache := newRegexCache()
+
+	// Test basic caching
+	pattern := "test.*pattern"
+	regex1, err := cache.get(pattern)
+	if err != nil {
+		t.Fatalf("Failed to compile regex: %v", err)
+	}
+
+	regex2, err := cache.get(pattern)
+	if err != nil {
+		t.Fatalf("Failed to get cached regex: %v", err)
+	}
+
+	// Should be the same instance (cached)
+	if regex1 != regex2 {
+		t.Error("Expected cached regex to be the same instance")
+	}
+
+	// Test that it actually works
+	if !regex1.MatchString("test123pattern") {
+		t.Error("Regex should match test string")
+	}
+}
+
+func TestGlobToRegexCaching(t *testing.T) {
+	// Test that globToRegex uses pre-compiled regex
+	pattern1 := globToRegex("*.{js,ts}")
+
+	// Should not panic and should work correctly
+	regex1, err := regexp.Compile(pattern1)
+	if err != nil {
+		t.Fatalf("Failed to compile glob regex: %v", err)
+	}
+
+	if !regex1.MatchString("test.js") {
+		t.Error("Glob regex should match .js files")
+	}
+	if !regex1.MatchString("test.ts") {
+		t.Error("Glob regex should match .ts files")
+	}
+	if regex1.MatchString("test.go") {
+		t.Error("Glob regex should not match .go files")
+	}
+}
+
+func TestGrepWithIgnoreFiles(t *testing.T) {
+	t.Parallel()
+	tempDir := t.TempDir()
+
+	// Create test files
+	testFiles := map[string]string{
+		"file1.txt":           "hello world",
+		"file2.txt":           "hello world",
+		"ignored/file3.txt":   "hello world",
+		"node_modules/lib.js": "hello world",
+		"secret.key":          "hello world",
+	}
+
+	for path, content := range testFiles {
+		fullPath := filepath.Join(tempDir, path)
+		require.NoError(t, os.MkdirAll(filepath.Dir(fullPath), 0o755))
+		require.NoError(t, os.WriteFile(fullPath, []byte(content), 0o644))
+	}
+
+	// Create .gitignore file
+	gitignoreContent := "ignored/\n*.key\n"
+	require.NoError(t, os.WriteFile(filepath.Join(tempDir, ".gitignore"), []byte(gitignoreContent), 0o644))
+
+	// Create .crushignore file
+	crushignoreContent := "node_modules/\n"
+	require.NoError(t, os.WriteFile(filepath.Join(tempDir, ".crushignore"), []byte(crushignoreContent), 0o644))
+
+	// Test both implementations
+	for name, fn := range map[string]func(pattern, path, include string) ([]grepMatch, error){
+		"regex": searchFilesWithRegex,
+		"rg": func(pattern, path, include string) ([]grepMatch, error) {
+			return searchWithRipgrep(t.Context(), pattern, path, include)
+		},
+	} {
+		t.Run(name, func(t *testing.T) {
+			t.Parallel()
+
+			if name == "rg" && getRg() == "" {
+				t.Skip("rg is not in $PATH")
+			}
+
+			matches, err := fn("hello world", tempDir, "")
+			require.NoError(t, err)
+
+			// Convert matches to a set of file paths for easier testing
+			foundFiles := make(map[string]bool)
+			for _, match := range matches {
+				foundFiles[filepath.Base(match.path)] = true
+			}
+
+			// Should find file1.txt and file2.txt
+			require.True(t, foundFiles["file1.txt"], "Should find file1.txt")
+			require.True(t, foundFiles["file2.txt"], "Should find file2.txt")
+
+			// Should NOT find ignored files
+			require.False(t, foundFiles["file3.txt"], "Should not find file3.txt (ignored by .gitignore)")
+			require.False(t, foundFiles["lib.js"], "Should not find lib.js (ignored by .crushignore)")
+			require.False(t, foundFiles["secret.key"], "Should not find secret.key (ignored by .gitignore)")
+
+			// Should find exactly 2 matches
+			require.Equal(t, 2, len(matches), "Should find exactly 2 matches")
+		})
+	}
+}
+
+func TestSearchImplementations(t *testing.T) {
+	t.Parallel()
+	tempDir := t.TempDir()
+
+	for path, content := range map[string]string{
+		"file1.go":         "package main\nfunc main() {\n\tfmt.Println(\"hello world\")\n}",
+		"file2.js":         "console.log('hello world');",
+		"file3.txt":        "hello world from text file",
+		"binary.exe":       "\x00\x01\x02\x03",
+		"empty.txt":        "",
+		"subdir/nested.go": "package nested\n// hello world comment",
+		".hidden.txt":      "hello world in hidden file",
+		"file4.txt":        "hello world from a banana",
+		"file5.txt":        "hello world from a grape",
+	} {
+		fullPath := filepath.Join(tempDir, path)
+		require.NoError(t, os.MkdirAll(filepath.Dir(fullPath), 0o755))
+		require.NoError(t, os.WriteFile(fullPath, []byte(content), 0o644))
+	}
+
+	require.NoError(t, os.WriteFile(filepath.Join(tempDir, ".gitignore"), []byte("file4.txt\n"), 0o644))
+	require.NoError(t, os.WriteFile(filepath.Join(tempDir, ".crushignore"), []byte("file5.txt\n"), 0o644))
+
+	for name, fn := range map[string]func(pattern, path, include string) ([]grepMatch, error){
+		"regex": searchFilesWithRegex,
+		"rg": func(pattern, path, include string) ([]grepMatch, error) {
+			return searchWithRipgrep(t.Context(), pattern, path, include)
+		},
+	} {
+		t.Run(name, func(t *testing.T) {
+			t.Parallel()
+
+			if name == "rg" && getRg() == "" {
+				t.Skip("rg is not in $PATH")
+			}
+
+			matches, err := fn("hello world", tempDir, "")
+			require.NoError(t, err)
+
+			require.Len(t, matches, 4)
+			for _, match := range matches {
+				require.NotEmpty(t, match.path)
+				require.NotZero(t, match.lineNum)
+				require.NotEmpty(t, match.lineText)
+				require.NotZero(t, match.modTime)
+				require.NotContains(t, match.path, ".hidden.txt")
+				require.NotContains(t, match.path, "file4.txt")
+				require.NotContains(t, match.path, "file5.txt")
+				require.NotContains(t, match.path, "binary.exe")
+			}
+		})
+	}
+}
+
+// Benchmark to show performance improvement
+func BenchmarkRegexCacheVsCompile(b *testing.B) {
+	cache := newRegexCache()
+	pattern := "test.*pattern.*[0-9]+"
+
+	b.Run("WithCache", func(b *testing.B) {
+		for b.Loop() {
+			_, err := cache.get(pattern)
+			if err != nil {
+				b.Fatal(err)
+			}
+		}
+	})
+
+	b.Run("WithoutCache", func(b *testing.B) {
+		for b.Loop() {
+			_, err := regexp.Compile(pattern)
+			if err != nil {
+				b.Fatal(err)
+			}
+		}
+	})
+}

internal/agent/tools/ls.go 🔗

@@ -0,0 +1,239 @@
+package tools
+
+import (
+	"context"
+	_ "embed"
+	"fmt"
+	"os"
+	"path/filepath"
+	"strings"
+
+	"github.com/charmbracelet/crush/internal/fsext"
+	"github.com/charmbracelet/crush/internal/permission"
+	"github.com/charmbracelet/fantasy/ai"
+)
+
+type LSParams struct {
+	Path   string   `json:"path" description:"The path to the directory to list (defaults to current working directory)"`
+	Ignore []string `json:"ignore,omitempty" description:"List of glob patterns to ignore"`
+}
+
+type LSPermissionsParams struct {
+	Path   string   `json:"path"`
+	Ignore []string `json:"ignore"`
+}
+
+type TreeNode struct {
+	Name     string      `json:"name"`
+	Path     string      `json:"path"`
+	Type     string      `json:"type"` // "file" or "directory"
+	Children []*TreeNode `json:"children,omitempty"`
+}
+
+type LSResponseMetadata struct {
+	NumberOfFiles int  `json:"number_of_files"`
+	Truncated     bool `json:"truncated"`
+}
+
+const (
+	LSToolName = "ls"
+	MaxLSFiles = 1000
+)
+
+//go:embed ls.md
+var lsDescription []byte
+
+func NewLsTool(permissions permission.Service, workingDir string) ai.AgentTool {
+	return ai.NewAgentTool(
+		LSToolName,
+		string(lsDescription),
+		func(ctx context.Context, params LSParams, call ai.ToolCall) (ai.ToolResponse, error) {
+			searchPath := params.Path
+			if searchPath == "" {
+				searchPath = workingDir
+			}
+
+			var err error
+			searchPath, err = fsext.Expand(searchPath)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("error expanding path: %w", err)
+			}
+
+			if !filepath.IsAbs(searchPath) {
+				searchPath = filepath.Join(workingDir, searchPath)
+			}
+
+			// Check if directory is outside working directory and request permission if needed
+			absWorkingDir, err := filepath.Abs(workingDir)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("error resolving working directory: %w", err)
+			}
+
+			absSearchPath, err := filepath.Abs(searchPath)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("error resolving search path: %w", err)
+			}
+
+			relPath, err := filepath.Rel(absWorkingDir, absSearchPath)
+			if err != nil || strings.HasPrefix(relPath, "..") {
+				// Directory is outside working directory, request permission
+				sessionID := GetSessionFromContext(ctx)
+				if sessionID == "" {
+					return ai.ToolResponse{}, fmt.Errorf("session ID is required for accessing directories outside working directory")
+				}
+
+				granted := permissions.Request(
+					permission.CreatePermissionRequest{
+						SessionID:   sessionID,
+						Path:        absSearchPath,
+						ToolCallID:  call.ID,
+						ToolName:    LSToolName,
+						Action:      "list",
+						Description: fmt.Sprintf("List directory outside working directory: %s", absSearchPath),
+						Params:      LSPermissionsParams(params),
+					},
+				)
+
+				if !granted {
+					return ai.ToolResponse{}, permission.ErrorPermissionDenied
+				}
+			}
+
+			output, err := ListDirectoryTree(searchPath, params.Ignore)
+			if err != nil {
+				return ai.ToolResponse{}, err
+			}
+
+			// Get file count for metadata
+			files, truncated, err := fsext.ListDirectory(searchPath, params.Ignore, MaxLSFiles)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("error listing directory for metadata: %w", err)
+			}
+
+			return ai.WithResponseMetadata(
+				ai.NewTextResponse(output),
+				LSResponseMetadata{
+					NumberOfFiles: len(files),
+					Truncated:     truncated,
+				},
+			), nil
+		})
+}
+
+func ListDirectoryTree(searchPath string, ignore []string) (string, error) {
+	if _, err := os.Stat(searchPath); os.IsNotExist(err) {
+		return "", fmt.Errorf("path does not exist: %s", searchPath)
+	}
+
+	files, truncated, err := fsext.ListDirectory(searchPath, ignore, MaxLSFiles)
+	if err != nil {
+		return "", fmt.Errorf("error listing directory: %w", err)
+	}
+
+	tree := createFileTree(files, searchPath)
+	output := printTree(tree, searchPath)
+
+	if truncated {
+		output = fmt.Sprintf("There are more than %d files in the directory. Use a more specific path or use the Glob tool to find specific files. The first %d files and directories are included below:\n\n%s", MaxLSFiles, MaxLSFiles, output)
+	}
+
+	return output, nil
+}
+
+func createFileTree(sortedPaths []string, rootPath string) []*TreeNode {
+	root := []*TreeNode{}
+	pathMap := make(map[string]*TreeNode)
+
+	for _, path := range sortedPaths {
+		relativePath := strings.TrimPrefix(path, rootPath)
+		parts := strings.Split(relativePath, string(filepath.Separator))
+		currentPath := ""
+		var parentPath string
+
+		var cleanParts []string
+		for _, part := range parts {
+			if part != "" {
+				cleanParts = append(cleanParts, part)
+			}
+		}
+		parts = cleanParts
+
+		if len(parts) == 0 {
+			continue
+		}
+
+		for i, part := range parts {
+			if currentPath == "" {
+				currentPath = part
+			} else {
+				currentPath = filepath.Join(currentPath, part)
+			}
+
+			if _, exists := pathMap[currentPath]; exists {
+				parentPath = currentPath
+				continue
+			}
+
+			isLastPart := i == len(parts)-1
+			isDir := !isLastPart || strings.HasSuffix(relativePath, string(filepath.Separator))
+			nodeType := "file"
+			if isDir {
+				nodeType = "directory"
+			}
+			newNode := &TreeNode{
+				Name:     part,
+				Path:     currentPath,
+				Type:     nodeType,
+				Children: []*TreeNode{},
+			}
+
+			pathMap[currentPath] = newNode
+
+			if i > 0 && parentPath != "" {
+				if parent, ok := pathMap[parentPath]; ok {
+					parent.Children = append(parent.Children, newNode)
+				}
+			} else {
+				root = append(root, newNode)
+			}
+
+			parentPath = currentPath
+		}
+	}
+
+	return root
+}
+
+func printTree(tree []*TreeNode, rootPath string) string {
+	var result strings.Builder
+
+	result.WriteString("- ")
+	result.WriteString(rootPath)
+	if !strings.HasSuffix(rootPath, string(filepath.Separator)) {
+		result.WriteByte(filepath.Separator)
+	}
+	result.WriteByte('\n')
+
+	for _, node := range tree {
+		printNode(&result, node, 1)
+	}
+
+	return result.String()
+}
+
+func printNode(builder *strings.Builder, node *TreeNode, level int) {
+	indent := strings.Repeat("  ", level)
+
+	nodeName := node.Name
+	if node.Type == "directory" {
+		nodeName = nodeName + string(filepath.Separator)
+	}
+
+	fmt.Fprintf(builder, "%s- %s\n", indent, nodeName)
+
+	if node.Type == "directory" && len(node.Children) > 0 {
+		for _, child := range node.Children {
+			printNode(builder, child, level+1)
+		}
+	}
+}

internal/agent/tools/ls.md 🔗

@@ -0,0 +1,35 @@
+Shows files and subdirectories in tree structure for exploring project organization.
+
+<usage>
+- Provide path to list (defaults to current working directory)
+- Optional glob patterns to ignore
+- Results displayed in tree structure
+</usage>
+
+<features>
+- Hierarchical view of files and directories
+- Auto-skips hidden files/directories (starting with '.')
+- Skips common system directories like __pycache__
+- Can filter files matching specific patterns
+</features>
+
+<limitations>
+- Results limited to 1000 files
+- Large directories truncated
+- No file sizes or permissions shown
+- Cannot recursively list all directories in large projects
+</limitations>
+
+<cross_platform>
+
+- Hidden file detection uses Unix convention (files starting with '.')
+- Windows hidden files (with hidden attribute) not auto-skipped
+- Common Windows directories (System32, Program Files) not in default ignore
+- Path separators handled automatically (/ and \ work)
+  </cross_platform>
+
+<tips>
+- Use Glob for finding files by name patterns instead of browsing
+- Use Grep for searching file contents
+- Combine with other tools for effective exploration
+</tips>
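
A hedged sketch of the JSON arguments an ls call serializes to, mirroring LSParams from ls.go above (illustrative copy; the path and ignore patterns are placeholders):

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // Illustrative copy of LSParams from ls.go above.
    type lsParams struct {
        Path   string   `json:"path"`
        Ignore []string `json:"ignore,omitempty"`
    }

    func main() {
        p := lsParams{Path: "/home/user/project", Ignore: []string{"node_modules", "dist"}}
        b, _ := json.Marshal(p)
        fmt.Println(string(b)) // {"path":"/home/user/project","ignore":["node_modules","dist"]}
    }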

internal/agent/tools/mcp-tools.go 🔗

@@ -0,0 +1,432 @@
+package tools
+
+import (
+	"cmp"
+	"context"
+	"encoding/json"
+	"errors"
+	"fmt"
+	"log/slog"
+	"maps"
+	"slices"
+	"strings"
+	"sync"
+	"time"
+
+	"github.com/charmbracelet/crush/internal/config"
+	"github.com/charmbracelet/crush/internal/csync"
+	"github.com/charmbracelet/crush/internal/home"
+	"github.com/charmbracelet/crush/internal/permission"
+	"github.com/charmbracelet/crush/internal/pubsub"
+	"github.com/charmbracelet/crush/internal/version"
+	"github.com/charmbracelet/fantasy/ai"
+	"github.com/mark3labs/mcp-go/client"
+	"github.com/mark3labs/mcp-go/client/transport"
+	"github.com/mark3labs/mcp-go/mcp"
+)
+
+// MCPState represents the current state of an MCP client
+type MCPState int
+
+const (
+	MCPStateDisabled MCPState = iota
+	MCPStateStarting
+	MCPStateConnected
+	MCPStateError
+)
+
+func (s MCPState) String() string {
+	switch s {
+	case MCPStateDisabled:
+		return "disabled"
+	case MCPStateStarting:
+		return "starting"
+	case MCPStateConnected:
+		return "connected"
+	case MCPStateError:
+		return "error"
+	default:
+		return "unknown"
+	}
+}
+
+// MCPEventType represents the type of MCP event
+type MCPEventType string
+
+const (
+	MCPEventStateChanged MCPEventType = "state_changed"
+)
+
+// MCPEvent represents an event in the MCP system
+type MCPEvent struct {
+	Type      MCPEventType
+	Name      string
+	State     MCPState
+	Error     error
+	ToolCount int
+}
+
+// MCPClientInfo holds information about an MCP client's state
+type MCPClientInfo struct {
+	Name        string
+	State       MCPState
+	Error       error
+	Client      *client.Client
+	ToolCount   int
+	ConnectedAt time.Time
+}
+
+var (
+	mcpToolsOnce sync.Once
+	mcpTools     []ai.AgentTool
+	mcpClients   = csync.NewMap[string, *client.Client]()
+	mcpStates    = csync.NewMap[string, MCPClientInfo]()
+	mcpBroker    = pubsub.NewBroker[MCPEvent]()
+)
+
+type McpTool struct {
+	mcpName         string
+	tool            mcp.Tool
+	permissions     permission.Service
+	workingDir      string
+	providerOptions ai.ProviderOptions
+}
+
+func (m *McpTool) SetProviderOptions(opts ai.ProviderOptions) {
+	m.providerOptions = opts
+}
+
+func (m *McpTool) ProviderOptions() ai.ProviderOptions {
+	return m.providerOptions
+}
+
+func (b *McpTool) Name() string {
+	return fmt.Sprintf("mcp_%s_%s", b.mcpName, b.tool.Name)
+}
+
+func (b *McpTool) Info() ai.ToolInfo {
+	required := b.tool.InputSchema.Required
+	if required == nil {
+		required = make([]string, 0)
+	}
+	parameters := b.tool.InputSchema.Properties
+	if parameters == nil {
+		parameters = make(map[string]any)
+	}
+	return ai.ToolInfo{
+		Name:        fmt.Sprintf("mcp_%s_%s", b.mcpName, b.tool.Name),
+		Description: b.tool.Description,
+		Parameters:  parameters,
+		Required:    required,
+	}
+}
+
+func runTool(ctx context.Context, name, toolName string, input string) (ai.ToolResponse, error) {
+	var args map[string]any
+	if err := json.Unmarshal([]byte(input), &args); err != nil {
+		return ai.NewTextErrorResponse(fmt.Sprintf("error parsing parameters: %s", err)), nil
+	}
+
+	c, err := getOrRenewClient(ctx, name)
+	if err != nil {
+		return ai.NewTextErrorResponse(err.Error()), nil
+	}
+	result, err := c.CallTool(ctx, mcp.CallToolRequest{
+		Params: mcp.CallToolParams{
+			Name:      toolName,
+			Arguments: args,
+		},
+	})
+	if err != nil {
+		return ai.NewTextErrorResponse(err.Error()), nil
+	}
+
+	output := make([]string, 0, len(result.Content))
+	for _, v := range result.Content {
+		if v, ok := v.(mcp.TextContent); ok {
+			output = append(output, v.Text)
+		} else {
+			output = append(output, fmt.Sprintf("%v", v))
+		}
+	}
+	return ai.NewTextResponse(strings.Join(output, "\n")), nil
+}
+
+func getOrRenewClient(ctx context.Context, name string) (*client.Client, error) {
+	c, ok := mcpClients.Get(name)
+	if !ok {
+		return nil, fmt.Errorf("mcp '%s' not available", name)
+	}
+
+	cfg := config.Get()
+	m := cfg.MCP[name]
+	state, _ := mcpStates.Get(name)
+
+	timeout := mcpTimeout(m)
+	pingCtx, cancel := context.WithTimeout(ctx, timeout)
+	defer cancel()
+	err := c.Ping(pingCtx)
+	if err == nil {
+		return c, nil
+	}
+	updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, state.ToolCount)
+
+	c, err = createAndInitializeClient(ctx, name, m, cfg.Resolver())
+	if err != nil {
+		return nil, err
+	}
+
+	updateMCPState(name, MCPStateConnected, nil, c, state.ToolCount)
+	mcpClients.Set(name, c)
+	return c, nil
+}
+
+func (b *McpTool) Run(ctx context.Context, params ai.ToolCall) (ai.ToolResponse, error) {
+	sessionID := GetSessionFromContext(ctx)
+	if sessionID == "" {
+		return ai.ToolResponse{}, fmt.Errorf("session ID is required for executing an MCP tool")
+	}
+	permissionDescription := fmt.Sprintf("execute %s with the following parameters:", b.Info().Name)
+	p := b.permissions.Request(
+		permission.CreatePermissionRequest{
+			SessionID:   sessionID,
+			ToolCallID:  params.ID,
+			Path:        b.workingDir,
+			ToolName:    b.Info().Name,
+			Action:      "execute",
+			Description: permissionDescription,
+			Params:      params.Input,
+		},
+	)
+	if !p {
+		return ai.ToolResponse{}, permission.ErrorPermissionDenied
+	}
+
+	return runTool(ctx, b.mcpName, b.tool.Name, params.Input)
+}
+
+func getTools(ctx context.Context, name string, permissions permission.Service, c *client.Client, workingDir string) []ai.AgentTool {
+	result, err := c.ListTools(ctx, mcp.ListToolsRequest{})
+	if err != nil {
+		slog.Error("error listing tools", "error", err)
+		updateMCPState(name, MCPStateError, err, nil, 0)
+		c.Close()
+		mcpClients.Del(name)
+		return nil
+	}
+	mcpTools := make([]ai.AgentTool, 0, len(result.Tools))
+	for _, tool := range result.Tools {
+		mcpTools = append(mcpTools, &McpTool{
+			mcpName:     name,
+			tool:        tool,
+			permissions: permissions,
+			workingDir:  workingDir,
+		})
+	}
+	return mcpTools
+}
+
+// SubscribeMCPEvents returns a channel for MCP events
+func SubscribeMCPEvents(ctx context.Context) <-chan pubsub.Event[MCPEvent] {
+	return mcpBroker.Subscribe(ctx)
+}
+
+// GetMCPStates returns the current state of all MCP clients
+func GetMCPStates() map[string]MCPClientInfo {
+	return maps.Collect(mcpStates.Seq2())
+}
+
+// GetMCPState returns the state of a specific MCP client
+func GetMCPState(name string) (MCPClientInfo, bool) {
+	return mcpStates.Get(name)
+}
+
+// updateMCPState updates the state of an MCP client and publishes an event
+func updateMCPState(name string, state MCPState, err error, client *client.Client, toolCount int) {
+	info := MCPClientInfo{
+		Name:      name,
+		State:     state,
+		Error:     err,
+		Client:    client,
+		ToolCount: toolCount,
+	}
+	if state == MCPStateConnected {
+		info.ConnectedAt = time.Now()
+	}
+	mcpStates.Set(name, info)
+
+	// Publish state change event
+	mcpBroker.Publish(pubsub.UpdatedEvent, MCPEvent{
+		Type:      MCPEventStateChanged,
+		Name:      name,
+		State:     state,
+		Error:     err,
+		ToolCount: toolCount,
+	})
+}
+
+// CloseMCPClients closes all MCP clients. This should be called during application shutdown.
+func CloseMCPClients() error {
+	var errs []error
+	for name, c := range mcpClients.Seq2() {
+		if err := c.Close(); err != nil {
+			errs = append(errs, fmt.Errorf("close mcp: %s: %w", name, err))
+		}
+	}
+	mcpBroker.Shutdown()
+	return errors.Join(errs...)
+}
+
+var mcpInitRequest = mcp.InitializeRequest{
+	Params: mcp.InitializeParams{
+		ProtocolVersion: mcp.LATEST_PROTOCOL_VERSION,
+		ClientInfo: mcp.Implementation{
+			Name:    "Crush",
+			Version: version.Version,
+		},
+	},
+}
+
+func GetMCPTools(ctx context.Context, permissions permission.Service, cfg *config.Config) []ai.AgentTool {
+	var wg sync.WaitGroup
+	result := csync.NewSlice[ai.AgentTool]()
+
+	// Initialize states for all configured MCPs
+	for name, m := range cfg.MCP {
+		if m.Disabled {
+			updateMCPState(name, MCPStateDisabled, nil, nil, 0)
+			slog.Debug("skipping disabled mcp", "name", name)
+			continue
+		}
+
+		// Set initial starting state
+		updateMCPState(name, MCPStateStarting, nil, nil, 0)
+
+		wg.Add(1)
+		go func(name string, m config.MCPConfig) {
+			defer func() {
+				wg.Done()
+				if r := recover(); r != nil {
+					var err error
+					switch v := r.(type) {
+					case error:
+						err = v
+					case string:
+						err = fmt.Errorf("panic: %s", v)
+					default:
+						err = fmt.Errorf("panic: %v", v)
+					}
+					updateMCPState(name, MCPStateError, err, nil, 0)
+					slog.Error("panic in mcp client initialization", "error", err, "name", name)
+				}
+			}()
+
+			ctx, cancel := context.WithTimeout(ctx, mcpTimeout(m))
+			defer cancel()
+			c, err := createAndInitializeClient(ctx, name, m, cfg.Resolver())
+			if err != nil {
+				return
+			}
+			mcpClients.Set(name, c)
+
+			tools := getTools(ctx, name, permissions, c, cfg.WorkingDir())
+			updateMCPState(name, MCPStateConnected, nil, c, len(tools))
+			result.Append(tools...)
+		}(name, m)
+	}
+	wg.Wait()
+	return slices.Collect(result.Seq())
+}
+
+func createAndInitializeClient(ctx context.Context, name string, m config.MCPConfig, resolver config.VariableResolver) (*client.Client, error) {
+	c, err := createMcpClient(name, m, resolver)
+	if err != nil {
+		updateMCPState(name, MCPStateError, err, nil, 0)
+		slog.Error("error creating mcp client", "error", err, "name", name)
+		return nil, err
+	}
+
+	timeout := mcpTimeout(m)
+	initCtx, cancel := context.WithTimeout(ctx, timeout)
+	defer cancel()
+
+	// Only call Start() for non-stdio clients, as stdio clients auto-start
+	if m.Type != config.MCPStdio {
+		if err := c.Start(initCtx); err != nil {
+			updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
+			slog.Error("error starting mcp client", "error", err, "name", name)
+			_ = c.Close()
+			return nil, err
+		}
+	}
+	if _, err := c.Initialize(initCtx, mcpInitRequest); err != nil {
+		updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
+		slog.Error("error initializing mcp client", "error", err, "name", name)
+		_ = c.Close()
+		return nil, err
+	}
+
+	slog.Info("Initialized mcp client", "name", name)
+	return c, nil
+}
+
+func maybeTimeoutErr(err error, timeout time.Duration) error {
+	if errors.Is(err, context.DeadlineExceeded) {
+		return fmt.Errorf("timed out after %s", timeout)
+	}
+	return err
+}
+
+func createMcpClient(name string, m config.MCPConfig, resolver config.VariableResolver) (*client.Client, error) {
+	switch m.Type {
+	case config.MCPStdio:
+		command, err := resolver.ResolveValue(m.Command)
+		if err != nil {
+			return nil, fmt.Errorf("invalid mcp command: %w", err)
+		}
+		if strings.TrimSpace(command) == "" {
+			return nil, fmt.Errorf("mcp stdio config requires a non-empty 'command' field")
+		}
+		return client.NewStdioMCPClientWithOptions(
+			home.Long(command),
+			m.ResolvedEnv(),
+			m.Args,
+			transport.WithCommandLogger(mcpLogger{name: name}),
+		)
+	case config.MCPHttp:
+		if strings.TrimSpace(m.URL) == "" {
+			return nil, fmt.Errorf("mcp http config requires a non-empty 'url' field")
+		}
+		return client.NewStreamableHttpClient(
+			m.URL,
+			transport.WithHTTPHeaders(m.ResolvedHeaders()),
+			transport.WithHTTPLogger(mcpLogger{name: name}),
+		)
+	case config.MCPSse:
+		if strings.TrimSpace(m.URL) == "" {
+			return nil, fmt.Errorf("mcp sse config requires a non-empty 'url' field")
+		}
+		return client.NewSSEMCPClient(
+			m.URL,
+			client.WithHeaders(m.ResolvedHeaders()),
+			transport.WithSSELogger(mcpLogger{name: name}),
+		)
+	default:
+		return nil, fmt.Errorf("unsupported mcp type: %s", m.Type)
+	}
+}
+
+// mcpLogger forwards MCP client log output to slog.
+type mcpLogger struct{ name string }
+
+func (l mcpLogger) Errorf(format string, v ...any) {
+	slog.Error(fmt.Sprintf(format, v...), "name", l.name)
+}
+
+func (l mcpLogger) Infof(format string, v ...any) {
+	slog.Info(fmt.Sprintf(format, v...), "name", l.name)
+}
+
+func mcpTimeout(m config.MCPConfig) time.Duration {
+	return time.Duration(cmp.Or(m.Timeout, 15)) * time.Second
+}
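
Because the MCP state tracking above is exported (GetMCPStates, GetMCPState, SubscribeMCPEvents), other parts of the app can observe client health. A minimal sketch of reading the snapshot, assuming it runs inside this module (the tools package is internal) with log/slog and the tools package imported:

    // logMCPStates is illustrative only: dump the current MCP client states,
    // e.g. after GetMCPTools has finished.
    func logMCPStates() {
        for name, info := range tools.GetMCPStates() {
            if info.Error != nil {
                slog.Warn("mcp client", "name", name, "state", info.State.String(), "error", info.Error)
                continue
            }
            slog.Info("mcp client", "name", name, "state", info.State.String(), "tools", info.ToolCount)
        }
    }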

internal/agent/tools/multiedit.go 🔗

@@ -0,0 +1,366 @@
+package tools
+
+import (
+	"context"
+	_ "embed"
+	"fmt"
+	"log/slog"
+	"os"
+	"path/filepath"
+	"strings"
+	"time"
+
+	"github.com/charmbracelet/crush/internal/csync"
+	"github.com/charmbracelet/crush/internal/diff"
+	"github.com/charmbracelet/crush/internal/fsext"
+	"github.com/charmbracelet/crush/internal/history"
+	"github.com/charmbracelet/crush/internal/lsp"
+	"github.com/charmbracelet/crush/internal/permission"
+	"github.com/charmbracelet/fantasy/ai"
+)
+
+type MultiEditOperation struct {
+	OldString  string `json:"old_string" description:"The text to replace"`
+	NewString  string `json:"new_string" description:"The text to replace it with"`
+	ReplaceAll bool   `json:"replace_all,omitempty" description:"Replace all occurrences of old_string (default false)."`
+}
+
+type MultiEditParams struct {
+	FilePath string               `json:"file_path" description:"The absolute path to the file to modify"`
+	Edits    []MultiEditOperation `json:"edits" description:"Array of edit operations to perform sequentially on the file"`
+}
+
+type MultiEditPermissionsParams struct {
+	FilePath   string `json:"file_path"`
+	OldContent string `json:"old_content,omitempty"`
+	NewContent string `json:"new_content,omitempty"`
+}
+
+type MultiEditResponseMetadata struct {
+	Additions    int    `json:"additions"`
+	Removals     int    `json:"removals"`
+	OldContent   string `json:"old_content,omitempty"`
+	NewContent   string `json:"new_content,omitempty"`
+	EditsApplied int    `json:"edits_applied"`
+}
+
+const MultiEditToolName = "multiedit"
+
+//go:embed multiedit.md
+var multieditDescription []byte
+
+func NewMultiEditTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, files history.Service, workingDir string) ai.AgentTool {
+	return ai.NewAgentTool(
+		MultiEditToolName,
+		string(multieditDescription),
+		func(ctx context.Context, params MultiEditParams, call ai.ToolCall) (ai.ToolResponse, error) {
+			if params.FilePath == "" {
+				return ai.NewTextErrorResponse("file_path is required"), nil
+			}
+
+			if len(params.Edits) == 0 {
+				return ai.NewTextErrorResponse("at least one edit operation is required"), nil
+			}
+
+			if !filepath.IsAbs(params.FilePath) {
+				params.FilePath = filepath.Join(workingDir, params.FilePath)
+			}
+
+			// Validate all edits before applying any
+			if err := validateEdits(params.Edits); err != nil {
+				return ai.NewTextErrorResponse(err.Error()), nil
+			}
+
+			var response ai.ToolResponse
+			var err error
+
+			editCtx := editContext{ctx, permissions, files, workingDir}
+			// Handle file creation case (first edit has empty old_string)
+			if len(params.Edits) > 0 && params.Edits[0].OldString == "" {
+				response, err = processMultiEditWithCreation(editCtx, params, call)
+			} else {
+				response, err = processMultiEditExistingFile(editCtx, params, call)
+			}
+
+			if err != nil {
+				return response, err
+			}
+
+			if response.IsError {
+				return response, nil
+			}
+
+			// Notify LSP clients about the change
+			notifyLSPs(ctx, lspClients, params.FilePath)
+
+			// Wait for LSP diagnostics and add them to the response
+			text := fmt.Sprintf("<result>\n%s\n</result>\n", response.Content)
+			text += getDiagnostics(params.FilePath, lspClients)
+			response.Content = text
+			return response, nil
+		})
+}
+
+func validateEdits(edits []MultiEditOperation) error {
+	for i, edit := range edits {
+		if edit.OldString == edit.NewString {
+			return fmt.Errorf("edit %d: old_string and new_string are identical", i+1)
+		}
+		// Only the first edit can have empty old_string (for file creation)
+		if i > 0 && edit.OldString == "" {
+			return fmt.Errorf("edit %d: only the first edit can have empty old_string (for file creation)", i+1)
+		}
+	}
+	return nil
+}
+
+func processMultiEditWithCreation(edit editContext, params MultiEditParams, call ai.ToolCall) (ai.ToolResponse, error) {
+	// First edit creates the file
+	firstEdit := params.Edits[0]
+	if firstEdit.OldString != "" {
+		return ai.NewTextErrorResponse("first edit must have empty old_string for file creation"), nil
+	}
+
+	// Check if file already exists
+	if _, err := os.Stat(params.FilePath); err == nil {
+		return ai.NewTextErrorResponse(fmt.Sprintf("file already exists: %s", params.FilePath)), nil
+	} else if !os.IsNotExist(err) {
+		return ai.ToolResponse{}, fmt.Errorf("failed to access file: %w", err)
+	}
+
+	// Create parent directories
+	dir := filepath.Dir(params.FilePath)
+	if err := os.MkdirAll(dir, 0o755); err != nil {
+		return ai.ToolResponse{}, fmt.Errorf("failed to create parent directories: %w", err)
+	}
+
+	// Start with the content from the first edit
+	currentContent := firstEdit.NewString
+
+	// Apply remaining edits to the content
+	for i := 1; i < len(params.Edits); i++ {
+		edit := params.Edits[i]
+		newContent, err := applyEditToContent(currentContent, edit)
+		if err != nil {
+			return ai.NewTextErrorResponse(fmt.Sprintf("edit %d failed: %s", i+1, err.Error())), nil
+		}
+		currentContent = newContent
+	}
+
+	// Get session and message IDs
+	sessionID := GetSessionFromContext(edit.ctx)
+	if sessionID == "" {
+		return ai.ToolResponse{}, fmt.Errorf("session ID is required for creating a new file")
+	}
+
+	// Check permissions
+	_, additions, removals := diff.GenerateDiff("", currentContent, strings.TrimPrefix(params.FilePath, edit.workingDir))
+
+	p := edit.permissions.Request(permission.CreatePermissionRequest{
+		SessionID:   sessionID,
+		Path:        fsext.PathOrPrefix(params.FilePath, edit.workingDir),
+		ToolCallID:  call.ID,
+		ToolName:    MultiEditToolName,
+		Action:      "write",
+		Description: fmt.Sprintf("Create file %s with %d edits", params.FilePath, len(params.Edits)),
+		Params: MultiEditPermissionsParams{
+			FilePath:   params.FilePath,
+			OldContent: "",
+			NewContent: currentContent,
+		},
+	})
+	if !p {
+		return ai.ToolResponse{}, permission.ErrorPermissionDenied
+	}
+
+	// Write the file
+	err := os.WriteFile(params.FilePath, []byte(currentContent), 0o644)
+	if err != nil {
+		return ai.ToolResponse{}, fmt.Errorf("failed to write file: %w", err)
+	}
+
+	// Update file history
+	_, err = edit.files.Create(edit.ctx, sessionID, params.FilePath, "")
+	if err != nil {
+		return ai.ToolResponse{}, fmt.Errorf("error creating file history: %w", err)
+	}
+
+	_, err = edit.files.CreateVersion(edit.ctx, sessionID, params.FilePath, currentContent)
+	if err != nil {
+		slog.Debug("Error creating file history version", "error", err)
+	}
+
+	recordFileWrite(params.FilePath)
+	recordFileRead(params.FilePath)
+
+	return ai.WithResponseMetadata(
+		ai.NewTextResponse(fmt.Sprintf("File created with %d edits: %s", len(params.Edits), params.FilePath)),
+		MultiEditResponseMetadata{
+			OldContent:   "",
+			NewContent:   currentContent,
+			Additions:    additions,
+			Removals:     removals,
+			EditsApplied: len(params.Edits),
+		},
+	), nil
+}
+
+func processMultiEditExistingFile(edit editContext, params MultiEditParams, call ai.ToolCall) (ai.ToolResponse, error) {
+	// Validate file exists and is readable
+	fileInfo, err := os.Stat(params.FilePath)
+	if err != nil {
+		if os.IsNotExist(err) {
+			return ai.NewTextErrorResponse(fmt.Sprintf("file not found: %s", params.FilePath)), nil
+		}
+		return ai.ToolResponse{}, fmt.Errorf("failed to access file: %w", err)
+	}
+
+	if fileInfo.IsDir() {
+		return ai.NewTextErrorResponse(fmt.Sprintf("path is a directory, not a file: %s", params.FilePath)), nil
+	}
+
+	// Check if file was read before editing
+	if getLastReadTime(params.FilePath).IsZero() {
+		return ai.NewTextErrorResponse("you must read the file before editing it. Use the View tool first"), nil
+	}
+
+	// Check if file was modified since last read
+	modTime := fileInfo.ModTime()
+	lastRead := getLastReadTime(params.FilePath)
+	if modTime.After(lastRead) {
+		return ai.NewTextErrorResponse(
+			fmt.Sprintf("file %s has been modified since it was last read (mod time: %s, last read: %s)",
+				params.FilePath, modTime.Format(time.RFC3339), lastRead.Format(time.RFC3339),
+			)), nil
+	}
+
+	// Read current file content
+	content, err := os.ReadFile(params.FilePath)
+	if err != nil {
+		return ai.ToolResponse{}, fmt.Errorf("failed to read file: %w", err)
+	}
+
+	oldContent, isCrlf := fsext.ToUnixLineEndings(string(content))
+	currentContent := oldContent
+
+	// Apply all edits sequentially
+	for i, edit := range params.Edits {
+		newContent, err := applyEditToContent(currentContent, edit)
+		if err != nil {
+			return ai.NewTextErrorResponse(fmt.Sprintf("edit %d failed: %s", i+1, err.Error())), nil
+		}
+		currentContent = newContent
+	}
+
+	// Check if content actually changed
+	if oldContent == currentContent {
+		return ai.NewTextErrorResponse("no changes made - all edits resulted in identical content"), nil
+	}
+
+	// Get the session ID
+	sessionID := GetSessionFromContext(edit.ctx)
+	if sessionID == "" {
+		return ai.ToolResponse{}, fmt.Errorf("session ID is required for editing file")
+	}
+
+	// Generate diff and check permissions
+	_, additions, removals := diff.GenerateDiff(oldContent, currentContent, strings.TrimPrefix(params.FilePath, edit.workingDir))
+	p := edit.permissions.Request(permission.CreatePermissionRequest{
+		SessionID:   sessionID,
+		Path:        fsext.PathOrPrefix(params.FilePath, edit.workingDir),
+		ToolCallID:  call.ID,
+		ToolName:    MultiEditToolName,
+		Action:      "write",
+		Description: fmt.Sprintf("Apply %d edits to file %s", len(params.Edits), params.FilePath),
+		Params: MultiEditPermissionsParams{
+			FilePath:   params.FilePath,
+			OldContent: oldContent,
+			NewContent: currentContent,
+		},
+	})
+	if !p {
+		return ai.ToolResponse{}, permission.ErrorPermissionDenied
+	}
+
+	if isCrlf {
+		currentContent, _ = fsext.ToWindowsLineEndings(currentContent)
+	}
+
+	// Write the updated content
+	err = os.WriteFile(params.FilePath, []byte(currentContent), 0o644)
+	if err != nil {
+		return ai.ToolResponse{}, fmt.Errorf("failed to write file: %w", err)
+	}
+
+	// Update file history
+	file, err := edit.files.GetByPathAndSession(edit.ctx, params.FilePath, sessionID)
+	if err != nil {
+		_, err = edit.files.Create(edit.ctx, sessionID, params.FilePath, oldContent)
+		if err != nil {
+			return ai.ToolResponse{}, fmt.Errorf("error creating file history: %w", err)
+		}
+	}
+	if file.Content != oldContent {
+		// User manually changed the content, store an intermediate version
+		_, err = edit.files.CreateVersion(edit.ctx, sessionID, params.FilePath, oldContent)
+		if err != nil {
+			slog.Debug("Error creating file history version", "error", err)
+		}
+	}
+
+	// Store the new version
+	_, err = edit.files.CreateVersion(edit.ctx, sessionID, params.FilePath, currentContent)
+	if err != nil {
+		slog.Debug("Error creating file history version", "error", err)
+	}
+
+	recordFileWrite(params.FilePath)
+	recordFileRead(params.FilePath)
+
+	return ai.WithResponseMetadata(
+		ai.NewTextResponse(fmt.Sprintf("Applied %d edits to file: %s", len(params.Edits), params.FilePath)),
+		MultiEditResponseMetadata{
+			OldContent:   oldContent,
+			NewContent:   currentContent,
+			Additions:    additions,
+			Removals:     removals,
+			EditsApplied: len(params.Edits),
+		},
+	), nil
+}
+
+func applyEditToContent(content string, edit MultiEditOperation) (string, error) {
+	if edit.OldString == "" && edit.NewString == "" {
+		return content, nil
+	}
+
+	if edit.OldString == "" {
+		return "", fmt.Errorf("old_string cannot be empty for content replacement")
+	}
+
+	var newContent string
+	var replacementCount int
+
+	if edit.ReplaceAll {
+		newContent = strings.ReplaceAll(content, edit.OldString, edit.NewString)
+		replacementCount = strings.Count(content, edit.OldString)
+		if replacementCount == 0 {
+			return "", fmt.Errorf("old_string not found in content. Make sure it matches exactly, including whitespace and line breaks")
+		}
+	} else {
+		index := strings.Index(content, edit.OldString)
+		if index == -1 {
+			return "", fmt.Errorf("old_string not found in content. Make sure it matches exactly, including whitespace and line breaks")
+		}
+
+		lastIndex := strings.LastIndex(content, edit.OldString)
+		if index != lastIndex {
+			return "", fmt.Errorf("old_string appears multiple times in the content. Please provide more context to ensure a unique match, or set replace_all to true")
+		}
+
+		newContent = content[:index] + edit.NewString + content[index+len(edit.OldString):]
+		replacementCount = 1
+	}
+
+	return newContent, nil
+}
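
A minimal sketch of how the sequential edit loop above behaves; the driver function and its inputs are invented for illustration and are not part of this change — only `MultiEditOperation` and `applyEditToContent` come from the diff.

```go
package tools

import "log"

// exampleSequentialEdits exercises applyEditToContent to show the
// sequential, all-or-nothing semantics of the multiedit tool: each edit
// operates on the result of the previous one, and the first failure aborts.
func exampleSequentialEdits() {
	edits := []MultiEditOperation{
		{OldString: "foo()", NewString: "bar()"},           // must match exactly once
		{OldString: "x", NewString: "y", ReplaceAll: true}, // replaces every occurrence
	}
	content := "foo()\nx := x + 1\n"
	for i, e := range edits {
		next, err := applyEditToContent(content, e)
		if err != nil {
			// e.g. old_string not found, or ambiguous without replace_all
			log.Printf("edit %d failed: %v", i+1, err)
			return
		}
		content = next
	}
	// content is now "bar()\ny := y + 1\n"
}
```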

internal/agent/tools/multiedit.md 🔗

@@ -0,0 +1,50 @@
+Makes multiple edits to a single file in one operation. Built on Edit tool for efficient multiple find-and-replace operations. Prefer over Edit tool for multiple edits to same file.
+
+<prerequisites>
+1. Use View tool to understand file contents and context
+2. Verify directory path is correct
+</prerequisites>
+
+<parameters>
+1. file_path: Absolute path to file (required)
+2. edits: Array of edit operations, each containing:
+   - old_string: Text to replace (must match exactly including whitespace/indentation)
+   - new_string: Replacement text
+   - replace_all: Replace all occurrences (optional, defaults to false)
+</parameters>
+
+<operation>
+- Edits applied sequentially in provided order
+- Each edit operates on result of previous edit
+- All edits must be valid for operation to succeed - if any fails, none applied
+- Ideal for several changes to different parts of same file
+</operation>
+
+<critical_requirements>
+
+1. All edits follow same requirements as single Edit tool
+2. Edits are atomic - either all succeed or none applied
+3. Plan edits carefully to avoid conflicts between sequential operations
+   </critical_requirements>
+
+<warnings>
+- Tool fails if old_string doesn't match file contents exactly (including whitespace)
+- Tool fails if old_string and new_string are identical
+- Earlier edits may affect text that later edits try to find - plan sequence carefully
+</warnings>
+
+<best_practices>
+
+- Ensure all edits result in correct, idiomatic code
+- Don't leave code in broken state
+- Use absolute file paths (starting with /)
+- Use replace_all for renaming variables across file
+- Avoid adding emojis unless user explicitly requests
+  </best_practices>
+
+<new_file_creation>
+
+- Provide new file path (including directory if needed)
+- First edit: empty old_string, new file contents as new_string
+- Subsequent edits: normal edit operations on created content
+  </new_file_creation>

internal/agent/tools/rg.go 🔗

@@ -0,0 +1,53 @@
+package tools
+
+import (
+	"context"
+	"log/slog"
+	"os/exec"
+	"path/filepath"
+	"strings"
+	"sync"
+
+	"github.com/charmbracelet/crush/internal/log"
+)
+
+var getRg = sync.OnceValue(func() string {
+	path, err := exec.LookPath("rg")
+	if err != nil {
+		if log.Initialized() {
+			slog.Warn("Ripgrep (rg) not found in $PATH. Some grep features might be limited or slower.")
+		}
+		return ""
+	}
+	return path
+})
+
+func getRgCmd(ctx context.Context, globPattern string) *exec.Cmd {
+	name := getRg()
+	if name == "" {
+		return nil
+	}
+	args := []string{"--files", "-L", "--null"}
+	if globPattern != "" {
+		if !filepath.IsAbs(globPattern) && !strings.HasPrefix(globPattern, "/") {
+			globPattern = "/" + globPattern
+		}
+		args = append(args, "--glob", globPattern)
+	}
+	return exec.CommandContext(ctx, name, args...)
+}
+
+func getRgSearchCmd(ctx context.Context, pattern, path, include string) *exec.Cmd {
+	name := getRg()
+	if name == "" {
+		return nil
+	}
+	// Use -n to show line numbers, -0 for null separation to handle Windows paths
+	args := []string{"-H", "-n", "-0", pattern}
+	if include != "" {
+		args = append(args, "--glob", include)
+	}
+	args = append(args, path)
+
+	return exec.CommandContext(ctx, name, args...)
+}
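
A minimal sketch (not part of the diff) of how a caller might consume `getRgCmd`: the command lists files NUL-separated (because of `--null`), relative to the process working directory, and the helper returns nil when ripgrep is not installed, so the caller must fall back to another strategy.

```go
package tools

import (
	"bytes"
	"context"
)

// exampleListFiles runs the rg helper above and splits its NUL-separated
// output into a file list. It returns a nil slice when rg is unavailable.
func exampleListFiles(ctx context.Context, glob string) ([]string, error) {
	cmd := getRgCmd(ctx, glob)
	if cmd == nil {
		return nil, nil // rg not found in $PATH; caller should use a fallback
	}
	out, err := cmd.Output()
	if err != nil {
		return nil, err
	}
	var files []string
	for _, f := range bytes.Split(out, []byte{0}) {
		if len(f) > 0 {
			files = append(files, string(f))
		}
	}
	return files, nil
}
```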

internal/agent/tools/safe.go 🔗

@@ -0,0 +1,70 @@
+package tools
+
+import "runtime"
+
+var safeCommands = []string{
+	// Bash builtins and core utils
+	"cal",
+	"date",
+	"df",
+	"du",
+	"echo",
+	"env",
+	"free",
+	"groups",
+	"hostname",
+	"id",
+	"kill",
+	"killall",
+	"ls",
+	"nice",
+	"nohup",
+	"printenv",
+	"ps",
+	"pwd",
+	"set",
+	"time",
+	"timeout",
+	"top",
+	"type",
+	"uname",
+	"unset",
+	"uptime",
+	"whatis",
+	"whereis",
+	"which",
+	"whoami",
+
+	// Git
+	"git blame",
+	"git branch",
+	"git config --get",
+	"git config --list",
+	"git describe",
+	"git diff",
+	"git grep",
+	"git log",
+	"git ls-files",
+	"git ls-remote",
+	"git remote",
+	"git rev-parse",
+	"git shortlog",
+	"git show",
+	"git status",
+	"git tag",
+}
+
+func init() {
+	if runtime.GOOS == "windows" {
+		safeCommands = append(
+			safeCommands,
+			// Windows-specific commands
+			"ipconfig",
+			"nslookup",
+			"ping",
+			"systeminfo",
+			"tasklist",
+			"where",
+		)
+	}
+}
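
One plausible way to consult `safeCommands` is a prefix match against the command line; the real allow-list check lives in the bash tool and may differ, so this is only an illustrative sketch.

```go
package tools

import "strings"

// isSafeCommandSketch reports whether a command line starts with one of the
// entries in safeCommands (exact match, or an entry followed by a space).
// Hypothetical helper for illustration; not part of this change.
func isSafeCommandSketch(cmdLine string) bool {
	cmdLine = strings.TrimSpace(cmdLine)
	for _, safe := range safeCommands {
		if cmdLine == safe || strings.HasPrefix(cmdLine, safe+" ") {
			return true
		}
	}
	return false
}
```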

internal/agent/tools/sourcegraph.go 🔗

@@ -0,0 +1,265 @@
+package tools
+
+import (
+	"bytes"
+	"context"
+	_ "embed"
+	"encoding/json"
+	"fmt"
+	"io"
+	"net/http"
+	"strings"
+	"time"
+
+	"github.com/charmbracelet/fantasy/ai"
+)
+
+type SourcegraphParams struct {
+	Query         string `json:"query" description:"The Sourcegraph search query"`
+	Count         int    `json:"count,omitempty" description:"Optional number of results to return (default: 10, max: 20)"`
+	ContextWindow int    `json:"context_window,omitempty" description:"The context around the match to return (default: 10 lines)"`
+	Timeout       int    `json:"timeout,omitempty" description:"Optional timeout in seconds (max 120)"`
+}
+
+type SourcegraphResponseMetadata struct {
+	NumberOfMatches int  `json:"number_of_matches"`
+	Truncated       bool `json:"truncated"`
+}
+
+const SourcegraphToolName = "sourcegraph"
+
+//go:embed sourcegraph.md
+var sourcegraphDescription []byte
+
+func NewSourcegraphTool() ai.AgentTool {
+	client := &http.Client{
+		Timeout: 30 * time.Second,
+		Transport: &http.Transport{
+			MaxIdleConns:        100,
+			MaxIdleConnsPerHost: 10,
+			IdleConnTimeout:     90 * time.Second,
+		},
+	}
+	return ai.NewAgentTool(
+		SourcegraphToolName,
+		string(sourcegraphDescription),
+		func(ctx context.Context, params SourcegraphParams, call ai.ToolCall) (ai.ToolResponse, error) {
+			if params.Query == "" {
+				return ai.NewTextErrorResponse("Query parameter is required"), nil
+			}
+
+			if params.Count <= 0 {
+				params.Count = 10
+			} else if params.Count > 20 {
+				params.Count = 20 // Limit to 20 results
+			}
+
+			if params.ContextWindow <= 0 {
+				params.ContextWindow = 10 // Default context window
+			}
+
+			// Handle timeout with context
+			requestCtx := ctx
+			if params.Timeout > 0 {
+				maxTimeout := 120 // 2 minutes
+				if params.Timeout > maxTimeout {
+					params.Timeout = maxTimeout
+				}
+				var cancel context.CancelFunc
+				requestCtx, cancel = context.WithTimeout(ctx, time.Duration(params.Timeout)*time.Second)
+				defer cancel()
+			}
+
+			type graphqlRequest struct {
+				Query     string `json:"query"`
+				Variables struct {
+					Query string `json:"query"`
+				} `json:"variables"`
+			}
+
+			request := graphqlRequest{
+				Query: "query Search($query: String!) { search(query: $query, version: V2, patternType: keyword ) { results { matchCount, limitHit, resultCount, approximateResultCount, missing { name }, timedout { name }, indexUnavailable, results { __typename, ... on FileMatch { repository { name }, file { path, url, content }, lineMatches { preview, lineNumber, offsetAndLengths } } } } } }",
+			}
+			request.Variables.Query = params.Query
+
+			graphqlQueryBytes, err := json.Marshal(request)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("failed to marshal GraphQL request: %w", err)
+			}
+			graphqlQuery := string(graphqlQueryBytes)
+
+			req, err := http.NewRequestWithContext(
+				requestCtx,
+				"POST",
+				"https://sourcegraph.com/.api/graphql",
+				bytes.NewBuffer([]byte(graphqlQuery)),
+			)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("failed to create request: %w", err)
+			}
+
+			req.Header.Set("Content-Type", "application/json")
+			req.Header.Set("User-Agent", "crush/1.0")
+
+			resp, err := client.Do(req)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("failed to fetch URL: %w", err)
+			}
+			defer resp.Body.Close()
+
+			if resp.StatusCode != http.StatusOK {
+				body, _ := io.ReadAll(resp.Body)
+				if len(body) > 0 {
+					return ai.NewTextErrorResponse(fmt.Sprintf("Request failed with status code: %d, response: %s", resp.StatusCode, string(body))), nil
+				}
+
+				return ai.NewTextErrorResponse(fmt.Sprintf("Request failed with status code: %d", resp.StatusCode)), nil
+			}
+			body, err := io.ReadAll(resp.Body)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("failed to read response body: %w", err)
+			}
+
+			var result map[string]any
+			if err = json.Unmarshal(body, &result); err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("failed to unmarshal response: %w", err)
+			}
+
+			formattedResults, err := formatSourcegraphResults(result, params.ContextWindow)
+			if err != nil {
+				return ai.NewTextErrorResponse("Failed to format results: " + err.Error()), nil
+			}
+
+			return ai.NewTextResponse(formattedResults), nil
+		})
+}
+
+func formatSourcegraphResults(result map[string]any, contextWindow int) (string, error) {
+	var buffer strings.Builder
+
+	if errors, ok := result["errors"].([]any); ok && len(errors) > 0 {
+		buffer.WriteString("## Sourcegraph API Error\n\n")
+		for _, err := range errors {
+			if errMap, ok := err.(map[string]any); ok {
+				if message, ok := errMap["message"].(string); ok {
+					buffer.WriteString(fmt.Sprintf("- %s\n", message))
+				}
+			}
+		}
+		return buffer.String(), nil
+	}
+
+	data, ok := result["data"].(map[string]any)
+	if !ok {
+		return "", fmt.Errorf("invalid response format: missing data field")
+	}
+
+	search, ok := data["search"].(map[string]any)
+	if !ok {
+		return "", fmt.Errorf("invalid response format: missing search field")
+	}
+
+	searchResults, ok := search["results"].(map[string]any)
+	if !ok {
+		return "", fmt.Errorf("invalid response format: missing results field")
+	}
+
+	matchCount, _ := searchResults["matchCount"].(float64)
+	resultCount, _ := searchResults["resultCount"].(float64)
+	limitHit, _ := searchResults["limitHit"].(bool)
+
+	buffer.WriteString("# Sourcegraph Search Results\n\n")
+	buffer.WriteString(fmt.Sprintf("Found %d matches across %d results\n", int(matchCount), int(resultCount)))
+
+	if limitHit {
+		buffer.WriteString("(Result limit reached, try a more specific query)\n")
+	}
+
+	buffer.WriteString("\n")
+
+	results, ok := searchResults["results"].([]any)
+	if !ok || len(results) == 0 {
+		buffer.WriteString("No results found. Try a different query.\n")
+		return buffer.String(), nil
+	}
+
+	maxResults := 10
+	if len(results) > maxResults {
+		results = results[:maxResults]
+	}
+
+	for i, res := range results {
+		fileMatch, ok := res.(map[string]any)
+		if !ok {
+			continue
+		}
+
+		typeName, _ := fileMatch["__typename"].(string)
+		if typeName != "FileMatch" {
+			continue
+		}
+
+		repo, _ := fileMatch["repository"].(map[string]any)
+		file, _ := fileMatch["file"].(map[string]any)
+		lineMatches, _ := fileMatch["lineMatches"].([]any)
+
+		if repo == nil || file == nil {
+			continue
+		}
+
+		repoName, _ := repo["name"].(string)
+		filePath, _ := file["path"].(string)
+		fileURL, _ := file["url"].(string)
+		fileContent, _ := file["content"].(string)
+
+		buffer.WriteString(fmt.Sprintf("## Result %d: %s/%s\n\n", i+1, repoName, filePath))
+
+		if fileURL != "" {
+			buffer.WriteString(fmt.Sprintf("URL: %s\n\n", fileURL))
+		}
+
+		if len(lineMatches) > 0 {
+			for _, lm := range lineMatches {
+				lineMatch, ok := lm.(map[string]any)
+				if !ok {
+					continue
+				}
+
+				lineNumber, _ := lineMatch["lineNumber"].(float64)
+				preview, _ := lineMatch["preview"].(string)
+
+				if fileContent != "" {
+					lines := strings.Split(fileContent, "\n")
+
+					buffer.WriteString("```\n")
+
+					startLine := max(1, int(lineNumber)-contextWindow)
+
+					for j := startLine - 1; j < int(lineNumber)-1 && j < len(lines); j++ {
+						if j >= 0 {
+							buffer.WriteString(fmt.Sprintf("%d| %s\n", j+1, lines[j]))
+						}
+					}
+
+					buffer.WriteString(fmt.Sprintf("%d|  %s\n", int(lineNumber), preview))
+
+					endLine := int(lineNumber) + contextWindow
+
+					for j := int(lineNumber); j < endLine && j < len(lines); j++ {
+						if j < len(lines) {
+							buffer.WriteString(fmt.Sprintf("%d| %s\n", j+1, lines[j]))
+						}
+					}
+
+					buffer.WriteString("```\n\n")
+				} else {
+					buffer.WriteString("```\n")
+					buffer.WriteString(fmt.Sprintf("%d| %s\n", int(lineNumber), preview))
+					buffer.WriteString("```\n\n")
+				}
+			}
+		}
+	}
+
+	return buffer.String(), nil
+}
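
For reference, a sketch of the decoded response shape that `formatSourcegraphResults` walks — `data.search.results` with counts plus a list of `FileMatch` entries. The values below are invented; only the key names and types mirror the code above.

```go
package tools

import "fmt"

// exampleFormatSourcegraph feeds a hand-built response map through the
// formatter to show the expected nesting and types (float64 numbers,
// []any lists), as produced by encoding/json's generic decoding.
func exampleFormatSourcegraph() {
	fake := map[string]any{
		"data": map[string]any{
			"search": map[string]any{
				"results": map[string]any{
					"matchCount":  float64(1),
					"resultCount": float64(1),
					"limitHit":    false,
					"results": []any{
						map[string]any{
							"__typename": "FileMatch",
							"repository": map[string]any{"name": "github.com/example/repo"},
							"file":       map[string]any{"path": "pkg/example.go", "url": "", "content": ""},
							"lineMatches": []any{
								map[string]any{"lineNumber": float64(10), "preview": "func Example() {}"},
							},
						},
					},
				},
			},
		},
	}
	out, err := formatSourcegraphResults(fake, 10)
	if err != nil {
		panic(err)
	}
	fmt.Println(out)
}
```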

internal/agent/tools/sourcegraph.md 🔗

@@ -0,0 +1,57 @@
+Search code across public repositories using Sourcegraph's GraphQL API.
+
+<usage>
+- Provide search query using Sourcegraph syntax
+- Optional result count (default: 10, max: 20)
+- Optional timeout for request
+</usage>
+
+<basic_syntax>
+
+- "fmt.Println" - exact matches
+- "file:.go fmt.Println" - limit to Go files
+- "repo:^github\.com/golang/go$ fmt.Println" - specific repos
+- "lang:go fmt.Println" - limit to Go code
+- "fmt.Println AND log.Fatal" - combined terms
+- "fmt\.(Print|Printf|Println)" - regex patterns
+- "\"exact phrase\"" - exact phrase matching
+- "-file:test" or "-repo:forks" - exclude matches
+  </basic_syntax>
+
+<key_filters>
+Repository: repo:name, repo:^exact$, repo:org/repo@branch, -repo:exclude, fork:yes, archived:yes, visibility:public
+File: file:\.js$, file:internal/, -file:test, file:has.content(text)
+Content: content:"exact", -content:"unwanted", case:yes
+Type: type:symbol, type:file, type:path, type:diff, type:commit
+Time: after:"1 month ago", before:"2023-01-01", author:name, message:"fix"
+Result: select:repo, select:file, select:content, count:100, timeout:30s
+</key_filters>
+
+<examples>
+- "file:.go context.WithTimeout" - Go code using context.WithTimeout
+- "lang:typescript useState type:symbol" - TypeScript React useState hooks
+- "repo:^github\.com/kubernetes/kubernetes$ pod list type:file" - Kubernetes pod files
+- "file:Dockerfile (alpine OR ubuntu) -content:alpine:latest" - Dockerfiles with base images
+</examples>
+
+<boolean_operators>
+
+- "term1 AND term2" - both terms
+- "term1 OR term2" - either term
+- "term1 NOT term2" - term1 but not term2
+- "term1 and (term2 or term3)" - grouping with parentheses
+  </boolean_operators>
+
+<limitations>
+- Only searches public repositories
+- Rate limits may apply
+- Complex queries take longer
+- Max 20 results per query
+</limitations>
+
+<tips>
+- Use specific file extensions to narrow results
+- Add repo: filters for targeted searches
+- Use type:symbol for function/method definitions
+- Use type:file to find relevant files
+</tips>

internal/agent/tools/tools.go 🔗

@@ -0,0 +1,26 @@
+package tools
+
+import (
+	"context"
+)
+
+type (
+	sessionIDContextKey string
+	messageIDContextKey string
+)
+
+const (
+	SessionIDContextKey sessionIDContextKey = "session_id"
+)
+
+func GetSessionFromContext(ctx context.Context) string {
+	sessionID := ctx.Value(SessionIDContextKey)
+	if sessionID == nil {
+		return ""
+	}
+	s, ok := sessionID.(string)
+	if !ok {
+		return ""
+	}
+	return s
+}
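
Callers are expected to seed the session ID that `GetSessionFromContext` reads back; a minimal sketch (the session value here is made up):

```go
package tools

import "context"

// withSession attaches a session ID to the context using the typed key
// above, so tools invoked with this context can resolve it.
func withSession(ctx context.Context, sessionID string) context.Context {
	return context.WithValue(ctx, SessionIDContextKey, sessionID)
}

// Usage (hypothetical):
//	ctx = withSession(ctx, "session-123")
//	// GetSessionFromContext(ctx) now returns "session-123"
```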

internal/agent/tools/view.go 🔗

@@ -0,0 +1,308 @@
+package tools
+
+import (
+	"bufio"
+	"context"
+	_ "embed"
+	"fmt"
+	"io"
+	"os"
+	"path/filepath"
+	"strings"
+	"unicode/utf8"
+
+	"github.com/charmbracelet/crush/internal/csync"
+	"github.com/charmbracelet/crush/internal/lsp"
+	"github.com/charmbracelet/crush/internal/permission"
+	"github.com/charmbracelet/fantasy/ai"
+)
+
+//go:embed view.md
+var viewDescription []byte
+
+type ViewParams struct {
+	FilePath string `json:"file_path" description:"The path to the file to read"`
+	Offset   int    `json:"offset" description:"The line number to start reading from (0-based)"`
+	Limit    int    `json:"limit" description:"The number of lines to read (defaults to 2000)"`
+}
+
+type ViewPermissionsParams struct {
+	FilePath string `json:"file_path"`
+	Offset   int    `json:"offset"`
+	Limit    int    `json:"limit"`
+}
+
+type viewTool struct {
+	lspClients  *csync.Map[string, *lsp.Client]
+	workingDir  string
+	permissions permission.Service
+}
+
+type ViewResponseMetadata struct {
+	FilePath string `json:"file_path"`
+	Content  string `json:"content"`
+}
+
+const (
+	ViewToolName     = "view"
+	MaxReadSize      = 250 * 1024
+	DefaultReadLimit = 2000
+	MaxLineLength    = 2000
+)
+
+func NewViewTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, workingDir string) ai.AgentTool {
+	return ai.NewAgentTool(
+		ViewToolName,
+		string(viewDescription),
+		func(ctx context.Context, params ViewParams, call ai.ToolCall) (ai.ToolResponse, error) {
+			if params.FilePath == "" {
+				return ai.NewTextErrorResponse("file_path is required"), nil
+			}
+
+			// Handle relative paths
+			filePath := params.FilePath
+			if !filepath.IsAbs(filePath) {
+				filePath = filepath.Join(workingDir, filePath)
+			}
+
+			// Check if file is outside working directory and request permission if needed
+			absWorkingDir, err := filepath.Abs(workingDir)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("error resolving working directory: %w", err)
+			}
+
+			absFilePath, err := filepath.Abs(filePath)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("error resolving file path: %w", err)
+			}
+
+			relPath, err := filepath.Rel(absWorkingDir, absFilePath)
+			if err != nil || strings.HasPrefix(relPath, "..") {
+				// File is outside working directory, request permission
+				sessionID := GetSessionFromContext(ctx)
+				if sessionID == "" {
+					return ai.ToolResponse{}, fmt.Errorf("session ID is required for accessing files outside working directory")
+				}
+
+				granted := permissions.Request(
+					permission.CreatePermissionRequest{
+						SessionID:   sessionID,
+						Path:        absFilePath,
+						ToolCallID:  call.ID,
+						ToolName:    ViewToolName,
+						Action:      "read",
+						Description: fmt.Sprintf("Read file outside working directory: %s", absFilePath),
+						Params:      ViewPermissionsParams(params),
+					},
+				)
+
+				if !granted {
+					return ai.ToolResponse{}, permission.ErrorPermissionDenied
+				}
+			}
+
+			// Check if file exists
+			fileInfo, err := os.Stat(filePath)
+			if err != nil {
+				if os.IsNotExist(err) {
+					// Try to offer suggestions for similarly named files
+					dir := filepath.Dir(filePath)
+					base := filepath.Base(filePath)
+
+					dirEntries, dirErr := os.ReadDir(dir)
+					if dirErr == nil {
+						var suggestions []string
+						for _, entry := range dirEntries {
+							if strings.Contains(strings.ToLower(entry.Name()), strings.ToLower(base)) ||
+								strings.Contains(strings.ToLower(base), strings.ToLower(entry.Name())) {
+								suggestions = append(suggestions, filepath.Join(dir, entry.Name()))
+								if len(suggestions) >= 3 {
+									break
+								}
+							}
+						}
+
+						if len(suggestions) > 0 {
+							return ai.NewTextErrorResponse(fmt.Sprintf("File not found: %s\n\nDid you mean one of these?\n%s",
+								filePath, strings.Join(suggestions, "\n"))), nil
+						}
+					}
+
+					return ai.NewTextErrorResponse(fmt.Sprintf("File not found: %s", filePath)), nil
+				}
+				return ai.ToolResponse{}, fmt.Errorf("error accessing file: %w", err)
+			}
+
+			// Check if it's a directory
+			if fileInfo.IsDir() {
+				return ai.NewTextErrorResponse(fmt.Sprintf("Path is a directory, not a file: %s", filePath)), nil
+			}
+
+			// Check file size
+			if fileInfo.Size() > MaxReadSize {
+				return ai.NewTextErrorResponse(fmt.Sprintf("File is too large (%d bytes). Maximum size is %d bytes",
+					fileInfo.Size(), MaxReadSize)), nil
+			}
+
+			// Set default limit if not provided
+			if params.Limit <= 0 {
+				params.Limit = DefaultReadLimit
+			}
+
+			// Check if it's an image file
+			isImage, imageType := isImageFile(filePath)
+			// TODO: handle images
+			if isImage {
+				return ai.NewTextErrorResponse(fmt.Sprintf("This is an image file of type: %s\n", imageType)), nil
+			}
+
+			// Read the file content
+			content, lineCount, err := readTextFile(filePath, params.Offset, params.Limit)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("error reading file: %w", err)
+			}
+			isValidUTF8 := utf8.ValidString(content)
+			if !isValidUTF8 {
+				return ai.NewTextErrorResponse("File content is not valid UTF-8"), nil
+			}
+
+			notifyLSPs(ctx, lspClients, filePath)
+			output := "<file>\n"
+			// Format the output with line numbers
+			output += addLineNumbers(content, params.Offset+1)
+
+			// Add a note if the content was truncated
+			if lineCount > params.Offset+len(strings.Split(content, "\n")) {
+				output += fmt.Sprintf("\n\n(File has more lines. Use 'offset' parameter to read beyond line %d)",
+					params.Offset+len(strings.Split(content, "\n")))
+			}
+			output += "\n</file>\n"
+			output += getDiagnostics(filePath, lspClients)
+			recordFileRead(filePath)
+			return ai.WithResponseMetadata(
+				ai.NewTextResponse(output),
+				ViewResponseMetadata{
+					FilePath: filePath,
+					Content:  content,
+				},
+			), nil
+		})
+}
+
+func addLineNumbers(content string, startLine int) string {
+	if content == "" {
+		return ""
+	}
+
+	lines := strings.Split(content, "\n")
+
+	var result []string
+	for i, line := range lines {
+		line = strings.TrimSuffix(line, "\r")
+
+		lineNum := i + startLine
+		numStr := fmt.Sprintf("%d", lineNum)
+
+		if len(numStr) >= 6 {
+			result = append(result, fmt.Sprintf("%s|%s", numStr, line))
+		} else {
+			paddedNum := fmt.Sprintf("%6s", numStr)
+			result = append(result, fmt.Sprintf("%s|%s", paddedNum, line))
+		}
+	}
+
+	return strings.Join(result, "\n")
+}
+
+func readTextFile(filePath string, offset, limit int) (string, int, error) {
+	file, err := os.Open(filePath)
+	if err != nil {
+		return "", 0, err
+	}
+	defer file.Close()
+
+	lineCount := 0
+
+	scanner := NewLineScanner(file)
+	if offset > 0 {
+		for lineCount < offset && scanner.Scan() {
+			lineCount++
+		}
+		if err = scanner.Err(); err != nil {
+			return "", 0, err
+		}
+	}
+
+	if offset == 0 {
+		_, err = file.Seek(0, io.SeekStart)
+		if err != nil {
+			return "", 0, err
+		}
+	}
+
+	// Pre-allocate slice with expected capacity
+	lines := make([]string, 0, limit)
+	lineCount = offset
+
+	for scanner.Scan() && len(lines) < limit {
+		lineCount++
+		lineText := scanner.Text()
+		if len(lineText) > MaxLineLength {
+			lineText = lineText[:MaxLineLength] + "..."
+		}
+		lines = append(lines, lineText)
+	}
+
+	// Continue scanning to get total line count
+	for scanner.Scan() {
+		lineCount++
+	}
+
+	if err := scanner.Err(); err != nil {
+		return "", 0, err
+	}
+
+	return strings.Join(lines, "\n"), lineCount, nil
+}
+
+func isImageFile(filePath string) (bool, string) {
+	ext := strings.ToLower(filepath.Ext(filePath))
+	switch ext {
+	case ".jpg", ".jpeg":
+		return true, "JPEG"
+	case ".png":
+		return true, "PNG"
+	case ".gif":
+		return true, "GIF"
+	case ".bmp":
+		return true, "BMP"
+	case ".svg":
+		return true, "SVG"
+	case ".webp":
+		return true, "WebP"
+	default:
+		return false, ""
+	}
+}
+
+type LineScanner struct {
+	scanner *bufio.Scanner
+}
+
+func NewLineScanner(r io.Reader) *LineScanner {
+	return &LineScanner{
+		scanner: bufio.NewScanner(r),
+	}
+}
+
+func (s *LineScanner) Scan() bool {
+	return s.scanner.Scan()
+}
+
+func (s *LineScanner) Text() string {
+	return s.scanner.Text()
+}
+
+func (s *LineScanner) Err() error {
+	return s.scanner.Err()
+}
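
A small sketch of the line-numbered output that `addLineNumbers` produces (the tool passes `params.Offset+1` as the starting line); the example content is invented.

```go
package tools

import "fmt"

// exampleAddLineNumbers prints a short file body with the same 6-wide,
// right-aligned line-number gutter the view tool emits.
func exampleAddLineNumbers() {
	content := "package main\n\nfunc main() {}"
	out := addLineNumbers(content, 1)
	// Each line is prefixed with a right-aligned, 6-character line number,
	// e.g. "     1|package main".
	fmt.Println(out)
}
```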

internal/agent/tools/view.md 🔗

@@ -0,0 +1,36 @@
+Reads and displays file contents with line numbers for examining code, logs, or text data.
+
+<usage>
+- Provide file path to read
+- Optional offset: start reading from specific line (0-based)
+- Optional limit: control lines read (default 2000)
+- Don't use for directories (use LS tool instead)
+</usage>
+
+<features>
+- Displays contents with line numbers
+- Can read from any file position using offset
+- Handles large files by limiting lines read
+- Auto-truncates very long lines for display
+- Suggests similar filenames when file not found
+</features>
+
+<limitations>
+- Max file size: 250KB
+- Default limit: 2000 lines
+- Lines >2000 chars truncated
+- Cannot display binary files/images (identifies them)
+</limitations>
+
+<cross_platform>
+
+- Handles Windows (CRLF) and Unix (LF) line endings
+- Works with forward slashes (/) and backslashes (\)
+- Auto-detects text encoding for common formats
+  </cross_platform>
+
+<tips>
+- Use with Glob to find files first
+- For code exploration: Grep to find relevant files, then View to examine
+- For large files: use offset parameter for specific sections
+</tips>

internal/agent/tools/write.go 🔗

@@ -0,0 +1,177 @@
+package tools
+
+import (
+	"context"
+	_ "embed"
+	"fmt"
+	"log/slog"
+	"os"
+	"path/filepath"
+	"strings"
+	"time"
+
+	"github.com/charmbracelet/crush/internal/csync"
+	"github.com/charmbracelet/crush/internal/diff"
+	"github.com/charmbracelet/crush/internal/fsext"
+	"github.com/charmbracelet/crush/internal/history"
+	"github.com/charmbracelet/fantasy/ai"
+
+	"github.com/charmbracelet/crush/internal/lsp"
+	"github.com/charmbracelet/crush/internal/permission"
+)
+
+//go:embed write.md
+var writeDescription []byte
+
+type WriteParams struct {
+	FilePath string `json:"file_path" description:"The path to the file to write"`
+	Content  string `json:"content" description:"The content to write to the file"`
+}
+
+type WritePermissionsParams struct {
+	FilePath   string `json:"file_path"`
+	OldContent string `json:"old_content,omitempty"`
+	NewContent string `json:"new_content,omitempty"`
+}
+
+type writeTool struct {
+	lspClients  *csync.Map[string, *lsp.Client]
+	permissions permission.Service
+	files       history.Service
+	workingDir  string
+}
+
+type WriteResponseMetadata struct {
+	Diff      string `json:"diff"`
+	Additions int    `json:"additions"`
+	Removals  int    `json:"removals"`
+}
+
+const WriteToolName = "write"
+
+func NewWriteTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, files history.Service, workingDir string) ai.AgentTool {
+	return ai.NewAgentTool(
+		WriteToolName,
+		string(writeDescription),
+		func(ctx context.Context, params WriteParams, call ai.ToolCall) (ai.ToolResponse, error) {
+			if params.FilePath == "" {
+				return ai.NewTextErrorResponse("file_path is required"), nil
+			}
+
+			if params.Content == "" {
+				return ai.NewTextErrorResponse("content is required"), nil
+			}
+
+			filePath := params.FilePath
+			if !filepath.IsAbs(filePath) {
+				filePath = filepath.Join(workingDir, filePath)
+			}
+
+			fileInfo, err := os.Stat(filePath)
+			if err == nil {
+				if fileInfo.IsDir() {
+					return ai.NewTextErrorResponse(fmt.Sprintf("Path is a directory, not a file: %s", filePath)), nil
+				}
+
+				modTime := fileInfo.ModTime()
+				lastRead := getLastReadTime(filePath)
+				if modTime.After(lastRead) {
+					return ai.NewTextErrorResponse(fmt.Sprintf("File %s has been modified since it was last read.\nLast modification: %s\nLast read: %s\n\nPlease read the file again before modifying it.",
+						filePath, modTime.Format(time.RFC3339), lastRead.Format(time.RFC3339))), nil
+				}
+
+				oldContent, readErr := os.ReadFile(filePath)
+				if readErr == nil && string(oldContent) == params.Content {
+					return ai.NewTextErrorResponse(fmt.Sprintf("File %s already contains the exact content. No changes made.", filePath)), nil
+				}
+			} else if !os.IsNotExist(err) {
+				return ai.ToolResponse{}, fmt.Errorf("error checking file: %w", err)
+			}
+
+			dir := filepath.Dir(filePath)
+			if err = os.MkdirAll(dir, 0o755); err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("error creating directory: %w", err)
+			}
+
+			oldContent := ""
+			if fileInfo != nil && !fileInfo.IsDir() {
+				oldBytes, readErr := os.ReadFile(filePath)
+				if readErr == nil {
+					oldContent = string(oldBytes)
+				}
+			}
+
+			sessionID := GetSessionFromContext(ctx)
+			if sessionID == "" {
+				return ai.ToolResponse{}, fmt.Errorf("session_id is required")
+			}
+
+			diff, additions, removals := diff.GenerateDiff(
+				oldContent,
+				params.Content,
+				strings.TrimPrefix(filePath, workingDir),
+			)
+
+			p := permissions.Request(
+				permission.CreatePermissionRequest{
+					SessionID:   sessionID,
+					Path:        fsext.PathOrPrefix(filePath, workingDir),
+					ToolCallID:  call.ID,
+					ToolName:    WriteToolName,
+					Action:      "write",
+					Description: fmt.Sprintf("Create file %s", filePath),
+					Params: WritePermissionsParams{
+						FilePath:   filePath,
+						OldContent: oldContent,
+						NewContent: params.Content,
+					},
+				},
+			)
+			if !p {
+				return ai.ToolResponse{}, permission.ErrorPermissionDenied
+			}
+
+			err = os.WriteFile(filePath, []byte(params.Content), 0o644)
+			if err != nil {
+				return ai.ToolResponse{}, fmt.Errorf("error writing file: %w", err)
+			}
+
+			// Check if file exists in history
+			file, err := files.GetByPathAndSession(ctx, filePath, sessionID)
+			if err != nil {
+				_, err = files.Create(ctx, sessionID, filePath, oldContent)
+				if err != nil {
+					// Fail the operation if we cannot create the history record
+					return ai.ToolResponse{}, fmt.Errorf("error creating file history: %w", err)
+				}
+			}
+			if file.Content != oldContent {
+				// User manually changed the content, store an intermediate version
+				_, err = files.CreateVersion(ctx, sessionID, filePath, oldContent)
+				if err != nil {
+					slog.Debug("Error creating file history version", "error", err)
+				}
+			}
+			// Store the new version
+			_, err = files.CreateVersion(ctx, sessionID, filePath, params.Content)
+			if err != nil {
+				slog.Debug("Error creating file history version", "error", err)
+			}
+
+			recordFileWrite(filePath)
+			recordFileRead(filePath)
+
+			notifyLSPs(ctx, lspClients, filePath)
+
+			result := fmt.Sprintf("File successfully written: %s", filePath)
+			result = fmt.Sprintf("<result>\n%s\n</result>", result)
+			result += getDiagnostics(filePath, lspClients)
+			return ai.WithResponseMetadata(ai.NewTextResponse(result),
+				WriteResponseMetadata{
+					Diff:      diff,
+					Additions: additions,
+					Removals:  removals,
+				},
+			), nil
+		})
+}

internal/agent/tools/write.md 🔗

@@ -0,0 +1,31 @@
+Creates or updates files in filesystem for saving/modifying text content.
+
+<usage>
+- Provide file path to write
+- Include content to write to file
+- Tool creates necessary parent directories automatically
+</usage>
+
+<features>
+- Creates new files or overwrites existing ones
+- Auto-creates parent directories if missing
+- Checks if file modified since last read for safety
+- Avoids unnecessary writes when content unchanged
+</features>
+
+<limitations>
+- Read file before writing to avoid conflicts
+- Cannot append (rewrites entire file)
+</limitations>
+
+<cross_platform>
+
+- Use forward slashes (/) for compatibility
+  </cross_platform>
+
+<tips>
+- Use View tool first to examine existing files before modifying
+- Use LS tool to verify location when creating new files
+- Combine with Glob/Grep to find and modify multiple files
+- Include descriptive comments when changing existing code
+</tips>

internal/app/app.go 🔗

@@ -31,6 +31,7 @@ type App struct {
 	History     history.Service
 	Permissions permission.Service
 
+	// TODO: (kujtim) remove this when fully implemented
 	CoderAgent agent.Service
 
 	LSPClients *csync.Map[string, *lsp.Client]

internal/config/config.go 🔗

@@ -57,13 +57,23 @@ type SelectedModel struct {
 	Provider string `json:"provider" jsonschema:"required,description=The model provider ID that matches a key in the providers config,example=openai"`
 
 	// Only used by models that use the openai provider and need this set.
+	// Deprecated: use provider_options instead.
 	ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"description=Reasoning effort level for OpenAI models that support it,enum=low,enum=medium,enum=high"`
 
-	// Overrides the default model configuration.
-	MaxTokens int64 `json:"max_tokens,omitempty" jsonschema:"description=Maximum number of tokens for model responses,minimum=1,maximum=200000,example=4096"`
-
 	// Used by anthropic models that can reason to indicate if the model should think.
+	// Deprecated: use provider_options instead.
 	Think bool `json:"think,omitempty" jsonschema:"description=Enable thinking mode for Anthropic models that support reasoning"`
+
+	// Overrides the default model configuration.
+	MaxTokens        int64    `json:"max_tokens,omitempty" jsonschema:"description=Maximum number of tokens for model responses,minimum=1,maximum=200000,example=4096"`
+	Temperature      *float64 `json:"temperature,omitempty" jsonschema:"description=Sampling temperature,minimum=0,maximum=1,example=0.7"`
+	TopP             *float64 `json:"top_p,omitempty" jsonschema:"description=Top-p (nucleus) sampling parameter,minimum=0,maximum=1,example=0.9"`
+	TopK             *int64   `json:"top_k,omitempty" jsonschema:"description=Top-k sampling parameter"`
+	FrequencyPenalty *float64 `json:"frequency_penalty,omitempty" jsonschema:"description=Frequency penalty to reduce repetition"`
+	PresencePenalty  *float64 `json:"presence_penalty,omitempty" jsonschema:"description=Presence penalty to increase topic diversity"`
+
+	// Override provider specific options.
+	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for the model"`
 }
 
 type ProviderConfig struct {
@@ -93,6 +103,9 @@ type ProviderConfig struct {
 
 	// The provider models
 	Models []catwalk.Model `json:"models,omitempty" jsonschema:"description=List of models available from this provider"`
+
+	// Override provider specific options.
+	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for the model"`
 }
 
 type MCPType string
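
A sketch of how the new per-model overrides and `provider_options` could be populated; the values are invented, and which option keys a provider honors depends on its implementation. The JSON config file maps onto the same fields via the struct tags shown above.

```go
package config

// exampleSelectedModel builds a SelectedModel using the newly added
// sampling overrides and the provider_options escape hatch.
func exampleSelectedModel() SelectedModel {
	temp := 0.2
	return SelectedModel{
		Provider:    "openai",
		MaxTokens:   4096,
		Temperature: &temp,
		ProviderOptions: map[string]any{
			// provider-specific; exact keys depend on the provider package
			"reasoning_effort": "high",
		},
	}
}
```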

internal/message/content.go 🔗

@@ -2,10 +2,13 @@ package message
 
 import (
 	"encoding/base64"
+	"errors"
 	"slices"
 	"time"
 
 	"github.com/charmbracelet/catwalk/pkg/catwalk"
+	"github.com/charmbracelet/fantasy/ai"
+	"github.com/charmbracelet/fantasy/anthropic"
 )
 
 type MessageRole string
@@ -85,11 +88,12 @@ func (bc BinaryContent) String(p catwalk.InferenceProvider) string {
 func (BinaryContent) isPart() {}
 
 type ToolCall struct {
-	ID       string `json:"id"`
-	Name     string `json:"name"`
-	Input    string `json:"input"`
-	Type     string `json:"type"`
-	Finished bool   `json:"finished"`
+	ID               string `json:"id"`
+	Name             string `json:"name"`
+	Input            string `json:"input"`
+	ProviderExecuted bool   `json:"provider_executed"`
+	Type             string `json:"type"`
+	Finished         bool   `json:"finished"`
 }
 
 func (ToolCall) isPart() {}
@@ -98,6 +102,8 @@ type ToolResult struct {
 	ToolCallID string `json:"tool_call_id"`
 	Name       string `json:"name"`
 	Content    string `json:"content"`
+	Data       string `json:"data"`
+	MIMEType   string `json:"mime_type"`
 	Metadata   string `json:"metadata"`
 	IsError    bool   `json:"is_error"`
 }
@@ -384,3 +390,80 @@ func (m *Message) AddImageURL(url, detail string) {
 func (m *Message) AddBinary(mimeType string, data []byte) {
 	m.Parts = append(m.Parts, BinaryContent{MIMEType: mimeType, Data: data})
 }
+
+func (m *Message) ToAIMessage() []ai.Message {
+	var messages []ai.Message
+	switch m.Role {
+	case User:
+		var parts []ai.MessagePart
+		if m.Content().Text != "" {
+			parts = append(parts, ai.TextPart{Text: m.Content().Text})
+		}
+		for _, content := range m.BinaryContent() {
+			parts = append(parts, ai.FilePart{
+				Filename:  content.Path,
+				Data:      content.Data,
+				MediaType: content.MIMEType,
+			})
+		}
+		messages = append(messages, ai.Message{
+			Role:    ai.MessageRoleUser,
+			Content: parts,
+		})
+	case Assistant:
+		var parts []ai.MessagePart
+		if m.Content().Text != "" {
+			parts = append(parts, ai.TextPart{Text: m.Content().Text})
+		}
+		reasoning := m.ReasoningContent()
+		if reasoning.Thinking != "" {
+			reasoningPart := ai.ReasoningPart{Text: reasoning.Thinking, ProviderOptions: ai.ProviderOptions{}}
+			if reasoning.Signature != "" {
+				reasoningPart.ProviderOptions["anthropic"] = &anthropic.ReasoningOptionMetadata{
+					Signature: reasoning.Signature,
+				}
+			}
+			parts = append(parts, reasoningPart)
+		}
+		for _, call := range m.ToolCalls() {
+			parts = append(parts, ai.ToolCallPart{
+				ToolCallID:       call.ID,
+				ToolName:         call.Name,
+				Input:            call.Input,
+				ProviderExecuted: call.ProviderExecuted,
+			})
+		}
+		messages = append(messages, ai.Message{
+			Role:    ai.MessageRoleAssistant,
+			Content: parts,
+		})
+	case Tool:
+		var parts []ai.MessagePart
+		for _, result := range m.ToolResults() {
+			var content ai.ToolResultOutputContent
+			if result.IsError {
+				content = ai.ToolResultOutputContentError{
+					Error: errors.New(result.Content),
+				}
+			} else if result.Data != "" {
+				content = ai.ToolResultOutputContentMedia{
+					Data:      result.Data,
+					MediaType: result.MIMEType,
+				}
+			} else {
+				content = ai.ToolResultOutputContentText{
+					Text: result.Content,
+				}
+			}
+			parts = append(parts, ai.ToolResultPart{
+				ToolCallID: result.ToolCallID,
+				Output:     content,
+			})
+		}
+		messages = append(messages, ai.Message{
+			Role:    ai.MessageRoleTool,
+			Content: parts,
+		})
+	}
+	return messages
+}
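
A minimal sketch of the new conversion in use, assuming `Content()` returns an empty `TextContent` when no text part is present; the message values are invented.

```go
package message

import "fmt"

// exampleToAIMessage converts a stored user message carrying a binary
// attachment into the fantasy ai.Message slice consumed by the agent.
func exampleToAIMessage() {
	var msg Message
	msg.Role = User
	msg.AddBinary("image/png", []byte{0x89, 0x50, 0x4E, 0x47})

	aiMsgs := msg.ToAIMessage()
	// One user-role message containing a single FilePart.
	fmt.Println(len(aiMsgs), len(aiMsgs[0].Content))
}
```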