test: add more tests and make it easy to add tests for multiple models

Created by Kujtim Hoxha

Change summary

go.mod                                                                     |   2 
go.sum                                                                     |   4 
internal/agent/.env.sample                                                 |   2 
internal/agent/agent.go                                                    |   6 
internal/agent/agent_test.go                                               | 236 
internal/agent/coder.go                                                    |   4 
internal/agent/common_test.go                                              | 187 
internal/agent/prompt/prompt.go                                            |  26 
internal/agent/testdata/TestCoderAgent/anthropic-sonnet/read_a_file.yaml   |  69 
internal/agent/testdata/TestCoderAgent/anthropic-sonnet/simple_test.yaml   |  57 
internal/agent/testdata/TestCoderAgent/openai-gpt-5/read_a_file.yaml       |  57 
internal/agent/testdata/TestCoderAgent/openai-gpt-5/simple_test.yaml       |  61 
internal/agent/testdata/TestCoderAgent/openrouter-kimi-k2/read_a_file.yaml |  10 
internal/agent/testdata/TestCoderAgent/openrouter-kimi-k2/simple_test.yaml |  49 
internal/agent/testdata/TestCoderAgent/simple_test.yaml                    |  69 
internal/agent/testdata/TestSessionSimpleAgent.yaml                        | 127 
internal/agent/tools/glob.go                                               |   2 
internal/agent/tools/grep.go                                               |   6 
internal/agent/tools/view.go                                               |   4 
19 files changed, 616 insertions(+), 362 deletions(-)

Detailed changes

go.mod

@@ -69,6 +69,7 @@ require (
 	github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.4 // indirect
 	github.com/aws/aws-sdk-go-v2/service/sts v1.30.3 // indirect
 	github.com/aws/smithy-go v1.20.3 // indirect
+	github.com/charmbracelet/x/json v0.2.0 // indirect
 	github.com/felixge/httpsnoop v1.0.4 // indirect
 	github.com/go-logr/logr v1.4.3 // indirect
 	github.com/go-logr/stdr v1.2.2 // indirect
@@ -77,6 +78,7 @@ require (
 	github.com/google/s2a-go v0.1.8 // indirect
 	github.com/googleapis/enterprise-certificate-proxy v0.3.4 // indirect
 	github.com/gorilla/websocket v1.5.3 // indirect
+	github.com/openai/openai-go/v2 v2.3.0 // indirect
 	go.opencensus.io v0.24.0 // indirect
 	go.opentelemetry.io/auto/sdk v1.1.0 // indirect
 	go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0 // indirect

go.sum

@@ -109,6 +109,8 @@ github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a h1:FsHE
 github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
 github.com/charmbracelet/x/exp/slice v0.0.0-20250904123553-b4e2667e5ad5 h1:DTSZxdV9qQagD4iGcAt9RgaRBZtJl01bfKgdLzUzUPI=
 github.com/charmbracelet/x/exp/slice v0.0.0-20250904123553-b4e2667e5ad5/go.mod h1:vI5nDVMWi6veaYH+0Fmvpbe/+cv/iJfMntdh+N0+Tms=
+github.com/charmbracelet/x/json v0.2.0 h1:DqB+ZGx2h+Z+1s98HOuOyli+i97wsFQIxP2ZQANTPrQ=
+github.com/charmbracelet/x/json v0.2.0/go.mod h1:opFIflx2YgXgi49xVUu8gEQ21teFAxyMwvOiZhIvWNM=
 github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4 h1:ZhDGU688EHQXslD9KphRpXwK0pKP03egUoZAATUDlV0=
 github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4/go.mod h1:cmdl5zlP5mR8TF2Y68UKc7hdGUDiSJ2+4hk0h04Hsx4=
 github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
@@ -262,6 +264,8 @@ github.com/nxadm/tail v1.4.11 h1:8feyoE3OzPrcshW5/MJ4sGESc5cqmGkGCWlco4l0bqY=
 github.com/nxadm/tail v1.4.11/go.mod h1:OTaG3NK980DZzxbRq6lEuzgU+mug70nY11sMd4JXXHc=
 github.com/openai/openai-go v1.12.0 h1:NBQCnXzqOTv5wsgNC36PrFEiskGfO5wccfCWDo9S1U0=
 github.com/openai/openai-go v1.12.0/go.mod h1:g461MYGXEXBVdV5SaR/5tNzNbSfwTBBefwc+LlDCK0Y=
+github.com/openai/openai-go/v2 v2.3.0 h1:y9U+V1tlHjvvb/5XIswuySqnG5EnKBFAbMxgBvTHXvg=
+github.com/openai/openai-go/v2 v2.3.0/go.mod h1:sIUkR+Cu/PMUVkSKhkk742PRURkQOCFhiwJ7eRSBqmk=
 github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU=
 github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
 github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=

internal/agent/agent.go

@@ -184,6 +184,10 @@ func (a *sessionAgent) Run(ctx context.Context, call SessionAgentCall) (*ai.Agen
 			currentAssistant = &assistantMsg
 
 			prepared.Messages = options.Messages
+			// reset all cached items
+			for i := range prepared.Messages {
+				prepared.Messages[i].ProviderOptions = nil
+			}
 
 			queuedCalls, _ := a.messageQueue.Get(call.SessionID)
 			a.messageQueue.Del(call.SessionID)
@@ -206,7 +210,7 @@ func (a *sessionAgent) Run(ctx context.Context, call SessionAgentCall) (*ai.Agen
 					systemMessageUpdated = true
 				}
 				// than add cache control to the last 2 messages
-				if i > len(msgs)-3 {
+				if i > len(prepared.Messages)-3 {
 					prepared.Messages[i].ProviderOptions = a.getCacheControlOptions()
 				}
 			}
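
The agent.go change above does two things: it clears provider options carried over on the prepared messages before the next step, and it fixes the bounds check so cache control is applied to the last two entries of prepared.Messages rather than of msgs. A minimal, standalone sketch of that selection logic, using simplified stand-ins rather than the real fantasy/ai types:

package main

import "fmt"

// Simplified stand-ins for the agent's message and provider-option types.
type providerOptions map[string]any

type chatMessage struct {
	ProviderOptions providerOptions
}

// cacheControl mimics getCacheControlOptions: it marks a message as cacheable.
func cacheControl() providerOptions {
	return providerOptions{"cache_control": "ephemeral"}
}

func main() {
	msgs := make([]chatMessage, 5)
	for i := range msgs {
		msgs[i].ProviderOptions = nil // reset any previously cached entries
		if i > len(msgs)-3 {          // true only for the last two messages
			msgs[i].ProviderOptions = cacheControl()
		}
	}
	for i, m := range msgs {
		fmt.Printf("message %d cached: %v\n", i, m.ProviderOptions != nil)
	}
}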

internal/agent/agent_test.go

@@ -1,175 +1,109 @@
 package agent
 
 import (
-	"net/http"
-	"os"
+	"encoding/json"
+	"fmt"
+	"strings"
 	"testing"
 
-	"github.com/charmbracelet/catwalk/pkg/catwalk"
 	"github.com/charmbracelet/crush/internal/agent/tools"
-	"github.com/charmbracelet/crush/internal/config"
-	"github.com/charmbracelet/crush/internal/csync"
-	"github.com/charmbracelet/crush/internal/db"
-	"github.com/charmbracelet/crush/internal/history"
-	"github.com/charmbracelet/crush/internal/lsp"
 	"github.com/charmbracelet/crush/internal/message"
-	"github.com/charmbracelet/crush/internal/permission"
-	"github.com/charmbracelet/crush/internal/session"
 	"github.com/charmbracelet/fantasy/ai"
-	"github.com/charmbracelet/fantasy/anthropic"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
-	"gopkg.in/dnaeon/go-vcr.v4/pkg/recorder"
 
 	_ "github.com/joho/godotenv/autoload"
 )
 
-type env struct {
-	workingDir  string
-	sessions    session.Service
-	messages    message.Service
-	permissions permission.Service
-	history     history.Service
-	lspClients  *csync.Map[string, *lsp.Client]
+var modelPairs = []modelPair{
+	{"anthropic-sonnet", anthropicBuilder("claude-sonnet-4-5-20250929"), anthropicBuilder("claude-3-5-haiku-20241022")},
+	{"openai-gpt-5", openaiBuilder("gpt-5"), openaiBuilder("gpt-4o")},
+	{"openrouter-kimi-k2", openRouterBuilder("moonshotai/kimi-k2-0905"), openRouterBuilder("qwen/qwen3-next-80b-a3b-instruct")},
 }
 
-type builderFunc func(r *recorder.Recorder) (ai.LanguageModel, error)
-
-func TestSessionAgent(t *testing.T) {
-	t.Run("simple test", func(t *testing.T) {
-		r := newRecorder(t)
-		sonnet, err := anthropicBuilder("claude-sonnet-4-5-20250929")(r)
-		require.NoError(t, err)
-		haiku, err := anthropicBuilder("claude-3-5-haiku-20241022")(r)
-		require.NoError(t, err)
-
-		env := testEnv(t)
-		agent := testSessionAgent(env, sonnet, haiku, "You are a helpful assistant")
-		session, err := env.sessions.Create(t.Context(), "New Session")
-		require.NoError(t, err)
-
-		res, err := agent.Run(t.Context(), SessionAgentCall{
-			Prompt:          "Hello",
-			SessionID:       session.ID,
-			MaxOutputTokens: 10000,
-		})
-
-		require.NoError(t, err)
-		assert.NotNil(t, res)
-
-		t.Run("should create session messages", func(t *testing.T) {
-			msgs, err := env.messages.List(t.Context(), session.ID)
-			require.NoError(t, err)
-			// Should have the agent and user message
-			assert.Equal(t, len(msgs), 2)
-		})
-	})
-}
-
-func TestCoderAgent(t *testing.T) {
-	t.Run("simple test", func(t *testing.T) {
-		r := newRecorder(t)
-		sonnet, err := anthropicBuilder("claude-sonnet-4-5-20250929")(r)
-		require.NoError(t, err)
-		haiku, err := anthropicBuilder("claude-3-5-haiku-20241022")(r)
-		require.NoError(t, err)
-
-		env := testEnv(t)
-		agent, err := coderAgent(env, sonnet, haiku)
-		require.NoError(t, err)
-		session, err := env.sessions.Create(t.Context(), "New Session")
-		require.NoError(t, err)
-
-		res, err := agent.Run(t.Context(), SessionAgentCall{
-			Prompt:          "Hello",
-			SessionID:       session.ID,
-			MaxOutputTokens: 10000,
-		})
-
-		require.NoError(t, err)
-		assert.NotNil(t, res)
-
-		msgs, err := env.messages.List(t.Context(), session.ID)
-		require.NoError(t, err)
-		// Should have the agent and user message
-		assert.Equal(t, len(msgs), 2)
-	})
+func getModels(t *testing.T, pair modelPair) (ai.LanguageModel, ai.LanguageModel) {
+	r := newRecorder(t)
+	large, err := pair.largeModel(t, r)
+	require.NoError(t, err)
+	small, err := pair.smallModel(t, r)
+	require.NoError(t, err)
+	return large, small
 }
 
-func anthropicBuilder(model string) builderFunc {
-	return func(r *recorder.Recorder) (ai.LanguageModel, error) {
-		provider := anthropic.New(
-			anthropic.WithAPIKey(os.Getenv("CRUSH_ANTHROPIC_API_KEY")),
-			anthropic.WithHTTPClient(&http.Client{Transport: r}),
-		)
-		return provider.LanguageModel(model)
-	}
-}
+func setupAgent(t *testing.T, pair modelPair) (SessionAgent, env) {
+	large, small := getModels(t, pair)
+	env := testEnv(t)
 
-func testEnv(t *testing.T) env {
-	workingDir := t.TempDir()
-	conn, err := db.Connect(t.Context(), t.TempDir())
+	createSimpleGoProject(t, env.workingDir)
+	agent, err := coderAgent(env, large, small)
 	require.NoError(t, err)
-	q := db.New(conn)
-	sessions := session.NewService(q)
-	messages := message.NewService(q)
-	permissions := permission.NewPermissionService(workingDir, true, []string{})
-	history := history.NewService(q, conn)
-	lspClients := csync.NewMap[string, *lsp.Client]()
-	return env{
-		workingDir,
-		sessions,
-		messages,
-		permissions,
-		history,
-		lspClients,
-	}
-}
-
-func testSessionAgent(env env, large, small ai.LanguageModel, systemPrompt string, tools ...ai.AgentTool) SessionAgent {
-	largeModel := Model{
-		model:  large,
-		config: catwalk.Model{
-			// todo: add values
-		},
-	}
-	smallModel := Model{
-		model:  small,
-		config: catwalk.Model{
-			// todo: add values
-		},
-	}
-	agent := NewSessionAgent(largeModel, smallModel, systemPrompt, env.sessions, env.messages, tools...)
-	return agent
+	return agent, env
 }
 
-func coderAgent(env env, large, small ai.LanguageModel) (SessionAgent, error) {
-	prompt, err := coderPrompt()
-	if err != nil {
-		return nil, err
-	}
-	cfg, err := config.Init(env.workingDir, "", false)
-	if err != nil {
-		return nil, err
-	}
-
-	systemPrompt, err := prompt.Build(large.Provider(), large.Model(), *cfg)
-	if err != nil {
-		return nil, err
-	}
-	allTools := []ai.AgentTool{
-		tools.NewBashTool(env.permissions, env.workingDir, cfg.Options.Attribution),
-		tools.NewDownloadTool(env.permissions, env.workingDir),
-		tools.NewEditTool(env.lspClients, env.permissions, env.history, env.workingDir),
-		tools.NewMultiEditTool(env.lspClients, env.permissions, env.history, env.workingDir),
-		tools.NewFetchTool(env.permissions, env.workingDir),
-		tools.NewGlobTool(env.workingDir),
-		tools.NewGrepTool(env.workingDir),
-		tools.NewLsTool(env.permissions, env.workingDir),
-		tools.NewSourcegraphTool(),
-		tools.NewViewTool(env.lspClients, env.permissions, env.workingDir),
-		tools.NewWriteTool(env.lspClients, env.permissions, env.history, env.workingDir),
+func TestCoderAgent(t *testing.T) {
+	for _, pair := range modelPairs {
+		t.Run(pair.name, func(t *testing.T) {
+			t.Run("simple test", func(t *testing.T) {
+				agent, env := setupAgent(t, pair)
+
+				session, err := env.sessions.Create(t.Context(), "New Session")
+				require.NoError(t, err)
+
+				res, err := agent.Run(t.Context(), SessionAgentCall{
+					Prompt:          "Hello",
+					SessionID:       session.ID,
+					MaxOutputTokens: 10000,
+				})
+				require.NoError(t, err)
+				assert.NotNil(t, res)
+
+				msgs, err := env.messages.List(t.Context(), session.ID)
+				require.NoError(t, err)
+				// Should have the agent and user message
+				assert.Equal(t, len(msgs), 2)
+			})
+			t.Run("read a file", func(t *testing.T) {
+				agent, env := setupAgent(t, pair)
+
+				session, err := env.sessions.Create(t.Context(), "New Session")
+				require.NoError(t, err)
+				res, err := agent.Run(t.Context(), SessionAgentCall{
+					Prompt:          "Read the go mod",
+					SessionID:       session.ID,
+					MaxOutputTokens: 10000,
+				})
+
+				require.NoError(t, err)
+				assert.NotNil(t, res)
+
+				msgs, err := env.messages.List(t.Context(), session.ID)
+				require.NoError(t, err)
+				foundFile := false
+				var tcID string
+			out:
+				for _, msg := range msgs {
+					data, _ := json.Marshal(msg)
+					fmt.Println(string(data))
+					if msg.Role == message.Assistant {
+						for _, tc := range msg.ToolCalls() {
+							if tc.Name == tools.ViewToolName {
+								tcID = tc.ID
+							}
+						}
+					}
+					if msg.Role == message.Tool {
+						for _, tr := range msg.ToolResults() {
+							if tr.ToolCallID == tcID {
+								if strings.Contains(tr.Content, "module example.com/testproject") {
+									foundFile = true
+									break out
+								}
+							}
+						}
+					}
+				}
+				require.True(t, foundFile)
+			})
+		})
 	}
-
-	return testSessionAgent(env, large, small, systemPrompt, allTools...), nil
 }

internal/agent/coder.go

@@ -9,8 +9,8 @@ import (
 //go:embed templates/coder.gotmpl
 var coderPromptTmpl []byte
 
-func coderPrompt() (*prompt.Prompt, error) {
-	systemPrompt, err := prompt.NewPrompt("coder", string(coderPromptTmpl))
+func coderPrompt(opts ...prompt.Option) (*prompt.Prompt, error) {
+	systemPrompt, err := prompt.NewPrompt("coder", string(coderPromptTmpl), opts...)
 	if err != nil {
 		return nil, err
 	}

internal/agent/common_test.go

@@ -0,0 +1,187 @@
+package agent
+
+import (
+	"fmt"
+	"net/http"
+	"os"
+	"path/filepath"
+	"testing"
+	"time"
+
+	"github.com/charmbracelet/catwalk/pkg/catwalk"
+	"github.com/charmbracelet/crush/internal/agent/prompt"
+	"github.com/charmbracelet/crush/internal/agent/tools"
+	"github.com/charmbracelet/crush/internal/config"
+	"github.com/charmbracelet/crush/internal/csync"
+	"github.com/charmbracelet/crush/internal/db"
+	"github.com/charmbracelet/crush/internal/history"
+	"github.com/charmbracelet/crush/internal/lsp"
+	"github.com/charmbracelet/crush/internal/message"
+	"github.com/charmbracelet/crush/internal/permission"
+	"github.com/charmbracelet/crush/internal/session"
+	"github.com/charmbracelet/fantasy/ai"
+	"github.com/charmbracelet/fantasy/anthropic"
+	"github.com/charmbracelet/fantasy/openai"
+	"github.com/charmbracelet/fantasy/openrouter"
+	"github.com/stretchr/testify/require"
+	"gopkg.in/dnaeon/go-vcr.v4/pkg/recorder"
+
+	_ "github.com/joho/godotenv/autoload"
+)
+
+type env struct {
+	workingDir  string
+	sessions    session.Service
+	messages    message.Service
+	permissions permission.Service
+	history     history.Service
+	lspClients  *csync.Map[string, *lsp.Client]
+}
+
+type builderFunc func(t *testing.T, r *recorder.Recorder) (ai.LanguageModel, error)
+
+type modelPair struct {
+	name       string
+	largeModel builderFunc
+	smallModel builderFunc
+}
+
+func anthropicBuilder(model string) builderFunc {
+	return func(_ *testing.T, r *recorder.Recorder) (ai.LanguageModel, error) {
+		provider := anthropic.New(
+			anthropic.WithAPIKey(os.Getenv("CRUSH_ANTHROPIC_API_KEY")),
+			anthropic.WithHTTPClient(&http.Client{Transport: r}),
+		)
+		return provider.LanguageModel(model)
+	}
+}
+
+func openaiBuilder(model string) builderFunc {
+	return func(_ *testing.T, r *recorder.Recorder) (ai.LanguageModel, error) {
+		provider := openai.New(
+			openai.WithAPIKey(os.Getenv("CRUSH_OPENAI_API_KEY")),
+			openai.WithHTTPClient(&http.Client{Transport: r}),
+		)
+		return provider.LanguageModel(model)
+	}
+}
+
+func openRouterBuilder(model string) builderFunc {
+	return func(t *testing.T, r *recorder.Recorder) (ai.LanguageModel, error) {
+		tf := func() func() string {
+			id := 0
+			return func() string {
+				id += 1
+				return fmt.Sprintf("%s-%d", t.Name(), id)
+			}
+		}
+		provider := openrouter.New(
+			openrouter.WithAPIKey(os.Getenv("CRUSH_OPENROUTER_API_KEY")),
+			openrouter.WithHTTPClient(&http.Client{Transport: r}),
+			openrouter.WithLanguageUniqueToolCallIds(),
+			openrouter.WithLanguageModelGenerateIDFunc(tf()),
+		)
+		return provider.LanguageModel(model)
+	}
+}
+
+func testEnv(t *testing.T) env {
+	testDir := filepath.Join("/tmp/crush-test/", t.Name())
+	os.RemoveAll(testDir)
+	err := os.MkdirAll(testDir, 0o755)
+	t.Cleanup(func() {
+		os.RemoveAll(testDir)
+	})
+	require.NoError(t, err)
+	workingDir := testDir
+	conn, err := db.Connect(t.Context(), t.TempDir())
+	require.NoError(t, err)
+	q := db.New(conn)
+	sessions := session.NewService(q)
+	messages := message.NewService(q)
+	permissions := permission.NewPermissionService(workingDir, true, []string{})
+	history := history.NewService(q, conn)
+	lspClients := csync.NewMap[string, *lsp.Client]()
+	return env{
+		workingDir,
+		sessions,
+		messages,
+		permissions,
+		history,
+		lspClients,
+	}
+}
+
+func testSessionAgent(env env, large, small ai.LanguageModel, systemPrompt string, tools ...ai.AgentTool) SessionAgent {
+	largeModel := Model{
+		model:  large,
+		config: catwalk.Model{
+			// todo: add values
+		},
+	}
+	smallModel := Model{
+		model:  small,
+		config: catwalk.Model{
+			// todo: add values
+		},
+	}
+	agent := NewSessionAgent(largeModel, smallModel, systemPrompt, env.sessions, env.messages, tools...)
+	return agent
+}
+
+func coderAgent(env env, large, small ai.LanguageModel) (SessionAgent, error) {
+	fixedTime := func() time.Time {
+		t, _ := time.Parse("1/2/2006", "1/1/2025")
+		return t
+	}
+	prompt, err := coderPrompt(prompt.WithTimeFunc(fixedTime))
+	if err != nil {
+		return nil, err
+	}
+	cfg, err := config.Init(env.workingDir, "", false)
+	if err != nil {
+		return nil, err
+	}
+
+	systemPrompt, err := prompt.Build(large.Provider(), large.Model(), *cfg)
+	if err != nil {
+		return nil, err
+	}
+	allTools := []ai.AgentTool{
+		tools.NewBashTool(env.permissions, env.workingDir, cfg.Options.Attribution),
+		tools.NewDownloadTool(env.permissions, env.workingDir),
+		tools.NewEditTool(env.lspClients, env.permissions, env.history, env.workingDir),
+		tools.NewMultiEditTool(env.lspClients, env.permissions, env.history, env.workingDir),
+		tools.NewFetchTool(env.permissions, env.workingDir),
+		tools.NewGlobTool(env.workingDir),
+		tools.NewGrepTool(env.workingDir),
+		tools.NewLsTool(env.permissions, env.workingDir),
+		tools.NewSourcegraphTool(),
+		tools.NewViewTool(env.lspClients, env.permissions, env.workingDir),
+		tools.NewWriteTool(env.lspClients, env.permissions, env.history, env.workingDir),
+	}
+
+	return testSessionAgent(env, large, small, systemPrompt, allTools...), nil
+}
+
+// createSimpleGoProject creates a simple Go project structure in the given directory.
+// It creates a go.mod file and a main.go file with a basic hello world program.
+func createSimpleGoProject(t *testing.T, dir string) {
+	goMod := `module example.com/testproject
+
+go 1.23
+`
+	err := os.WriteFile(dir+"/go.mod", []byte(goMod), 0o644)
+	require.NoError(t, err)
+
+	mainGo := `package main
+
+import "fmt"
+
+func main() {
+	fmt.Println("Hello, World!")
+}
+`
+	err = os.WriteFile(dir+"/main.go", []byte(mainGo), 0o644)
+	require.NoError(t, err)
+}
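
One detail worth noting in the OpenRouter builder above: tool-call IDs come from a closure seeded with the test name, so replays against recorded cassettes see the same IDs on every run. A standalone sketch of that generator:

package main

import "fmt"

// idGenerator returns a closure that yields "<testName>-1", "<testName>-2", ...
// Seeding it with the (sub)test name keeps IDs unique per test and stable
// across recorder replays.
func idGenerator(testName string) func() string {
	id := 0
	return func() string {
		id++
		return fmt.Sprintf("%s-%d", testName, id)
	}
}

func main() {
	next := idGenerator("TestCoderAgent/openrouter-kimi-k2/read_a_file")
	fmt.Println(next()) // TestCoderAgent/openrouter-kimi-k2/read_a_file-1
	fmt.Println(next()) // TestCoderAgent/openrouter-kimi-k2/read_a_file-2
}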

internal/agent/prompt/prompt.go

@@ -17,6 +17,7 @@ import (
 type Prompt struct {
 	name     string
 	template string
+	now      func() time.Time
 }
 
 type PromptDat struct {
@@ -34,11 +35,24 @@ type ContextFile struct {
 	Content string
 }
 
-func NewPrompt(name, promptTemplate string) (*Prompt, error) {
-	return &Prompt{
+type Option func(*Prompt)
+
+func WithTimeFunc(fn func() time.Time) Option {
+	return func(p *Prompt) {
+		p.now = fn
+	}
+}
+
+func NewPrompt(name, promptTemplate string, opts ...Option) (*Prompt, error) {
+	p := &Prompt{
 		name:     name,
 		template: promptTemplate,
-	}, nil
+		now:      time.Now,
+	}
+	for _, opt := range opts {
+		opt(p)
+	}
+	return p, nil
 }
 
 func (p *Prompt) Build(provider, model string, cfg config.Config) (string, error) {
@@ -47,7 +61,7 @@ func (p *Prompt) Build(provider, model string, cfg config.Config) (string, error
 		return "", fmt.Errorf("parsing template: %w", err)
 	}
 	var sb strings.Builder
-	if err := t.Execute(&sb, promptData(provider, model, cfg)); err != nil {
+	if err := t.Execute(&sb, p.promptData(provider, model, cfg)); err != nil {
 		return "", fmt.Errorf("executing template: %w", err)
 	}
 
@@ -118,7 +132,7 @@ func expandPath(path string, cfg config.Config) string {
 	return path
 }
 
-func promptData(provider, model string, cfg config.Config) PromptDat {
+func (p *Prompt) promptData(provider, model string, cfg config.Config) PromptDat {
 	return PromptDat{
 		Provider:   provider,
 		Model:      model,
@@ -126,7 +140,7 @@ func promptData(provider, model string, cfg config.Config) PromptDat {
 		WorkingDir: cfg.WorkingDir(),
 		IsGitRepo:  isGitRepo(cfg.WorkingDir()),
 		Platform:   runtime.GOOS,
-		Date:       time.Now().Format("1/2/2006"),
+		Date:       p.now().Format("1/2/2006"),
 	}
 }
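
prompt.go gains a small functional-options API: NewPrompt defaults to time.Now, and WithTimeFunc lets tests pin the clock so the rendered Date stays stable in recorded cassettes. A standalone sketch of the same pattern with simplified stand-in types:

package main

import (
	"fmt"
	"time"
)

// Simplified stand-ins for the prompt package's Prompt and Option types.
type promptT struct {
	now func() time.Time
}

type option func(*promptT)

func withTimeFunc(fn func() time.Time) option {
	return func(p *promptT) { p.now = fn }
}

func newPrompt(opts ...option) *promptT {
	p := &promptT{now: time.Now} // default clock
	for _, opt := range opts {
		opt(p)
	}
	return p
}

func main() {
	fixed := func() time.Time {
		t, _ := time.Parse("1/2/2006", "1/1/2025")
		return t
	}
	p := newPrompt(withTimeFunc(fixed))
	fmt.Println(p.now().Format("1/2/2006")) // prints 1/1/2025
}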
 

internal/agent/testdata/TestCoderAgent/anthropic-sonnet/read_a_file.yaml

@@ -0,0 +1,177 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 630
+    host: ""
+    body: '{"max_tokens":40,"messages":[{"content":[{"text":"Generate a concise title for the following content:\n\nRead the go mod","type":"text"}],"role":"user"}],"model":"claude-3-5-haiku-20241022","system":[{"text":"you will generate a short title based on the first message a user begins a conversation with\n\n- ensure it is not more than 50 characters long\n- the title should be a summary of the user''s message\n- it should be one line long\n- do not use quotes or colons\n- the entire text you return will be used as the title\n- never return anything that is more than one sentence (one line) long\n","type":"text"}],"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - Anthropic/Go 1.12.0
+    url: https://api.anthropic.com/v1/messages
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      event: message_start
+      data: {"type":"message_start","message":{"id":"msg_01E1ouMmiDmjBTmxBzKbijL9","type":"message","role":"assistant","model":"claude-3-5-haiku-20241022","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":111,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"cache_creation":{"ephemeral_5m_input_tokens":0,"ephemeral_1h_input_tokens":0},"output_tokens":1,"service_tier":"standard"}}           }
+
+      event: content_block_start
+      data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""} }
+
+      event: content_block_delta
+      data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Review"}}
+
+      event: content_block_delta
+      data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" Go"}            }
+
+      event: ping
+      data: {"type": "ping"}
+
+      event: content_block_delta
+      data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" Module"}             }
+
+      event: content_block_delta
+      data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" Dependencies"}         }
+
+      event: content_block_stop
+      data: {"type":"content_block_stop","index":0           }
+
+      event: message_delta
+      data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"input_tokens":111,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"output_tokens":7}             }
+
+      event: message_stop
+      data: {"type":"message_stop"          }
+
+    headers:
+      Content-Type:
+      - text/event-stream; charset=utf-8
+    status: 200 OK
+    code: 200
+    duration: 581.664125ms
+- id: 1
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 31913
+    host: ""

internal/agent/testdata/TestCoderAgent/anthropic-sonnet/simple_test.yaml

@@ -0,0 +1,109 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 620
+    host: ""
+    body: '{"max_tokens":40,"messages":[{"content":[{"text":"Generate a concise title for the following content:\n\nHello","type":"text"}],"role":"user"}],"model":"claude-3-5-haiku-20241022","system":[{"text":"you will generate a short title based on the first message a user begins a conversation with\n\n- ensure it is not more than 50 characters long\n- the title should be a summary of the user''s message\n- it should be one line long\n- do not use quotes or colons\n- the entire text you return will be used as the title\n- never return anything that is more than one sentence (one line) long\n","type":"text"}],"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - Anthropic/Go 1.12.0
+    url: https://api.anthropic.com/v1/messages
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      event: message_start
+      data: {"type":"message_start","message":{"id":"msg_01EA1yHe2hGWqG98PVSSf5s8","type":"message","role":"assistant","model":"claude-3-5-haiku-20241022","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":108,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"cache_creation":{"ephemeral_5m_input_tokens":0,"ephemeral_1h_input_tokens":0},"output_tokens":2,"service_tier":"standard"}} }
+
+      event: content_block_start
+      data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}           }
+
+      event: content_block_delta
+      data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Greeting"}               }
+
+      event: content_block_stop
+      data: {"type":"content_block_stop","index":0    }
+
+      event: message_delta
+      data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"input_tokens":108,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"output_tokens":5}              }
+
+      event: message_stop
+      data: {"type":"message_stop"      }
+
+    headers:
+      Content-Type:
+      - text/event-stream; charset=utf-8
+    status: 200 OK
+    code: 200
+    duration: 642.344708ms
+- id: 1
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 31903
+    host: ""

internal/agent/testdata/TestCoderAgent/openai-gpt-5/read_a_file.yaml

@@ -0,0 +1,273 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 620
+    host: ""
+    body: '{"messages":[{"content":"you will generate a short title based on the first message a user begins a conversation with\n\n- ensure it is not more than 50 characters long\n- the title should be a summary of the user''s message\n- it should be one line long\n- do not use quotes or colons\n- the entire text you return will be used as the title\n- never return anything that is more than one sentence (one line) long\n","role":"system"},{"content":"Generate a concise title for the following content:\n\nRead the go mod","role":"user"}],"model":"gpt-4o","max_tokens":40,"stream_options":{"include_usage":true},"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.3.0
+    url: https://api.openai.com/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"chatcmpl-CLnydQKroNZFqVZIZi19ouB0wEbNO","object":"chat.completion.chunk","created":1759313035,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_f33640a400","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"1sl8OkqH6sCvFZ"}
+
+      data: {"id":"chatcmpl-CLnydQKroNZFqVZIZi19ouB0wEbNO","object":"chat.completion.chunk","created":1759313035,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_f33640a400","choices":[{"index":0,"delta":{"content":"Understanding"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"l13"}
+
+      data: {"id":"chatcmpl-CLnydQKroNZFqVZIZi19ouB0wEbNO","object":"chat.completion.chunk","created":1759313035,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_f33640a400","choices":[{"index":0,"delta":{"content":" the"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"1Vo2490BdRxb"}
+
+      data: {"id":"chatcmpl-CLnydQKroNZFqVZIZi19ouB0wEbNO","object":"chat.completion.chunk","created":1759313035,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_f33640a400","choices":[{"index":0,"delta":{"content":" Go"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"aBaqSx7ueCbH0"}
+
+      data: {"id":"chatcmpl-CLnydQKroNZFqVZIZi19ouB0wEbNO","object":"chat.completion.chunk","created":1759313035,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_f33640a400","choices":[{"index":0,"delta":{"content":" Module"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"jT2j800IA"}
+
+      data: {"id":"chatcmpl-CLnydQKroNZFqVZIZi19ouB0wEbNO","object":"chat.completion.chunk","created":1759313035,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_f33640a400","choices":[{"index":0,"delta":{"content":" File"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"xq2s7jupXFD"}
+
+      data: {"id":"chatcmpl-CLnydQKroNZFqVZIZi19ouB0wEbNO","object":"chat.completion.chunk","created":1759313035,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_f33640a400","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}],"usage":null,"obfuscation":"XAJrr1rC3o"}
+
+      data: {"id":"chatcmpl-CLnydQKroNZFqVZIZi19ouB0wEbNO","object":"chat.completion.chunk","created":1759313035,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_f33640a400","choices":[],"usage":{"prompt_tokens":112,"completion_tokens":5,"total_tokens":117,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}},"obfuscation":"4dZAIDY5siwB84"}
+
+      data: [DONE]
+
+    headers:
+      Content-Type:
+      - text/event-stream; charset=utf-8
+    status: 200 OK
+    code: 200
+    duration: 694.339416ms
+- id: 1
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 30533
+    host: ""

internal/agent/testdata/TestCoderAgent/openai-gpt-5/simple_test.yaml

@@ -0,0 +1,105 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 610
+    host: ""
+    body: '{"messages":[{"content":"you will generate a short title based on the first message a user begins a conversation with\n\n- ensure it is not more than 50 characters long\n- the title should be a summary of the user''s message\n- it should be one line long\n- do not use quotes or colons\n- the entire text you return will be used as the title\n- never return anything that is more than one sentence (one line) long\n","role":"system"},{"content":"Generate a concise title for the following content:\n\nHello","role":"user"}],"model":"gpt-4o","max_tokens":40,"stream_options":{"include_usage":true},"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.3.0
+    url: https://api.openai.com/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"chatcmpl-CLnyZUC3HECSNiCyPUyFNYk3jkToU","object":"chat.completion.chunk","created":1759313031,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_cbf1785567","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"FQ2C1HyimwsSIJ"}
+
+      data: {"id":"chatcmpl-CLnyZUC3HECSNiCyPUyFNYk3jkToU","object":"chat.completion.chunk","created":1759313031,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_cbf1785567","choices":[{"index":0,"delta":{"content":"User"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"LIC7mLokOZ9E"}
+
+      data: {"id":"chatcmpl-CLnyZUC3HECSNiCyPUyFNYk3jkToU","object":"chat.completion.chunk","created":1759313031,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_cbf1785567","choices":[{"index":0,"delta":{"content":" Gre"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"r7SBrJb3ou5J"}
+
+      data: {"id":"chatcmpl-CLnyZUC3HECSNiCyPUyFNYk3jkToU","object":"chat.completion.chunk","created":1759313031,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_cbf1785567","choices":[{"index":0,"delta":{"content":"ets"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"zUr2uM9B9K3ob"}
+
+      data: {"id":"chatcmpl-CLnyZUC3HECSNiCyPUyFNYk3jkToU","object":"chat.completion.chunk","created":1759313031,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_cbf1785567","choices":[{"index":0,"delta":{"content":" with"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"csC49xwKSOL"}
+
+      data: {"id":"chatcmpl-CLnyZUC3HECSNiCyPUyFNYk3jkToU","object":"chat.completion.chunk","created":1759313031,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_cbf1785567","choices":[{"index":0,"delta":{"content":" a"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"U2SfAZTiySGrTd"}
+
+      data: {"id":"chatcmpl-CLnyZUC3HECSNiCyPUyFNYk3jkToU","object":"chat.completion.chunk","created":1759313031,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_cbf1785567","choices":[{"index":0,"delta":{"content":" Simple"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"kF2mXsyQE"}
+
+      data: {"id":"chatcmpl-CLnyZUC3HECSNiCyPUyFNYk3jkToU","object":"chat.completion.chunk","created":1759313031,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_cbf1785567","choices":[{"index":0,"delta":{"content":" Hello"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"2Ffvui00sv"}
+
+      data: {"id":"chatcmpl-CLnyZUC3HECSNiCyPUyFNYk3jkToU","object":"chat.completion.chunk","created":1759313031,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_cbf1785567","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}],"usage":null,"obfuscation":"wj1wg1XKPv"}
+
+      data: {"id":"chatcmpl-CLnyZUC3HECSNiCyPUyFNYk3jkToU","object":"chat.completion.chunk","created":1759313031,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_cbf1785567","choices":[],"usage":{"prompt_tokens":109,"completion_tokens":7,"total_tokens":116,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}},"obfuscation":"hss1b1Nj1zXerq"}
+
+      data: [DONE]
+
+    headers:
+      Content-Type:
+      - text/event-stream; charset=utf-8
+    status: 200 OK
+    code: 200
+    duration: 1.173388541s
+- id: 1
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 30523
+    host: ""

internal/agent/testdata/TestCoderAgent/openrouter-kimi-k2/simple_test.yaml

@@ -0,0 +1,83 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 661
+    host: ""
+    body: '{"messages":[{"content":"you will generate a short title based on the first message a user begins a conversation with\n\n- ensure it is not more than 50 characters long\n- the title should be a summary of the user''s message\n- it should be one line long\n- do not use quotes or colons\n- the entire text you return will be used as the title\n- never return anything that is more than one sentence (one line) long\n","role":"system"},{"content":"Generate a concise title for the following content:\n\nHello","role":"user"}],"model":"qwen/qwen3-next-80b-a3b-instruct","max_tokens":40,"stream_options":{"include_usage":true},"usage":{"include":true},"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.3.0
+    url: https://openrouter.ai/api/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen-1759316452-zjAj0sEKiIFFAKMkb9ry","provider":"Chutes","model":"qwen/qwen3-next-80b-a3b-instruct","object":"chat.completion.chunk","created":1759316452,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]}
+
+      data: {"id":"gen-1759316452-zjAj0sEKiIFFAKMkb9ry","provider":"Chutes","model":"qwen/qwen3-next-80b-a3b-instruct","object":"chat.completion.chunk","created":1759316452,"choices":[{"index":0,"delta":{"role":"assistant","content":"Hello"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]}
+
+      data: {"id":"gen-1759316452-zjAj0sEKiIFFAKMkb9ry","provider":"Chutes","model":"qwen/qwen3-next-80b-a3b-instruct","object":"chat.completion.chunk","created":1759316452,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":"stop","native_finish_reason":"stop","logprobs":null}]}
+
+      data: {"id":"gen-1759316452-zjAj0sEKiIFFAKMkb9ry","provider":"Chutes","model":"qwen/qwen3-next-80b-a3b-instruct","object":"chat.completion.chunk","created":1759316452,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"usage":{"prompt_tokens":113,"completion_tokens":2,"total_tokens":115,"cost":0.0000129,"is_byok":false,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"cost_details":{"upstream_inference_cost":null,"upstream_inference_prompt_cost":0.0000113,"upstream_inference_completions_cost":0.0000016},"completion_tokens_details":{"reasoning_tokens":0,"image_tokens":0}}}
+
+      data: [DONE]
+
+    headers:
+      Content-Type:
+      - text/event-stream
+    status: 200 OK
+    code: 200
+    duration: 1.246887958s
+- id: 1
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 30561
+    host: ""

internal/agent/testdata/TestCoderAgent/simple_test.yaml

@@ -1,121 +0,0 @@
----
-version: 2
-interactions:
-- id: 0
-  request:
-    proto: HTTP/1.1
-    proto_major: 1
-    proto_minor: 1
-    content_length: 620
-    host: ""
-    body: '{"max_tokens":40,"messages":[{"content":[{"text":"Generate a concise title for the following content:\n\nHello","type":"text"}],"role":"user"}],"model":"claude-3-5-haiku-20241022","system":[{"text":"you will generate a short title based on the first message a user begins a conversation with\n\n- ensure it is not more than 50 characters long\n- the title should be a summary of the user''s message\n- it should be one line long\n- do not use quotes or colons\n- the entire text you return will be used as the title\n- never return anything that is more than one sentence (one line) long\n","type":"text"}],"stream":true}'
-    headers:
-      Accept:
-      - application/json
-      Content-Type:
-      - application/json
-      User-Agent:
-      - Anthropic/Go 1.12.0
-    url: https://api.anthropic.com/v1/messages
-    method: POST
-  response:
-    proto: HTTP/2.0
-    proto_major: 2
-    proto_minor: 0
-    content_length: -1
-    body: |+
-      event: message_start
-      data: {"type":"message_start","message":{"id":"msg_01VNJNnqH4TvCQ2YLgKUQrRT","type":"message","role":"assistant","model":"claude-3-5-haiku-20241022","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":108,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"cache_creation":{"ephemeral_5m_input_tokens":0,"ephemeral_1h_input_tokens":0},"output_tokens":1,"service_tier":"standard"}}}
-
-      event: content_block_start
-      data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}              }
-
-      event: content_block_delta
-      data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"First"}      }
-
-      event: content_block_delta
-      data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" Contact"}}
-
-      event: ping
-      data: {"type": "ping"}
-
-      event: content_block_stop
-      data: {"type":"content_block_stop","index":0       }
-
-      event: ping
-      data: {"type": "ping"}
-
-      event: ping
-      data: {"type": "ping"}
-
-      event: message_delta
-      data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"input_tokens":108,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"output_tokens":5}            }
-
-      event: message_stop
-      data: {"type":"message_stop"               }
-
-    headers:
-      Content-Type:
-      - text/event-stream; charset=utf-8
-    status: 200 OK
-    code: 200
-    duration: 619.065458ms
-- id: 1
-  request:
-    proto: HTTP/1.1
-    proto_major: 1
-    proto_minor: 1
-    content_length: 31989
-    host: ""

internal/agent/testdata/TestSessionSimpleAgent.yaml

@@ -1,127 +0,0 @@
----
-version: 2
-interactions:
-- id: 0
-  request:
-    proto: HTTP/1.1
-    proto_major: 1
-    proto_minor: 1
-    content_length: 620
-    host: ""
-    body: '{"max_tokens":40,"messages":[{"content":[{"text":"Generate a concise title for the following content:\n\nHello","type":"text"}],"role":"user"}],"model":"claude-3-5-haiku-20241022","system":[{"text":"you will generate a short title based on the first message a user begins a conversation with\n\n- ensure it is not more than 50 characters long\n- the title should be a summary of the user''s message\n- it should be one line long\n- do not use quotes or colons\n- the entire text you return will be used as the title\n- never return anything that is more than one sentence (one line) long\n","type":"text"}],"stream":true}'
-    headers:
-      Accept:
-      - application/json
-      Content-Type:
-      - application/json
-      User-Agent:
-      - Anthropic/Go 1.12.0
-    url: https://api.anthropic.com/v1/messages
-    method: POST
-  response:
-    proto: HTTP/2.0
-    proto_major: 2
-    proto_minor: 0
-    content_length: -1
-    body: |+
-      event: message_start
-      data: {"type":"message_start","message":{"id":"msg_011Y36fsS8dYbr8fbjbwBHLA","type":"message","role":"assistant","model":"claude-3-5-haiku-20241022","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":108,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"cache_creation":{"ephemeral_5m_input_tokens":0,"ephemeral_1h_input_tokens":0},"output_tokens":1,"service_tier":"standard"}}        }
-
-      event: content_block_start
-      data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}      }
-
-      event: content_block_delta
-      data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"New"} }
-
-      event: content_block_delta
-      data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" Conversation"}           }
-
-      event: ping
-      data: {"type": "ping"}
-
-      event: content_block_delta
-      data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" Starter"}             }
-
-      event: ping
-      data: {"type": "ping"}
-
-      event: content_block_stop
-      data: {"type":"content_block_stop","index":0               }
-
-      event: ping
-      data: {"type": "ping"}
-
-      event: ping
-      data: {"type": "ping"}
-
-      event: message_delta
-      data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"input_tokens":108,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"output_tokens":8}       }
-
-      event: message_stop
-      data: {"type":"message_stop" }
-
-    headers:
-      Content-Type:
-      - text/event-stream; charset=utf-8
-    status: 200 OK
-    code: 200
-    duration: 583.077125ms
-- id: 1
-  request:
-    proto: HTTP/1.1
-    proto_major: 1
-    proto_minor: 1
-    content_length: 281
-    host: ""
-    body: '{"max_tokens":10000,"messages":[{"content":[{"text":"Hello","cache_control":{"type":"ephemeral"},"type":"text"}],"role":"user"}],"model":"claude-sonnet-4-5-20250929","system":[{"text":"You are a helpful assistant","cache_control":{"type":"ephemeral"},"type":"text"}],"stream":true}'
-    headers:
-      Accept:
-      - application/json
-      Content-Type:
-      - application/json
-      User-Agent:
-      - Anthropic/Go 1.12.0
-    url: https://api.anthropic.com/v1/messages
-    method: POST
-  response:
-    proto: HTTP/2.0
-    proto_major: 2
-    proto_minor: 0
-    content_length: -1
-    body: |+
-      event: message_start
-      data: {"type":"message_start","message":{"id":"msg_01VPFYRaiH21mFLcGUeUk3Gv","type":"message","role":"assistant","model":"claude-sonnet-4-5-20250929","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":13,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"cache_creation":{"ephemeral_5m_input_tokens":0,"ephemeral_1h_input_tokens":0},"output_tokens":3,"service_tier":"standard"}}  }
-
-      event: content_block_start
-      data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}           }
-
-      event: content_block_delta
-      data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hello! How"}            }
-
-      event: content_block_delta
-      data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" can I help you today?"}         }
-
-      event: ping
-      data: {"type": "ping"}
-
-      event: content_block_stop
-      data: {"type":"content_block_stop","index":0     }
-
-      event: ping
-      data: {"type": "ping"}
-
-      event: ping
-      data: {"type": "ping"}
-
-      event: message_delta
-      data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"input_tokens":13,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"output_tokens":12}      }
-
-      event: message_stop
-      data: {"type":"message_stop"     }
-
-    headers:
-      Content-Type:
-      - text/event-stream; charset=utf-8
-    status: 200 OK
-    code: 200
-    duration: 1.658454625s

internal/agent/tools/glob.go

@@ -22,7 +22,7 @@ var globDescription []byte
 
 type GlobParams struct {
 	Pattern string `json:"pattern" description:"The glob pattern to match files against"`
-	Path    string `json:"path" description:"The directory to search in. Defaults to the current working directory."`
+	Path    string `json:"path,omitempty" description:"The directory to search in. Defaults to the current working directory."`
 }
 
 type GlobResponseMetadata struct {

internal/agent/tools/grep.go

@@ -72,9 +72,9 @@ var (
 
 type GrepParams struct {
 	Pattern     string `json:"pattern" description:"The regex pattern to search for in file contents"`
-	Path        string `json:"path" description:"The directory to search in. Defaults to the current working directory."`
-	Include     string `json:"include" description:"File pattern to include in the search (e.g. \"*.js\", \"*.{ts,tsx}\")"`
-	LiteralText bool   `json:"literal_text" description:"If true, the pattern will be treated as literal text with special regex characters escaped. Default is false."`
+	Path        string `json:"path,omitempty" description:"The directory to search in. Defaults to the current working directory."`
+	Include     string `json:"include,omitempty" description:"File pattern to include in the search (e.g. \"*.js\", \"*.{ts,tsx}\")"`
+	LiteralText bool   `json:"literal_text,omitempty" description:"If true, the pattern will be treated as literal text with special regex characters escaped. Default is false."`
 }
 
 type grepMatch struct {

internal/agent/tools/view.go

@@ -22,8 +22,8 @@ var viewDescription []byte
 
 type ViewParams struct {
 	FilePath string `json:"file_path" description:"The path to the file to read"`
-	Offset   int    `json:"offset" description:"The line number to start reading from (0-based)"`
-	Limit    int    `json:"limit" description:"The number of lines to read (defaults to 2000)"`
+	Offset   int    `json:"offset,omitempty" description:"The line number to start reading from (0-based)"`
+	Limit    int    `json:"limit,omitempty" description:"The number of lines to read (defaults to 2000)"`
 }
 
 type ViewPermissionsParams struct {
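
The omitempty additions in glob.go, grep.go, and view.go mark these parameters optional; with encoding/json, zero-valued optional fields are then omitted from marshaled tool payloads. A minimal illustration using a trimmed copy of ViewParams:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copy of ViewParams: only file_path is required, the rest is optional.
type viewParams struct {
	FilePath string `json:"file_path"`
	Offset   int    `json:"offset,omitempty"`
	Limit    int    `json:"limit,omitempty"`
}

func main() {
	b, _ := json.Marshal(viewParams{FilePath: "go.mod"})
	fmt.Println(string(b)) // {"file_path":"go.mod"}
}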