feat(tools): add crush_logs tool for reading application logs

Created by Christian Rocha

Implements a tool that reads Crush's own log files and returns
formatted entries in compact text format. Supports:

- Tail-style reading (last N entries, default 50, max 100)
- Sensitive value redaction (token, secret, api_key, etc.)
- JSON value formatting with proper escaping
- Chronological ordering (oldest first)
- Graceful handling of missing/empty files and malformed lines

Approximate overhead:

• Tool definition cost (always in prompt): ~1,000 bytes ≈ 250 tokens
• Per invocation response (50 lines): ~4,000 bytes ≈ 1,000 tokens
• Per invocation response (100 lines): ~8,000 bytes ≈ 2,000 tokens

💖 Generated with Crush

Assisted-by: Kimi-K2.5 via Crush <crush@charm.land>

Change summary

internal/agent/coordinator.go           |   4 
internal/agent/tools/crush_logs.go      | 496 +++++++++++++++++++++++++
internal/agent/tools/crush_logs.md      |  13 
internal/agent/tools/crush_logs_test.go | 530 +++++++++++++++++++++++++++
internal/config/config.go               |   1 
internal/config/load_test.go            |   4 
6 files changed, 1,046 insertions(+), 2 deletions(-)

Detailed changes

internal/agent/coordinator.go 🔗

@@ -12,6 +12,7 @@ import (
 	"maps"
 	"net/http"
 	"os"
+	"path/filepath"
 	"slices"
 	"strings"
 
@@ -445,9 +446,12 @@ func (c *coordinator) buildTools(ctx context.Context, agent config.Agent) ([]fan
 		}
 	}
 
+	logFile := filepath.Join(c.cfg.Config().Options.DataDirectory, "logs", "crush.log")
+
 	allTools = append(allTools,
 		tools.NewBashTool(c.permissions, c.cfg.WorkingDir(), c.cfg.Config().Options.Attribution, modelName),
 		tools.NewCrushInfoTool(c.cfg, c.lspManager),
+		tools.NewCrushLogsTool(logFile),
 		tools.NewJobOutputTool(),
 		tools.NewJobKillTool(),
 		tools.NewDownloadTool(c.permissions, c.cfg.WorkingDir(), nil),

internal/agent/tools/crush_logs.go 🔗

@@ -0,0 +1,496 @@
+package tools
+
+import (
+	"context"
+	_ "embed"
+	"encoding/json"
+	"fmt"
+	"io"
+	"os"
+	"path/filepath"
+	"sort"
+	"strconv"
+	"strings"
+	"time"
+
+	"charm.land/fantasy"
+)
+
const CrushLogsToolName = "crush_logs"

// Tool description shown to the model, kept in a separate markdown file.
//go:embed crush_logs.md
var crushLogsDescription []byte

// Max line size to prevent memory issues with very long log lines (1 MB).
// Longer lines are skipped silently by the log reader.
const maxLogLineSize = 1024 * 1024

// Default and max line limits for a single crush_logs invocation.
const (
	defaultLogLines = 50
	maxLogLines     = 100
)

// Reserved fields that should not appear as extra key=value pairs.
// Case-insensitive matching is used (see isReservedField).
var reservedFields = map[string]bool{
	"time":   true,
	"level":  true,
	"source": true,
	"msg":    true,
}

// Sensitive field keys that should be redacted. Matching is case-insensitive
// and by substring (see isSensitiveKey), so e.g. "my_token_value" is also
// redacted.
var sensitiveKeys = []string{
	"authorization",
	"api-key",
	"api_key",
	"apikey",
	"token",
	"secret",
	"password",
	"credential",
}

// CrushLogsParams is the JSON parameter payload accepted by the crush_logs
// tool.
type CrushLogsParams struct {
	Lines int `json:"lines,omitempty" description:"Number of recent log entries to return (default 50, max 100)"`
}
+
+func NewCrushLogsTool(logFile string) fantasy.AgentTool {
+	return fantasy.NewAgentTool(
+		CrushLogsToolName,
+		string(crushLogsDescription),
+		func(ctx context.Context, params CrushLogsParams, call fantasy.ToolCall) (fantasy.ToolResponse, error) {
+			result := runCrushLogs(logFile, params)
+			return fantasy.NewTextResponse(result), nil
+		},
+	)
+}
+
+// runCrushLogs reads and formats the last N log entries from the given file.
+func runCrushLogs(logFile string, params CrushLogsParams) string {
+	// Validate and clamp the lines parameter.
+	lines := params.Lines
+	if lines <= 0 {
+		lines = defaultLogLines
+	}
+	if lines > maxLogLines {
+		lines = maxLogLines
+	}
+
+	// Check if file exists.
+	info, err := os.Stat(logFile)
+	if err != nil {
+		if os.IsNotExist(err) {
+			return "No log file found"
+		}
+		return fmt.Sprintf("Error accessing log file: %v", err)
+	}
+
+	if info.Size() == 0 {
+		return "Log file is empty"
+	}
+
+	// Read the last N lines from the log file.
+	logEntries, err := readLastLines(logFile, lines)
+	if err != nil {
+		return fmt.Sprintf("Error reading log file: %v", err)
+	}
+
+	if len(logEntries) == 0 {
+		return "Log file is empty"
+	}
+
+	// Format and return the entries.
+	formatted := formatLogEntries(logEntries)
+	return strings.Join(formatted, "\n")
+}
+
+// readLastLines reads the last n lines from a file by seeking to the end and
+// scanning backwards. Lines exceeding maxLogLineSize are skipped.
+func readLastLines(filePath string, n int) ([]map[string]any, error) {
+	file, err := os.Open(filePath)
+	if err != nil {
+		return nil, err
+	}
+	defer file.Close()
+
+	stat, err := file.Stat()
+	if err != nil {
+		return nil, err
+	}
+
+	if stat.Size() == 0 {
+		return nil, nil
+	}
+
+	// Seek to end and read chunks backwards.
+	var entries []map[string]any
+	const chunkSize = 8192 // 8KB chunks
+
+	pos := stat.Size()
+	var remainder []byte
+
+	for pos > 0 && len(entries) < n {
+		chunkStart := max(pos-chunkSize, 0)
+
+		chunkLen := int(pos - chunkStart)
+		if chunkLen == 0 {
+			break
+		}
+
+		_, err := file.Seek(chunkStart, 0)
+		if err != nil {
+			return nil, err
+		}
+
+		chunk := make([]byte, chunkLen)
+		_, err = io.ReadFull(file, chunk)
+		if err != nil {
+			return nil, err
+		}
+
+		// Combine with remainder from previous (earlier) chunk.
+		data := append(chunk, remainder...)
+
+		// Split into lines (without the final incomplete line if any).
+		lines := splitLines(data)
+
+		// Keep the incomplete line for next iteration.
+		if len(data) > 0 && data[len(data)-1] != '\n' {
+			remainder = lines[len(lines)-1]
+			lines = lines[:len(lines)-1]
+		} else {
+			remainder = nil
+		}
+
+		// Parse lines from end to start to get most recent first.
+		for i := len(lines) - 1; i >= 0; i-- {
+			if len(lines[i]) > maxLogLineSize {
+				// Skip oversized lines silently.
+				continue
+			}
+
+			// Try to parse as JSON.
+			var entry map[string]any
+			if err := json.Unmarshal(lines[i], &entry); err != nil {
+				// Skip malformed lines silently.
+				continue
+			}
+
+			entries = append(entries, entry)
+			if len(entries) >= n {
+				break
+			}
+		}
+
+		pos = chunkStart
+	}
+
+	// Handle final remainder.
+	if len(remainder) > 0 && len(remainder) <= maxLogLineSize {
+		var entry map[string]any
+		if err := json.Unmarshal(remainder, &entry); err == nil {
+			if len(entries) < n {
+				entries = append(entries, entry)
+			}
+		}
+	}
+
+	// Reverse to get chronological order (oldest first).
+	for i, j := 0, len(entries)-1; i < j; i, j = i+1, j-1 {
+		entries[i], entries[j] = entries[j], entries[i]
+	}
+
+	return entries, nil
+}
+
+// splitLines splits data into lines without allocating strings.
+func splitLines(data []byte) [][]byte {
+	var lines [][]byte
+	start := 0
+	for i := range len(data) {
+		if data[i] == '\n' {
+			lines = append(lines, data[start:i])
+			start = i + 1
+		}
+	}
+	if start < len(data) {
+		lines = append(lines, data[start:])
+	}
+	return lines
+}
+
+// formatLogEntries formats log entries into compact text format.
+func formatLogEntries(entries []map[string]any) []string {
+	var result []string
+	for _, entry := range entries {
+		result = append(result, formatLogEntry(entry))
+	}
+	return result
+}
+
+// formatLogEntry formats a single log entry into compact text format:
+// TIMESTAMP LEVEL SOURCE:LINE MESSAGE key=value...
+func formatLogEntry(entry map[string]any) string {
+	var parts []string
+
+	// Extract and format timestamp (time-only, no date).
+	timeStr := extractTime(entry)
+	parts = append(parts, timeStr)
+
+	// Extract level.
+	level := extractLevel(entry)
+	parts = append(parts, level)
+
+	// Extract source.
+	source := extractSource(entry)
+	parts = append(parts, source)
+
+	// Extract message.
+	msg := extractMessage(entry)
+
+	// Collect extra fields (excluding reserved fields).
+	extraFields := extractExtraFields(entry)
+
+	// Build the output.
+	var b strings.Builder
+	for i, part := range parts {
+		if i > 0 {
+			b.WriteByte(' ')
+		}
+		b.WriteString(part)
+	}
+	b.WriteByte(' ')
+	b.WriteString(msg)
+
+	// Append sorted key=value pairs.
+	if len(extraFields) > 0 {
+		keys := make([]string, 0, len(extraFields))
+		for k := range extraFields {
+			keys = append(keys, k)
+		}
+		sort.Strings(keys)
+
+		for _, k := range keys {
+			b.WriteByte(' ')
+			b.WriteString(k)
+			b.WriteByte('=')
+			b.WriteString(formatValue(extraFields[k], k))
+		}
+	}
+
+	return b.String()
+}
+
// extractTime extracts and formats the timestamp from a log entry.
// It returns a time-only rendering (15:04:05), or a "--:--:--" placeholder
// when the field is missing, not a string, or unparsable.
func extractTime(entry map[string]any) string {
	const placeholder = "--:--:--"

	raw, ok := entry["time"].(string)
	if !ok {
		return placeholder
	}

	// RFC3339 first (slog's default), then a common zone-less variant.
	for _, layout := range []string{time.RFC3339, "2006-01-02T15:04:05"} {
		if t, err := time.Parse(layout, raw); err == nil {
			return t.Format("15:04:05")
		}
	}
	return placeholder
}
+
// extractLevel extracts and normalizes the log level. Unknown, missing, or
// non-string levels fall back to "INFO"; "WARNING" is shortened to "WARN".
func extractLevel(entry map[string]any) string {
	raw, ok := entry["level"].(string)
	if !ok {
		return "INFO"
	}

	switch up := strings.ToUpper(raw); up {
	case "DEBUG", "INFO", "WARN", "ERROR":
		return up
	case "WARNING":
		return "WARN"
	default:
		return "INFO"
	}
}
+
// extractSource extracts the source location from a log entry as "file:line"
// (basename only). Source may be a plain string or a slog-style object with
// "file" and "line"; anything else yields "unknown:0".
func extractSource(entry map[string]any) string {
	const fallback = "unknown:0"

	switch src := entry["source"].(type) {
	case string:
		return filepath.Base(src)
	case map[string]any:
		file, ok := src["file"].(string)
		if !ok {
			return fallback
		}
		// JSON numbers decode as float64; int and json.Number are handled
		// for callers that build entries programmatically.
		line := 0
		switch v := src["line"].(type) {
		case float64:
			line = int(v)
		case int:
			line = v
		case json.Number:
			if n, err := v.Int64(); err == nil {
				line = int(n)
			}
		}
		return filepath.Base(file) + ":" + strconv.Itoa(line)
	default:
		return fallback
	}
}
+
// extractMessage extracts the log message. A missing field yields "", a
// non-string value is rendered with fmt's default formatting.
func extractMessage(entry map[string]any) string {
	val, present := entry["msg"]
	if !present {
		return ""
	}
	if s, isString := val.(string); isString {
		return s
	}
	return fmt.Sprintf("%v", val)
}
+
+// extractExtraFields extracts all non-reserved fields from a log entry.
+func extractExtraFields(entry map[string]any) map[string]any {
+	result := make(map[string]any)
+	for k, v := range entry {
+		// Skip reserved fields (case-insensitive).
+		if isReservedField(k) {
+			continue
+		}
+		// Redact sensitive values.
+		if isSensitiveKey(k) {
+			result[k] = "[REDACTED]"
+		} else {
+			result[k] = v
+		}
+	}
+	return result
+}
+
+// isReservedField checks if a field name is reserved (case-insensitive).
+func isReservedField(name string) bool {
+	lowerName := strings.ToLower(name)
+	return reservedFields[lowerName]
+}
+
+// isSensitiveKey checks if a key contains sensitive information (case-insensitive).
+func isSensitiveKey(name string) bool {
+	lowerName := strings.ToLower(name)
+	for _, sensitive := range sensitiveKeys {
+		if strings.Contains(lowerName, sensitive) {
+			return true
+		}
+	}
+	return false
+}
+
+// formatValue formats a value according to the quoting rules.
+func formatValue(value any, key string) string {
+	// Redact sensitive values (second check for safety).
+	if isSensitiveKey(key) {
+		return "[REDACTED]"
+	}
+
+	switch v := value.(type) {
+	case string:
+		return formatStringValue(v)
+	case float64:
+		// Check if it's actually an integer.
+		if v == float64(int64(v)) {
+			return strconv.FormatInt(int64(v), 10)
+		}
+		return strconv.FormatFloat(v, 'f', -1, 64)
+	case int:
+		return strconv.Itoa(v)
+	case int64:
+		return strconv.FormatInt(v, 10)
+	case bool:
+		return strconv.FormatBool(v)
+	case nil:
+		return "null"
+	case map[string]any, []any:
+		// Objects and arrays are JSON-encoded and quoted.
+		jsonBytes, err := json.Marshal(v)
+		if err != nil {
+			return quoteString(fmt.Sprintf("%v", v))
+		}
+		return quoteString(string(jsonBytes))
+	default:
+		return quoteString(fmt.Sprintf("%v", v))
+	}
+}
+
+// formatStringValue formats a string value with quoting if needed.
+func formatStringValue(s string) string {
+	// Quote if empty, contains spaces, =, newlines, or special chars.
+	needsQuote := len(s) == 0 ||
+		strings.ContainsAny(s, " =\n\r\t\"") ||
+		strings.Contains(s, "\\")
+
+	if !needsQuote {
+		return s
+	}
+
+	return quoteString(s)
+}
+
// quoteString wraps s in double quotes, backslash-escaping quotes,
// backslashes, and the control characters \n, \r, and \t.
func quoteString(s string) string {
	var b strings.Builder
	b.Grow(len(s) + 2)
	b.WriteByte('"')
	for _, c := range []byte(s) {
		switch c {
		case '"', '\\':
			b.WriteByte('\\')
			b.WriteByte(c)
		case '\n':
			b.WriteString(`\n`)
		case '\r':
			b.WriteString(`\r`)
		case '\t':
			b.WriteString(`\t`)
		default:
			b.WriteByte(c)
		}
	}
	b.WriteByte('"')
	return b.String()
}

internal/agent/tools/crush_logs.md 🔗

@@ -0,0 +1,13 @@
+Read Crush's own application logs.
+
+<usage>
+- Returns recent log entries from Crush's internal log file
+- Use to diagnose issues with Crush itself (provider errors, tool failures,
+  LSP problems, MCP connection issues)
+- Entries shown in compact format: TIME LEVEL SOURCE MESSAGE key=value...
+</usage>
+
+<tips>
+- Default returns last 50 entries; use lines parameter for more (max 100)
+- Look for ERROR and WARN entries first when diagnosing problems
+</tips>

internal/agent/tools/crush_logs_test.go 🔗

@@ -0,0 +1,530 @@
+package tools
+
+import (
+	"encoding/json"
+	"fmt"
+	"os"
+	"path/filepath"
+	"strings"
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/require"
+)
+
// createTestLogFile creates a temporary log file with the given entries.
// Each entry is written as one JSON line (the format the reader expects);
// the file lives under t.TempDir() and is cleaned up automatically.
func createTestLogFile(t *testing.T, entries []map[string]any) string {
	t.Helper()
	tempDir := t.TempDir()
	logFile := filepath.Join(tempDir, "crush.log")

	file, err := os.Create(logFile)
	require.NoError(t, err)
	defer file.Close()

	for _, entry := range entries {
		line, err := json.Marshal(entry)
		require.NoError(t, err)
		_, err = file.WriteString(string(line) + "\n")
		require.NoError(t, err)
	}

	return logFile
}

// makeLogEntry creates a standard log entry for testing. The timestamp is
// fixed (2024-01-15 10:30:00 UTC) so formatted output is deterministic;
// extra fields are merged into the entry as-is.
func makeLogEntry(level, msg, source string, line int, extra map[string]any) map[string]any {
	entry := map[string]any{
		"time":  time.Date(2024, 1, 15, 10, 30, 0, 0, time.UTC).Format(time.RFC3339),
		"level": level,
		"msg":   msg,
		"source": map[string]any{
			"file": source,
			"line": line,
		},
	}
	for k, v := range extra {
		entry[k] = v
	}
	return entry
}

// TestNewCrushLogsTool verifies the constructor wires up the expected tool
// name.
func TestNewCrushLogsTool(t *testing.T) {
	t.Parallel()
	tool := NewCrushLogsTool("/tmp/test.log")
	require.NotNil(t, tool)
	require.Equal(t, CrushLogsToolName, tool.Info().Name)
}
+
// TestCrushLogs_HappyPath checks the compact output format end to end:
// TIMESTAMP LEVEL SOURCE:LINE MESSAGE key=value.
func TestCrushLogs_HappyPath(t *testing.T) {
	t.Parallel()
	entries := []map[string]any{
		makeLogEntry("INFO", "Application started", "app.go", 42, map[string]any{"version": "1.0.0"}),
		makeLogEntry("DEBUG", "Processing request", "handler.go", 100, map[string]any{"request_id": "abc123"}),
		makeLogEntry("ERROR", "Database connection failed", "db.go", 55, map[string]any{"retry_count": 3}),
	}

	logFile := createTestLogFile(t, entries)

	result := runCrushLogs(logFile, CrushLogsParams{Lines: 3})

	lines := strings.Split(result, "\n")
	require.Len(t, lines, 3)

	// Verify format: TIMESTAMP LEVEL SOURCE:LINE MESSAGE
	require.Contains(t, lines[0], "INFO")
	require.Contains(t, lines[0], "app.go:42")
	require.Contains(t, lines[0], "Application started")
	require.Contains(t, lines[0], "version=1.0.0")

	require.Contains(t, lines[1], "DEBUG")
	require.Contains(t, lines[1], "handler.go:100")

	require.Contains(t, lines[2], "ERROR")
	require.Contains(t, lines[2], "db.go:55")
}

// TestCrushLogs_DefaultLines checks that Lines: 0 falls back to the default
// of 50 and that the newest 50 entries come back in order.
// NOTE(review): 100 entries push the file past the reader's 8 KB chunk size,
// but the assertions only pin the last 50 entries, so lines straddling chunk
// boundaries further back are not covered here — confirm boundary handling
// separately.
func TestCrushLogs_DefaultLines(t *testing.T) {
	t.Parallel()
	// Create 100 log entries.
	var entries []map[string]any
	for i := 0; i < 100; i++ {
		entries = append(entries, makeLogEntry("INFO", fmt.Sprintf("Entry %d", i), "app.go", i, nil))
	}

	logFile := createTestLogFile(t, entries)

	// Call with Lines: 0 should default to 50.
	result := runCrushLogs(logFile, CrushLogsParams{Lines: 0})

	lines := strings.Split(result, "\n")
	require.Len(t, lines, 50)

	// Verify we got the last 50 entries (entry 50-99).
	require.Contains(t, lines[0], "Entry 50")
	require.Contains(t, lines[49], "Entry 99")
}

// TestCrushLogs_MaxCap checks that requests above the cap are clamped to
// 100 entries.
// NOTE(review): only the count is asserted; the exact set of returned
// entries across chunk boundaries is not verified.
func TestCrushLogs_MaxCap(t *testing.T) {
	t.Parallel()
	// Create 200 log entries.
	var entries []map[string]any
	for i := 0; i < 200; i++ {
		entries = append(entries, makeLogEntry("INFO", fmt.Sprintf("Entry %d", i), "app.go", i, nil))
	}

	logFile := createTestLogFile(t, entries)

	// Request 200 lines, but should only get 100 (max cap).
	result := runCrushLogs(logFile, CrushLogsParams{Lines: 200})

	lines := strings.Split(result, "\n")
	require.Len(t, lines, 100)
}

// TestCrushLogs_MissingFile checks the friendly message for a missing file.
func TestCrushLogs_MissingFile(t *testing.T) {
	t.Parallel()
	result := runCrushLogs("/nonexistent/path/crush.log", CrushLogsParams{Lines: 50})
	require.Contains(t, result, "No log file found")
}

// TestCrushLogs_EmptyFile checks the friendly message for a zero-byte file.
func TestCrushLogs_EmptyFile(t *testing.T) {
	t.Parallel()
	tempDir := t.TempDir()
	logFile := filepath.Join(tempDir, "crush.log")
	_, err := os.Create(logFile)
	require.NoError(t, err)

	result := runCrushLogs(logFile, CrushLogsParams{Lines: 50})
	require.Contains(t, result, "Log file is empty")
}

// TestCrushLogs_MalformedLines checks that non-JSON lines are skipped
// silently while their valid neighbors are still returned.
func TestCrushLogs_MalformedLines(t *testing.T) {
	t.Parallel()
	tempDir := t.TempDir()
	logFile := filepath.Join(tempDir, "crush.log")

	file, err := os.Create(logFile)
	require.NoError(t, err)

	// Write some valid and some invalid lines.
	validEntry := makeLogEntry("INFO", "Valid entry", "app.go", 1, nil)
	line, _ := json.Marshal(validEntry)
	file.WriteString(string(line) + "\n")
	file.WriteString("this is not json\n")
	file.WriteString(`{"incomplete": "json` + "\n")

	validEntry2 := makeLogEntry("INFO", "Another valid entry", "app.go", 2, nil)
	line2, _ := json.Marshal(validEntry2)
	file.WriteString(string(line2) + "\n")

	file.Close()

	result := runCrushLogs(logFile, CrushLogsParams{Lines: 10})

	lines := strings.Split(result, "\n")
	// Only 2 valid lines should be returned.
	require.Len(t, lines, 2)
	require.Contains(t, lines[0], "Valid entry")
	require.Contains(t, lines[1], "Another valid entry")
}
+
// TestCrushLogs_ExtraFieldsSorted checks that extra key=value pairs appear
// in alphabetical key order.
func TestCrushLogs_ExtraFieldsSorted(t *testing.T) {
	t.Parallel()
	entries := []map[string]any{
		makeLogEntry("INFO", "Test message", "app.go", 1, map[string]any{
			"z_field": "last",
			"a_field": "first",
			"m_field": "middle",
		}),
	}

	logFile := createTestLogFile(t, entries)

	result := runCrushLogs(logFile, CrushLogsParams{Lines: 1})

	// Fields should be sorted alphabetically.
	idxA := strings.Index(result, "a_field=first")
	idxM := strings.Index(result, "m_field=middle")
	idxZ := strings.Index(result, "z_field=last")

	require.True(t, idxA < idxM, "a_field should come before m_field")
	require.True(t, idxM < idxZ, "m_field should come before z_field")
}

// TestCrushLogs_NonStringValues checks value rendering: numbers, booleans,
// and null stay bare; objects and arrays are JSON-encoded and quoted.
func TestCrushLogs_NonStringValues(t *testing.T) {
	t.Parallel()
	entry := map[string]any{
		"time":   time.Now().Format(time.RFC3339),
		"level":  "INFO",
		"msg":    "Test values",
		"source": map[string]any{"file": "app.go", "line": 1},
		"count":  42,
		"ratio":  3.14,
		"active": true,
		"data":   nil,
		"obj":    map[string]any{"key": "value"},
		"arr":    []any{1, 2, 3},
	}

	logFile := createTestLogFile(t, []map[string]any{entry})

	result := runCrushLogs(logFile, CrushLogsParams{Lines: 1})

	// Numbers should be bare (not quoted).
	require.Contains(t, result, "count=42")
	require.Contains(t, result, "ratio=3.14")

	// Booleans should be bare.
	require.Contains(t, result, "active=true")

	// Null should be bare.
	require.Contains(t, result, "data=null")

	// Objects and arrays should be JSON-encoded and quoted.
	require.Contains(t, result, `obj="{`)
	require.Contains(t, result, `arr="[`)
}

// TestCrushLogs_Redaction checks that every sensitive key pattern — exact,
// upper-case, and substring matches — is replaced by [REDACTED] and the raw
// secret values never reach the output.
func TestCrushLogs_Redaction(t *testing.T) {
	t.Parallel()
	entries := []map[string]any{
		makeLogEntry("INFO", "API call", "api.go", 10, map[string]any{
			"authorization":  "Bearer secret123",
			"api_key":        "my-api-key",
			"api-key":        "my-api-key-2",
			"apikey":         "myapikey",
			"token":          "mytoken",
			"secret":         "mysecret",
			"password":       "mypassword",
			"credential":     "mycred",
			"Authorization":  "Bearer secret456",
			"API_KEY":        "uppercase",
			"my_token_value": "nestedtoken",
		}),
	}

	logFile := createTestLogFile(t, entries)

	result := runCrushLogs(logFile, CrushLogsParams{Lines: 1})

	// All sensitive fields should be redacted.
	require.Contains(t, result, "authorization=[REDACTED]")
	require.Contains(t, result, "api_key=[REDACTED]")
	require.Contains(t, result, "api-key=[REDACTED]")
	require.Contains(t, result, "apikey=[REDACTED]")
	require.Contains(t, result, "token=[REDACTED]")
	require.Contains(t, result, "secret=[REDACTED]")
	require.Contains(t, result, "password=[REDACTED]")
	require.Contains(t, result, "credential=[REDACTED]")
	require.Contains(t, result, "Authorization=[REDACTED]")
	require.Contains(t, result, "API_KEY=[REDACTED]")
	require.Contains(t, result, "my_token_value=[REDACTED]")

	// Original sensitive values should not appear.
	require.NotContains(t, result, "secret123")
	require.NotContains(t, result, "my-api-key")
	require.NotContains(t, result, "mytoken")
}

// TestCrushLogs_ReservedFields checks that time/level/source/msg never show
// up as extra key=value pairs, regardless of case.
func TestCrushLogs_ReservedFields(t *testing.T) {
	t.Parallel()
	entries := []map[string]any{
		{
			"time":   time.Now().Format(time.RFC3339),
			"level":  "INFO",
			"msg":    "Test",
			"source": map[string]any{"file": "app.go", "line": 1},
			"Time":   "should be reserved",
			"LEVEL":  "should be reserved",
			"Msg":    "should be reserved",
			"SOURCE": "should be reserved",
			"extra":  "should appear",
		},
	}

	logFile := createTestLogFile(t, entries)

	result := runCrushLogs(logFile, CrushLogsParams{Lines: 1})

	// Reserved fields (case-insensitive) should not appear in extra fields.
	require.NotContains(t, result, "Time=")
	require.NotContains(t, result, "LEVEL=")
	require.NotContains(t, result, "Msg=")
	require.NotContains(t, result, "SOURCE=")
	require.NotContains(t, result, "time=")  // The extra time field
	require.NotContains(t, result, "level=") // The extra level field

	// Non-reserved field should appear (quoted since it has spaces).
	require.Contains(t, result, `extra="should appear"`)
}

// TestCrushLogs_OversizedLines checks that a line exceeding maxLogLineSize
// is dropped silently and its neighbors are unaffected.
func TestCrushLogs_OversizedLines(t *testing.T) {
	t.Parallel()
	tempDir := t.TempDir()
	logFile := filepath.Join(tempDir, "crush.log")

	file, err := os.Create(logFile)
	require.NoError(t, err)

	// Create a valid entry first.
	validEntry := makeLogEntry("INFO", "Valid entry", "app.go", 1, nil)
	line, _ := json.Marshal(validEntry)
	file.WriteString(string(line) + "\n")

	// Create an oversized line (more than 1 MB).
	bigValue := strings.Repeat("x", maxLogLineSize+1000)
	bigEntry := map[string]any{
		"time":   time.Now().Format(time.RFC3339),
		"level":  "INFO",
		"msg":    "Big message",
		"source": map[string]any{"file": "big.go", "line": 1},
		"data":   bigValue,
	}
	bigLine, _ := json.Marshal(bigEntry)
	file.WriteString(string(bigLine) + "\n")

	// Create another valid entry.
	validEntry2 := makeLogEntry("INFO", "Second valid entry", "app.go", 2, nil)
	line2, _ := json.Marshal(validEntry2)
	file.WriteString(string(line2) + "\n")

	file.Close()

	result := runCrushLogs(logFile, CrushLogsParams{Lines: 10})

	lines := strings.Split(result, "\n")

	// Only the 2 valid entries should be returned (oversized one skipped).
	require.Len(t, lines, 2)
	require.Contains(t, lines[0], "Valid entry")
	require.Contains(t, lines[1], "Second valid entry")
}

// TestCrushLogs_PartialTrailingLine checks that a truncated, unterminated
// final line (e.g. a write interrupted mid-entry) is skipped.
func TestCrushLogs_PartialTrailingLine(t *testing.T) {
	t.Parallel()
	tempDir := t.TempDir()
	logFile := filepath.Join(tempDir, "crush.log")

	file, err := os.Create(logFile)
	require.NoError(t, err)

	// Create valid entries.
	for i := 0; i < 5; i++ {
		entry := makeLogEntry("INFO", fmt.Sprintf("Entry %d", i), "app.go", i, nil)
		line, _ := json.Marshal(entry)
		file.WriteString(string(line) + "\n")
	}

	// Write a partial/truncated line (no closing brace or newline).
	file.WriteString(`{"time": "2024-01-15T10:00:00Z", "level": "INFO", "msg": "Truncated`)
	file.Close()

	result := runCrushLogs(logFile, CrushLogsParams{Lines: 10})

	lines := strings.Split(result, "\n")

	// Should get the 5 valid entries, truncated line is skipped.
	require.Len(t, lines, 5)
	for i, line := range lines {
		require.Contains(t, line, fmt.Sprintf("Entry %d", i))
	}
}
+
// TestCrushLogs_ValueQuoting checks string quoting rules: bare when plain,
// quoted+escaped when empty or containing spaces, '=', control characters,
// quotes, or backslashes.
func TestCrushLogs_ValueQuoting(t *testing.T) {
	t.Parallel()
	entries := []map[string]any{
		makeLogEntry("INFO", "Test", "app.go", 1, map[string]any{
			"empty":          "",
			"with_spaces":    "hello world",
			"with_equals":    "a=b",
			"with_newline":   "line1\nline2",
			"with_quote":     `say "hello"`,
			"with_backslash": "path\\to\\file",
			"normal":         "simplevalue",
		}),
	}

	logFile := createTestLogFile(t, entries)

	result := runCrushLogs(logFile, CrushLogsParams{Lines: 1})

	// Empty strings should be quoted.
	require.Contains(t, result, `empty=""`)

	// Strings with spaces should be quoted.
	require.Contains(t, result, `with_spaces="hello world"`)

	// Strings with = should be quoted.
	require.Contains(t, result, `with_equals="a=b"`)

	// Strings with newlines should escape them.
	require.Contains(t, result, `with_newline="line1\nline2"`)

	// Strings with quotes should escape them.
	require.Contains(t, result, `with_quote="say \"hello\""`)

	// Strings with backslashes should escape them.
	require.Contains(t, result, `with_backslash="path\\to\\file"`)

	// Normal strings without special chars should be bare.
	require.Contains(t, result, "normal=simplevalue")
}

// TestCrushLogs_ChronologicalOrder checks entries come back oldest first.
func TestCrushLogs_ChronologicalOrder(t *testing.T) {
	t.Parallel()
	// Create entries with different timestamps.
	baseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
	entries := []map[string]any{
		{
			"time":   baseTime.Add(0 * time.Second).Format(time.RFC3339),
			"level":  "INFO",
			"msg":    "First",
			"source": map[string]any{"file": "app.go", "line": 1},
		},
		{
			"time":   baseTime.Add(1 * time.Second).Format(time.RFC3339),
			"level":  "INFO",
			"msg":    "Second",
			"source": map[string]any{"file": "app.go", "line": 2},
		},
		{
			"time":   baseTime.Add(2 * time.Second).Format(time.RFC3339),
			"level":  "INFO",
			"msg":    "Third",
			"source": map[string]any{"file": "app.go", "line": 3},
		},
	}

	logFile := createTestLogFile(t, entries)

	result := runCrushLogs(logFile, CrushLogsParams{Lines: 3})

	lines := strings.Split(result, "\n")

	// Verify chronological order (oldest first).
	require.Len(t, lines, 3)
	require.Contains(t, lines[0], "First")
	require.Contains(t, lines[1], "Second")
	require.Contains(t, lines[2], "Third")
}

// TestCrushLogs_TimeOnlyFormat checks the timestamp is rendered as time-only
// (15:04:05), with the date stripped.
func TestCrushLogs_TimeOnlyFormat(t *testing.T) {
	t.Parallel()
	entry := map[string]any{
		"time":   "2024-01-15T15:04:05Z",
		"level":  "INFO",
		"msg":    "Test",
		"source": map[string]any{"file": "app.go", "line": 1},
	}

	logFile := createTestLogFile(t, []map[string]any{entry})

	result := runCrushLogs(logFile, CrushLogsParams{Lines: 1})

	// Should show time-only format.
	require.True(t, strings.HasPrefix(result, "15:04:05"), "Expected time-only format, got: %s", result)
}

// TestCrushLogs_LevelVariations checks level normalization, including the
// WARNING -> WARN aliasing.
func TestCrushLogs_LevelVariations(t *testing.T) {
	t.Parallel()
	entries := []map[string]any{
		makeLogEntry("DEBUG", "Debug message", "app.go", 1, nil),
		makeLogEntry("INFO", "Info message", "app.go", 2, nil),
		makeLogEntry("WARN", "Warn message", "app.go", 3, nil),
		makeLogEntry("WARNING", "Warning message", "app.go", 4, nil),
		makeLogEntry("ERROR", "Error message", "app.go", 5, nil),
	}

	logFile := createTestLogFile(t, entries)

	result := runCrushLogs(logFile, CrushLogsParams{Lines: 5})

	lines := strings.Split(result, "\n")
	require.Len(t, lines, 5)

	// Check level normalization.
	require.Contains(t, lines[0], "DEBUG")
	require.Contains(t, lines[1], "INFO")
	require.Contains(t, lines[2], "WARN")
	require.Contains(t, lines[3], "WARN") // WARNING -> WARN
	require.Contains(t, lines[4], "ERROR")
}

// TestCrushLogs_SourceVariations checks the three source shapes: slog-style
// object, plain string, and missing entirely.
func TestCrushLogs_SourceVariations(t *testing.T) {
	t.Parallel()
	entries := []map[string]any{
		// Source as object with file and line.
		{
			"time":   time.Now().Format(time.RFC3339),
			"level":  "INFO",
			"msg":    "Object source",
			"source": map[string]any{"file": "/path/to/app.go", "line": 42},
		},
		// Source as string.
		{
			"time":   time.Now().Format(time.RFC3339),
			"level":  "INFO",
			"msg":    "String source",
			"source": "/path/to/handler.go:100",
		},
		// Missing source.
		{
			"time":  time.Now().Format(time.RFC3339),
			"level": "INFO",
			"msg":   "No source",
		},
	}

	logFile := createTestLogFile(t, entries)

	result := runCrushLogs(logFile, CrushLogsParams{Lines: 3})

	lines := strings.Split(result, "\n")
	require.Len(t, lines, 3)

	// Check source formatting (should use basename only).
	require.Contains(t, lines[0], "app.go:42")
	require.Contains(t, lines[1], "handler.go") // String source gets basename too
	require.Contains(t, lines[2], "unknown:0")  // Missing source
}

internal/config/config.go 🔗

@@ -463,6 +463,7 @@ func allToolNames() []string {
 		"agent",
 		"bash",
 		"crush_info",
+		"crush_logs",
 		"job_output",
 		"job_kill",
 		"download",

internal/config/load_test.go 🔗

@@ -490,7 +490,7 @@ func TestConfig_setupAgentsWithDisabledTools(t *testing.T) {
 	coderAgent, ok := cfg.Agents[AgentCoder]
 	require.True(t, ok)
 
-	assert.Equal(t, []string{"agent", "bash", "crush_info", "job_output", "job_kill", "multiedit", "lsp_diagnostics", "lsp_references", "lsp_restart", "fetch", "agentic_fetch", "glob", "ls", "sourcegraph", "todos", "view", "write", "list_mcp_resources", "read_mcp_resource"}, coderAgent.AllowedTools)
+	assert.Equal(t, []string{"agent", "bash", "crush_info", "crush_logs", "job_output", "job_kill", "multiedit", "lsp_diagnostics", "lsp_references", "lsp_restart", "fetch", "agentic_fetch", "glob", "ls", "sourcegraph", "todos", "view", "write", "list_mcp_resources", "read_mcp_resource"}, coderAgent.AllowedTools)
 
 	taskAgent, ok := cfg.Agents[AgentTask]
 	require.True(t, ok)
@@ -513,7 +513,7 @@ func TestConfig_setupAgentsWithEveryReadOnlyToolDisabled(t *testing.T) {
 	cfg.SetupAgents()
 	coderAgent, ok := cfg.Agents[AgentCoder]
 	require.True(t, ok)
-	assert.Equal(t, []string{"agent", "bash", "crush_info", "job_output", "job_kill", "download", "edit", "multiedit", "lsp_diagnostics", "lsp_references", "lsp_restart", "fetch", "agentic_fetch", "todos", "write", "list_mcp_resources", "read_mcp_resource"}, coderAgent.AllowedTools)
+	assert.Equal(t, []string{"agent", "bash", "crush_info", "crush_logs", "job_output", "job_kill", "download", "edit", "multiedit", "lsp_diagnostics", "lsp_references", "lsp_restart", "fetch", "agentic_fetch", "todos", "write", "list_mcp_resources", "read_mcp_resource"}, coderAgent.AllowedTools)
 
 	taskAgent, ok := cfg.Agents[AgentTask]
 	require.True(t, ok)