// crush_logs.go

  1package tools
  2
  3import (
  4	"context"
  5	_ "embed"
  6	"encoding/json"
  7	"fmt"
  8	"io"
  9	"os"
 10	"path/filepath"
 11	"sort"
 12	"strconv"
 13	"strings"
 14	"time"
 15
 16	"charm.land/fantasy"
 17)
 18
// CrushLogsToolName is the name under which this tool is registered.
const CrushLogsToolName = "crush_logs"

// crushLogsDescription is the tool's long-form description, embedded from the
// adjacent crush_logs.md file at build time.
//
//go:embed crush_logs.md
var crushLogsDescription []byte

// Max line size to prevent memory issues with very long log lines (1 MB).
// Lines longer than this are skipped silently when reading the log file.
const maxLogLineSize = 1024 * 1024

// Default and max line limits for a single request.
const (
	defaultLogLines = 50  // used when the caller does not specify a count
	maxLogLines     = 100 // hard ceiling regardless of the requested count
)

// Reserved fields that should not appear as extra key=value pairs.
// Case-insensitive matching is used.
var reservedFields = map[string]bool{
	"time":   true,
	"level":  true,
	"source": true,
	"msg":    true,
}

// Sensitive field keys that should be redacted (matched case-insensitively,
// by substring — e.g. "auth_token" matches "token").
var sensitiveKeys = []string{
	"authorization",
	"api-key",
	"api_key",
	"apikey",
	"token",
	"secret",
	"password",
	"credential",
}
 53
// CrushLogsParams are the JSON-decoded arguments accepted by the crush_logs tool.
type CrushLogsParams struct {
	// Lines is the number of recent log entries to return; clamped to
	// [1, maxLogLines], defaulting to defaultLogLines when unset.
	Lines int `json:"lines,omitempty" description:"Number of recent log entries to return (default 50, max 100)"`
}
 57
 58func NewCrushLogsTool(logFile string) fantasy.AgentTool {
 59	return fantasy.NewAgentTool(
 60		CrushLogsToolName,
 61		string(crushLogsDescription),
 62		func(ctx context.Context, params CrushLogsParams, call fantasy.ToolCall) (fantasy.ToolResponse, error) {
 63			result := runCrushLogs(logFile, params)
 64			return fantasy.NewTextResponse(result), nil
 65		},
 66	)
 67}
 68
 69// runCrushLogs reads and formats the last N log entries from the given file.
 70func runCrushLogs(logFile string, params CrushLogsParams) string {
 71	// Validate and clamp the lines parameter.
 72	lines := params.Lines
 73	if lines <= 0 {
 74		lines = defaultLogLines
 75	}
 76	if lines > maxLogLines {
 77		lines = maxLogLines
 78	}
 79
 80	// Check if file exists.
 81	info, err := os.Stat(logFile)
 82	if err != nil {
 83		if os.IsNotExist(err) {
 84			return "No log file found"
 85		}
 86		return fmt.Sprintf("Error accessing log file: %v", err)
 87	}
 88
 89	if info.Size() == 0 {
 90		return "Log file is empty"
 91	}
 92
 93	// Read the last N lines from the log file.
 94	logEntries, err := readLastLines(logFile, lines)
 95	if err != nil {
 96		return fmt.Sprintf("Error reading log file: %v", err)
 97	}
 98
 99	if len(logEntries) == 0 {
100		return "Log file is empty"
101	}
102
103	// Format and return the entries.
104	formatted := formatLogEntries(logEntries)
105	return strings.Join(formatted, "\n")
106}
107
108// readLastLines reads the last n lines from a file by seeking to the end and
109// scanning backwards. Lines exceeding maxLogLineSize are skipped.
110func readLastLines(filePath string, n int) ([]map[string]any, error) {
111	file, err := os.Open(filePath)
112	if err != nil {
113		return nil, err
114	}
115	defer file.Close()
116
117	stat, err := file.Stat()
118	if err != nil {
119		return nil, err
120	}
121
122	if stat.Size() == 0 {
123		return nil, nil
124	}
125
126	// Seek to end and read chunks backwards.
127	var entries []map[string]any
128	const chunkSize = 8192 // 8KB chunks
129
130	pos := stat.Size()
131	var remainder []byte
132
133	for pos > 0 && len(entries) < n {
134		chunkStart := max(pos-chunkSize, 0)
135
136		chunkLen := int(pos - chunkStart)
137		if chunkLen == 0 {
138			break
139		}
140
141		_, err := file.Seek(chunkStart, 0)
142		if err != nil {
143			return nil, err
144		}
145
146		chunk := make([]byte, chunkLen)
147		_, err = io.ReadFull(file, chunk)
148		if err != nil {
149			return nil, err
150		}
151
152		// Combine with remainder from previous (earlier) chunk.
153		data := append(chunk, remainder...)
154
155		// Split into lines (without the final incomplete line if any).
156		lines := splitLines(data)
157
158		// Keep the incomplete line for next iteration.
159		if len(data) > 0 && data[len(data)-1] != '\n' {
160			remainder = lines[len(lines)-1]
161			lines = lines[:len(lines)-1]
162		} else {
163			remainder = nil
164		}
165
166		// Parse lines from end to start to get most recent first.
167		for i := len(lines) - 1; i >= 0; i-- {
168			if len(lines[i]) > maxLogLineSize {
169				// Skip oversized lines silently.
170				continue
171			}
172
173			// Try to parse as JSON.
174			var entry map[string]any
175			if err := json.Unmarshal(lines[i], &entry); err != nil {
176				// Skip malformed lines silently.
177				continue
178			}
179
180			entries = append(entries, entry)
181			if len(entries) >= n {
182				break
183			}
184		}
185
186		pos = chunkStart
187	}
188
189	// Handle final remainder.
190	if len(remainder) > 0 && len(remainder) <= maxLogLineSize {
191		var entry map[string]any
192		if err := json.Unmarshal(remainder, &entry); err == nil {
193			if len(entries) < n {
194				entries = append(entries, entry)
195			}
196		}
197	}
198
199	// Reverse to get chronological order (oldest first).
200	for i, j := 0, len(entries)-1; i < j; i, j = i+1, j-1 {
201		entries[i], entries[j] = entries[j], entries[i]
202	}
203
204	return entries, nil
205}
206
// splitLines splits data on '\n' into subslices of the input, allocating no
// strings. A trailing newline does not produce a final empty element.
func splitLines(data []byte) [][]byte {
	var out [][]byte
	rest := data
	for {
		// Find the next newline in the unprocessed tail.
		cut := -1
		for j, c := range rest {
			if c == '\n' {
				cut = j
				break
			}
		}
		if cut < 0 {
			break
		}
		out = append(out, rest[:cut])
		rest = rest[cut+1:]
	}
	// Keep any final unterminated line.
	if len(rest) > 0 {
		out = append(out, rest)
	}
	return out
}
222
223// formatLogEntries formats log entries into compact text format.
224func formatLogEntries(entries []map[string]any) []string {
225	var result []string
226	for _, entry := range entries {
227		result = append(result, formatLogEntry(entry))
228	}
229	return result
230}
231
232// formatLogEntry formats a single log entry into compact text format:
233// TIMESTAMP LEVEL SOURCE:LINE MESSAGE key=value...
234func formatLogEntry(entry map[string]any) string {
235	var parts []string
236
237	// Extract and format timestamp (time-only, no date).
238	timeStr := extractTime(entry)
239	parts = append(parts, timeStr)
240
241	// Extract level.
242	level := extractLevel(entry)
243	parts = append(parts, level)
244
245	// Extract source.
246	source := extractSource(entry)
247	parts = append(parts, source)
248
249	// Extract message.
250	msg := extractMessage(entry)
251
252	// Collect extra fields (excluding reserved fields).
253	extraFields := extractExtraFields(entry)
254
255	// Build the output.
256	var b strings.Builder
257	for i, part := range parts {
258		if i > 0 {
259			b.WriteByte(' ')
260		}
261		b.WriteString(part)
262	}
263	b.WriteByte(' ')
264	b.WriteString(msg)
265
266	// Append sorted key=value pairs.
267	if len(extraFields) > 0 {
268		keys := make([]string, 0, len(extraFields))
269		for k := range extraFields {
270			keys = append(keys, k)
271		}
272		sort.Strings(keys)
273
274		for _, k := range keys {
275			b.WriteByte(' ')
276			b.WriteString(k)
277			b.WriteByte('=')
278			b.WriteString(formatValue(extraFields[k], k))
279		}
280	}
281
282	return b.String()
283}
284
// extractTime extracts and formats the timestamp from a log entry.
// Returns time-only format (15:04:05), or a placeholder when the "time"
// field is missing, not a string, or unparseable.
func extractTime(entry map[string]any) string {
	const placeholder = "--:--:--"

	raw, ok := entry["time"].(string)
	if !ok {
		return placeholder
	}

	// Accept RFC3339 first, then a bare timestamp without zone.
	for _, layout := range []string{time.RFC3339, "2006-01-02T15:04:05"} {
		if t, err := time.Parse(layout, raw); err == nil {
			return t.Format("15:04:05")
		}
	}
	return placeholder
}
310
// extractLevel extracts and normalizes the log level. Anything missing,
// non-string, or unrecognized falls back to INFO; WARNING collapses to WARN.
func extractLevel(entry map[string]any) string {
	raw, ok := entry["level"].(string)
	if !ok {
		return "INFO"
	}

	switch upper := strings.ToUpper(raw); upper {
	case "DEBUG", "INFO", "ERROR":
		return upper
	case "WARN", "WARNING":
		return "WARN"
	default:
		return "INFO"
	}
}
336
// extractSource extracts the source file and line from a log entry as
// "base.go:N". The "source" field may be a plain string path or an object
// with "file" and "line"; anything else yields "unknown:0".
func extractSource(entry map[string]any) string {
	const fallback = "unknown:0"

	switch src := entry["source"].(type) {
	case string:
		return filepath.Base(src)
	case map[string]any:
		name, ok := src["file"].(string)
		if !ok {
			return fallback
		}

		// The line number may arrive as float64 (default JSON decoding),
		// int, or json.Number; missing or unrecognized values become 0.
		line := 0
		switch v := src["line"].(type) {
		case float64:
			line = int(v)
		case int:
			line = v
		case json.Number:
			if parsed, err := v.Int64(); err == nil {
				line = int(parsed)
			}
		}
		return fmt.Sprintf("%s:%d", filepath.Base(name), line)
	default:
		return fallback
	}
}
373
// extractMessage extracts the log message. A missing "msg" key yields "";
// a present but non-string value is rendered with its default formatting
// (so an explicit null becomes "<nil>", unlike a missing key).
func extractMessage(entry map[string]any) string {
	raw, present := entry["msg"]
	if !present {
		return ""
	}
	if s, isString := raw.(string); isString {
		return s
	}
	return fmt.Sprint(raw)
}
387
388// extractExtraFields extracts all non-reserved fields from a log entry.
389func extractExtraFields(entry map[string]any) map[string]any {
390	result := make(map[string]any)
391	for k, v := range entry {
392		// Skip reserved fields (case-insensitive).
393		if isReservedField(k) {
394			continue
395		}
396		// Redact sensitive values.
397		if isSensitiveKey(k) {
398			result[k] = "[REDACTED]"
399		} else {
400			result[k] = v
401		}
402	}
403	return result
404}
405
406// isReservedField checks if a field name is reserved (case-insensitive).
407func isReservedField(name string) bool {
408	lowerName := strings.ToLower(name)
409	return reservedFields[lowerName]
410}
411
412// isSensitiveKey checks if a key contains sensitive information (case-insensitive).
413func isSensitiveKey(name string) bool {
414	lowerName := strings.ToLower(name)
415	for _, sensitive := range sensitiveKeys {
416		if strings.Contains(lowerName, sensitive) {
417			return true
418		}
419	}
420	return false
421}
422
423// formatValue formats a value according to the quoting rules.
424func formatValue(value any, key string) string {
425	// Redact sensitive values (second check for safety).
426	if isSensitiveKey(key) {
427		return "[REDACTED]"
428	}
429
430	switch v := value.(type) {
431	case string:
432		return formatStringValue(v)
433	case float64:
434		// Check if it's actually an integer.
435		if v == float64(int64(v)) {
436			return strconv.FormatInt(int64(v), 10)
437		}
438		return strconv.FormatFloat(v, 'f', -1, 64)
439	case int:
440		return strconv.Itoa(v)
441	case int64:
442		return strconv.FormatInt(v, 10)
443	case bool:
444		return strconv.FormatBool(v)
445	case nil:
446		return "null"
447	case map[string]any, []any:
448		// Objects and arrays are JSON-encoded and quoted.
449		jsonBytes, err := json.Marshal(v)
450		if err != nil {
451			return quoteString(fmt.Sprintf("%v", v))
452		}
453		return quoteString(string(jsonBytes))
454	default:
455		return quoteString(fmt.Sprintf("%v", v))
456	}
457}
458
459// formatStringValue formats a string value with quoting if needed.
460func formatStringValue(s string) string {
461	// Quote if empty, contains spaces, =, newlines, or special chars.
462	needsQuote := len(s) == 0 ||
463		strings.ContainsAny(s, " =\n\r\t\"") ||
464		strings.Contains(s, "\\")
465
466	if !needsQuote {
467		return s
468	}
469
470	return quoteString(s)
471}
472
// quoteString wraps s in double quotes, escaping quotes, backslashes, and
// the common whitespace controls (\n, \r, \t). All other bytes pass through
// unchanged.
func quoteString(s string) string {
	var sb strings.Builder
	sb.Grow(len(s) + 2)
	sb.WriteByte('"')
	for _, c := range []byte(s) {
		switch c {
		case '"', '\\':
			sb.WriteByte('\\')
			sb.WriteByte(c)
		case '\n':
			sb.WriteString(`\n`)
		case '\r':
			sb.WriteString(`\r`)
		case '\t':
			sb.WriteString(`\t`)
		default:
			sb.WriteByte(c)
		}
	}
	sb.WriteByte('"')
	return sb.String()
}