package cmd

import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"slices"
	"time"

	"github.com/charmbracelet/crush/internal/config"
	"github.com/charmbracelet/log/v2"
	"github.com/nxadm/tail"
	"github.com/spf13/cobra"
)

const defaultTailLines = 1000
 20
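// logsCmd implements `crush logs`, which prints the log file of the current
// crush project and can optionally follow new entries.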
var logsCmd = &cobra.Command{
	Use:   "logs",
	Short: "View crush logs",
	Long:  `View the logs generated by Crush. This command allows you to see the log output for debugging and monitoring.`,
	RunE: func(cmd *cobra.Command, args []string) error {
		cwd, err := cmd.Flags().GetString("cwd")
		if err != nil {
			return fmt.Errorf("failed to get current working directory: %v", err)
		}

		follow, err := cmd.Flags().GetBool("follow")
		if err != nil {
			return fmt.Errorf("failed to get follow flag: %v", err)
		}

		tailLines, err := cmd.Flags().GetInt("tail")
		if err != nil {
			return fmt.Errorf("failed to get tail flag: %v", err)
		}

		log.SetLevel(log.DebugLevel)
		log.SetOutput(os.Stdout)

		cfg, err := config.Load(cwd, false)
		if err != nil {
			return fmt.Errorf("failed to load configuration: %v", err)
		}
		logsFile := filepath.Join(cfg.WorkingDir(), cfg.Options.DataDirectory, "logs", "crush.log")
		_, err = os.Stat(logsFile)
		if os.IsNotExist(err) {
			log.Warn("Looks like you are not in a crush project. No logs found.")
			return nil
		}

		if follow {
			return followLogs(cmd.Context(), logsFile, tailLines)
		}

		return showLogs(logsFile, tailLines)
	},
}

func init() {
	logsCmd.Flags().BoolP("follow", "f", false, "Follow log output")
	logsCmd.Flags().IntP("tail", "t", defaultTailLines, "Show only the last N lines (default: 1000, for performance)")
	rootCmd.AddCommand(logsCmd)
}

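// followLogs prints the last tailLines lines of the log file and then keeps
// streaming new entries until the context is cancelled.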
func followLogs(ctx context.Context, logsFile string, tailLines int) error {
	// First pass: read the existing file once (no follow) to collect the last
	// tailLines lines already on disk.
	t, err := tail.TailFile(logsFile, tail.Config{
		Follow: false,
		ReOpen: false,
		Logger: tail.DiscardingLogger,
	})
	if err != nil {
		return fmt.Errorf("failed to tail log file: %v", err)
	}

	var lines []string
	for line := range t.Lines {
		if line.Err != nil {
			continue
		}
		lines = append(lines, line.Text)
		if len(lines) > tailLines {
			lines = lines[len(lines)-tailLines:]
		}
	}
	t.Stop()

	for _, line := range lines {
		printLogLine(line)
	}

	if len(lines) == tailLines {
		fmt.Fprintf(os.Stderr, "\nShowing last %d lines. Full logs available at: %s\n", tailLines, logsFile)
		fmt.Fprintf(os.Stderr, "Following new log entries...\n\n")
	}

	// Second pass: follow the file from its current end so that only new
	// entries are streamed.
	t, err = tail.TailFile(logsFile, tail.Config{
		Follow:   true,
		ReOpen:   true,
		Logger:   tail.DiscardingLogger,
		Location: &tail.SeekInfo{Offset: 0, Whence: io.SeekEnd},
	})
	if err != nil {
		return fmt.Errorf("failed to tail log file: %v", err)
	}
	defer t.Stop()

	for {
		select {
		case line, ok := <-t.Lines:
			if !ok {
				// The Lines channel closed; the tailer has stopped.
				return nil
			}
			if line.Err != nil {
				continue
			}
			printLogLine(line.Text)
		case <-ctx.Done():
			return nil
		}
	}
}

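// showLogs prints at most the last tailLines lines of the log file and returns.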
func showLogs(logsFile string, tailLines int) error {
	t, err := tail.TailFile(logsFile, tail.Config{
		Follow:      false,
		ReOpen:      false,
		Logger:      tail.DiscardingLogger,
		MaxLineSize: 0,
	})
	if err != nil {
		return fmt.Errorf("failed to tail log file: %v", err)
	}
	defer t.Stop()

	var lines []string
	for line := range t.Lines {
		if line.Err != nil {
			continue
		}
		lines = append(lines, line.Text)
		if len(lines) > tailLines {
			lines = lines[len(lines)-tailLines:]
		}
	}

	for _, line := range lines {
		printLogLine(line)
	}

	if len(lines) == tailLines {
		fmt.Fprintf(os.Stderr, "\nShowing last %d lines. Full logs available at: %s\n", tailLines, logsFile)
	}

	return nil
}

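// printLogLine re-logs a single JSON log line through charmbracelet/log so it
// is rendered with its original level, timestamp, and structured fields.
// Lines that are not valid JSON are skipped.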
func printLogLine(lineText string) {
	var data map[string]any
	if err := json.Unmarshal([]byte(lineText), &data); err != nil {
		return
	}
	msg := data["msg"]
	level := data["level"]
	otherData := []any{}
	keys := []string{}
	for k := range data {
		keys = append(keys, k)
	}
	slices.Sort(keys)
	for _, k := range keys {
		switch k {
		case "msg", "level", "time":
			continue
		case "source":
			source, ok := data[k].(map[string]any)
			if !ok {
				continue
			}
			// encoding/json decodes the source line number as a float64.
			lineNum, _ := source["line"].(float64)
			sourceFile := fmt.Sprintf("%s:%d", source["file"], int(lineNum))
			otherData = append(otherData, "source", sourceFile)

		default:
			otherData = append(otherData, k, data[k])
		}
	}
	log.SetTimeFunction(func(_ time.Time) time.Time {
		// Use the timestamp from the log line if it parses; otherwise fall
		// back to the current time.
		timestamp, ok := data["time"].(string)
		if !ok {
			return time.Now()
		}
		t, err := time.Parse(time.RFC3339, timestamp)
		if err != nil {
			return time.Now()
		}
		return t
	})
	switch level {
	case "INFO":
		log.Info(msg, otherData...)
	case "DEBUG":
		log.Debug(msg, otherData...)
	case "ERROR":
		log.Error(msg, otherData...)
	case "WARN":
		log.Warn(msg, otherData...)
	default:
		log.Info(msg, otherData...)
	}
}