1package cmd
2
3import (
4 "context"
5 "encoding/json"
6 "fmt"
7 "io"
8 "os"
9 "path/filepath"
10 "slices"
11 "time"
12
13 "github.com/charmbracelet/crush/internal/config"
14 "github.com/charmbracelet/log/v2"
15 "github.com/nxadm/tail"
16 "github.com/spf13/cobra"
17)
18
// defaultTailLines caps how many trailing log lines are shown when the
// user does not pass an explicit --tail value.
const defaultTailLines = 1000
20
21var logsCmd = &cobra.Command{
22 Use: "logs",
23 Short: "View crush logs",
24 Long: `View the logs generated by Crush. This command allows you to see the log output for debugging and monitoring.`,
25 RunE: func(cmd *cobra.Command, args []string) error {
26 cwd, err := cmd.Flags().GetString("cwd")
27 if err != nil {
28 return fmt.Errorf("failed to get current working directory: %v", err)
29 }
30
31 dataDir, err := cmd.Flags().GetString("data-dir")
32 if err != nil {
33 return fmt.Errorf("failed to get data directory: %v", err)
34 }
35
36 follow, err := cmd.Flags().GetBool("follow")
37 if err != nil {
38 return fmt.Errorf("failed to get follow flag: %v", err)
39 }
40
41 tailLines, err := cmd.Flags().GetInt("tail")
42 if err != nil {
43 return fmt.Errorf("failed to get tail flag: %v", err)
44 }
45
46 log.SetLevel(log.DebugLevel)
47 log.SetOutput(os.Stdout)
48
49 cfg, err := config.Load(cwd, dataDir, false)
50 if err != nil {
51 return fmt.Errorf("failed to load configuration: %v", err)
52 }
53 logsFile := filepath.Join(cfg.Options.DataDirectory, "logs", "crush.log")
54 _, err = os.Stat(logsFile)
55 if os.IsNotExist(err) {
56 log.Warn("Looks like you are not in a crush project. No logs found.")
57 return nil
58 }
59
60 if follow {
61 return followLogs(cmd.Context(), logsFile, tailLines)
62 }
63
64 return showLogs(logsFile, tailLines)
65 },
66}
67
68func init() {
69 logsCmd.Flags().BoolP("follow", "f", false, "Follow log output")
70 logsCmd.Flags().IntP("tail", "t", defaultTailLines, "Show only the last N lines default: 1000 for performance")
71 rootCmd.AddCommand(logsCmd)
72}
73
74func followLogs(ctx context.Context, logsFile string, tailLines int) error {
75 t, err := tail.TailFile(logsFile, tail.Config{
76 Follow: false,
77 ReOpen: false,
78 Logger: tail.DiscardingLogger,
79 })
80 if err != nil {
81 return fmt.Errorf("failed to tail log file: %v", err)
82 }
83
84 var lines []string
85 for line := range t.Lines {
86 if line.Err != nil {
87 continue
88 }
89 lines = append(lines, line.Text)
90 if len(lines) > tailLines {
91 lines = lines[len(lines)-tailLines:]
92 }
93 }
94 t.Stop()
95
96 for _, line := range lines {
97 printLogLine(line)
98 }
99
100 if len(lines) == tailLines {
101 fmt.Fprintf(os.Stderr, "\nShowing last %d lines. Full logs available at: %s\n", tailLines, logsFile)
102 fmt.Fprintf(os.Stderr, "Following new log entries...\n\n")
103 }
104
105 t, err = tail.TailFile(logsFile, tail.Config{
106 Follow: true,
107 ReOpen: true,
108 Logger: tail.DiscardingLogger,
109 Location: &tail.SeekInfo{Offset: 0, Whence: io.SeekEnd},
110 })
111 if err != nil {
112 return fmt.Errorf("failed to tail log file: %v", err)
113 }
114 defer t.Stop()
115
116 for {
117 select {
118 case line := <-t.Lines:
119 if line.Err != nil {
120 continue
121 }
122 printLogLine(line.Text)
123 case <-ctx.Done():
124 return nil
125 }
126 }
127}
128
129func showLogs(logsFile string, tailLines int) error {
130 t, err := tail.TailFile(logsFile, tail.Config{
131 Follow: false,
132 ReOpen: false,
133 Logger: tail.DiscardingLogger,
134 MaxLineSize: 0,
135 })
136 if err != nil {
137 return fmt.Errorf("failed to tail log file: %v", err)
138 }
139 defer t.Stop()
140
141 var lines []string
142 for line := range t.Lines {
143 if line.Err != nil {
144 continue
145 }
146 lines = append(lines, line.Text)
147 if len(lines) > tailLines {
148 lines = lines[len(lines)-tailLines:]
149 }
150 }
151
152 for _, line := range lines {
153 printLogLine(line)
154 }
155
156 if len(lines) == tailLines {
157 fmt.Fprintf(os.Stderr, "\nShowing last %d lines. Full logs available at: %s\n", tailLines, logsFile)
158 }
159
160 return nil
161}
162
163func printLogLine(lineText string) {
164 var data map[string]any
165 if err := json.Unmarshal([]byte(lineText), &data); err != nil {
166 return
167 }
168 msg := data["msg"]
169 level := data["level"]
170 otherData := []any{}
171 keys := []string{}
172 for k := range data {
173 keys = append(keys, k)
174 }
175 slices.Sort(keys)
176 for _, k := range keys {
177 switch k {
178 case "msg", "level", "time":
179 continue
180 case "source":
181 source, ok := data[k].(map[string]any)
182 if !ok {
183 continue
184 }
185 sourceFile := fmt.Sprintf("%s:%d", source["file"], int(source["line"].(float64)))
186 otherData = append(otherData, "source", sourceFile)
187
188 default:
189 otherData = append(otherData, k, data[k])
190 }
191 }
192 log.SetTimeFunction(func(_ time.Time) time.Time {
193 // parse the timestamp from the log line if available
194 t, err := time.Parse(time.RFC3339, data["time"].(string))
195 if err != nil {
196 return time.Now() // fallback to current time if parsing fails
197 }
198 return t
199 })
200 switch level {
201 case "INFO":
202 log.Info(msg, otherData...)
203 case "DEBUG":
204 log.Debug(msg, otherData...)
205 case "ERROR":
206 log.Error(msg, otherData...)
207 case "WARN":
208 log.Warn(msg, otherData...)
209 default:
210 log.Info(msg, otherData...)
211 }
212}