package cmd

import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"slices"
	"time"

	"github.com/charmbracelet/crush/internal/config"
	"github.com/charmbracelet/log/v2"
	"github.com/nxadm/tail"
	"github.com/spf13/cobra"
)

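// defaultTailLines is the default value for the --tail flag.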
const defaultTailLines = 1000

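// logsCmd prints the project's crush.log file, optionally following new
// entries as they are written.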
var logsCmd = &cobra.Command{
	Use:   "logs",
	Short: "View crush logs",
	Long:  `View the logs generated by Crush. This command allows you to see the log output for debugging and monitoring.`,
	RunE: func(cmd *cobra.Command, args []string) error {
		cwd, err := cmd.Flags().GetString("cwd")
		if err != nil {
			return fmt.Errorf("failed to get cwd flag: %v", err)
		}

		follow, err := cmd.Flags().GetBool("follow")
		if err != nil {
			return fmt.Errorf("failed to get follow flag: %v", err)
		}

		tailLines, err := cmd.Flags().GetInt("tail")
		if err != nil {
			return fmt.Errorf("failed to get tail flag: %v", err)
		}

		log.SetLevel(log.DebugLevel)
		log.SetOutput(os.Stdout)

		cfg, err := config.Load(cwd, false)
		if err != nil {
			return fmt.Errorf("failed to load configuration: %v", err)
		}
		logsFile := filepath.Join(cfg.WorkingDir(), cfg.Options.DataDirectory, "logs", "crush.log")
		if _, err := os.Stat(logsFile); os.IsNotExist(err) {
			log.Warn("Looks like you are not in a crush project. No logs found.")
			return nil
		}

		if follow {
			return followLogs(cmd.Context(), logsFile, tailLines)
		}

		return showLogs(logsFile, tailLines)
	},
}

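// init registers the logs command and its flags with the root command.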
func init() {
	logsCmd.Flags().BoolP("follow", "f", false, "Follow log output")
	logsCmd.Flags().IntP("tail", "t", defaultTailLines, "Show only the last N lines of the log")
	rootCmd.AddCommand(logsCmd)
}

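// followLogs prints up to tailLines lines from the existing log file, then
// streams new entries until the context is cancelled.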
func followLogs(ctx context.Context, logsFile string, tailLines int) error {
	t, err := tail.TailFile(logsFile, tail.Config{
		Follow: false,
		ReOpen: false,
		Logger: tail.DiscardingLogger,
	})
	if err != nil {
		return fmt.Errorf("failed to tail log file: %v", err)
	}

	var lines []string
	for line := range t.Lines {
		if line.Err != nil {
			continue
		}
		lines = append(lines, line.Text)
		if len(lines) > tailLines {
			lines = lines[len(lines)-tailLines:]
		}
	}
	t.Stop()

	for _, line := range lines {
		printLogLine(line)
	}

	if len(lines) == tailLines {
		fmt.Fprintf(os.Stderr, "\nShowing last %d lines. Full logs available at: %s\n", tailLines, logsFile)
		fmt.Fprintf(os.Stderr, "Following new log entries...\n\n")
	}

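	// Reopen the file in follow mode, seeking to the end so that only
	// entries written from now on are streamed.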
	t, err = tail.TailFile(logsFile, tail.Config{
		Follow:   true,
		ReOpen:   true,
		Logger:   tail.DiscardingLogger,
		Location: &tail.SeekInfo{Offset: 0, Whence: io.SeekEnd},
	})
	if err != nil {
		return fmt.Errorf("failed to tail log file: %v", err)
	}
	defer t.Stop()

	for {
		select {
		case line, ok := <-t.Lines:
			if !ok {
				// The Lines channel was closed; stop following.
				return nil
			}
			if line.Err != nil {
				continue
			}
			printLogLine(line.Text)
		case <-ctx.Done():
			return nil
		}
	}
}

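// showLogs prints up to the last tailLines lines of the log file and returns.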
func showLogs(logsFile string, tailLines int) error {
	t, err := tail.TailFile(logsFile, tail.Config{
		Follow: false,
		ReOpen: false,
		Logger: tail.DiscardingLogger,
	})
	if err != nil {
		return fmt.Errorf("failed to tail log file: %v", err)
	}
	defer t.Stop()

	var lines []string
	for line := range t.Lines {
		if line.Err != nil {
			continue
		}
		lines = append(lines, line.Text)
		if len(lines) > tailLines {
			lines = lines[len(lines)-tailLines:]
		}
	}

	for _, line := range lines {
		printLogLine(line)
	}

	if len(lines) == tailLines {
		fmt.Fprintf(os.Stderr, "\nShowing last %d lines. Full logs available at: %s\n", tailLines, logsFile)
	}

	return nil
}

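// printLogLine parses a single JSON-encoded log line and re-emits it through
// the charmbracelet logger, preserving the original timestamp, level, and
// structured key/value fields. Lines that are not valid JSON are skipped.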
func printLogLine(lineText string) {
	var data map[string]any
	if err := json.Unmarshal([]byte(lineText), &data); err != nil {
		return
	}
	msg := data["msg"]
	level := data["level"]
	otherData := []any{}
	keys := []string{}
	for k := range data {
		keys = append(keys, k)
	}
	slices.Sort(keys)
	for _, k := range keys {
		switch k {
		case "msg", "level", "time":
			continue
		case "source":
			source, ok := data[k].(map[string]any)
			if !ok {
				continue
			}
			lineNum, _ := source["line"].(float64)
			sourceFile := fmt.Sprintf("%s:%d", source["file"], int(lineNum))
			otherData = append(otherData, "source", sourceFile)
		default:
			otherData = append(otherData, k, data[k])
		}
	}
	log.SetTimeFunction(func(_ time.Time) time.Time {
		// Use the timestamp from the log line if it is present and parseable.
		ts, ok := data["time"].(string)
		if !ok {
			return time.Now()
		}
		parsed, err := time.Parse(time.RFC3339, ts)
		if err != nil {
			return time.Now() // fall back to the current time if parsing fails
		}
		return parsed
	})
	switch level {
	case "INFO":
		log.Info(msg, otherData...)
	case "DEBUG":
		log.Debug(msg, otherData...)
	case "ERROR":
		log.Error(msg, otherData...)
	case "WARN":
		log.Warn(msg, otherData...)
	default:
		log.Info(msg, otherData...)
	}
}