package config

import (
	"encoding/json"
	"io/fs"
	"os"
	"path/filepath"
	"testing"

	"github.com/stretchr/testify/require"
)

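// These tests cover recent-model tracking: recordRecentModel and
// UpdatePreferredModel should update the in-memory Config state and persist
// it to the JSON config file, which the helpers below read back through an
// fs.FS rooted at the config directory.
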
// readConfigJSON reads and unmarshals the JSON config file at path.
func readConfigJSON(t *testing.T, path string) map[string]any {
	t.Helper()
	baseDir := filepath.Dir(path)
	fileName := filepath.Base(path)
	b, err := fs.ReadFile(os.DirFS(baseDir), fileName)
	require.NoError(t, err)
	var out map[string]any
	require.NoError(t, json.Unmarshal(b, &out))
	return out
}

// readRecentModels reads the recent_models section from the config file.
func readRecentModels(t *testing.T, path string) map[string]any {
	t.Helper()
	out := readConfigJSON(t, path)
	rm, ok := out["recent_models"].(map[string]any)
	require.True(t, ok)
	return rm
}

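// The assertions below read the persisted file back and expect a
// recent_models section shaped roughly like the following (the map keys are
// the SelectedModelType string values; "large" here is illustrative, not
// necessarily the exact key):
//
//	{
//	  "recent_models": {
//	    "large": [
//	      {"provider": "openai", "model": "gpt-4o"}
//	    ]
//	  }
//	}
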
func TestRecordRecentModel_AddsAndPersists(t *testing.T) {
	t.Parallel()

	dir := t.TempDir()
	cfg := &Config{}
	cfg.setDefaults(dir, "")
	cfg.dataConfigDir = filepath.Join(dir, "config.json")

	err := cfg.recordRecentModel(SelectedModelTypeLarge, SelectedModel{Provider: "openai", Model: "gpt-4o"})
	require.NoError(t, err)

	// in-memory state
	require.Len(t, cfg.RecentModels[SelectedModelTypeLarge], 1)
	require.Equal(t, "openai", cfg.RecentModels[SelectedModelTypeLarge][0].Provider)
	require.Equal(t, "gpt-4o", cfg.RecentModels[SelectedModelTypeLarge][0].Model)

	// persisted state
	rm := readRecentModels(t, cfg.dataConfigDir)
	large, ok := rm[string(SelectedModelTypeLarge)].([]any)
	require.True(t, ok)
	require.Len(t, large, 1)
	item, ok := large[0].(map[string]any)
	require.True(t, ok)
	require.Equal(t, "openai", item["provider"])
	require.Equal(t, "gpt-4o", item["model"])
}

func TestRecordRecentModel_DedupeAndMoveToFront(t *testing.T) {
	t.Parallel()

	dir := t.TempDir()
	cfg := &Config{}
	cfg.setDefaults(dir, "")
	cfg.dataConfigDir = filepath.Join(dir, "config.json")

	// Add two entries
	require.NoError(t, cfg.recordRecentModel(SelectedModelTypeLarge, SelectedModel{Provider: "openai", Model: "gpt-4o"}))
	require.NoError(t, cfg.recordRecentModel(SelectedModelTypeLarge, SelectedModel{Provider: "anthropic", Model: "claude"}))
	// Re-add first; should move to front and not duplicate
	require.NoError(t, cfg.recordRecentModel(SelectedModelTypeLarge, SelectedModel{Provider: "openai", Model: "gpt-4o"}))

	got := cfg.RecentModels[SelectedModelTypeLarge]
	require.Len(t, got, 2)
	require.Equal(t, SelectedModel{Provider: "openai", Model: "gpt-4o"}, got[0])
	require.Equal(t, SelectedModel{Provider: "anthropic", Model: "claude"}, got[1])
}

func TestRecordRecentModel_TrimsToMax(t *testing.T) {
	t.Parallel()

	dir := t.TempDir()
	cfg := &Config{}
	cfg.setDefaults(dir, "")
	cfg.dataConfigDir = filepath.Join(dir, "config.json")

	// Insert 6 unique models; max is 5
	entries := []SelectedModel{
		{Provider: "p1", Model: "m1"},
		{Provider: "p2", Model: "m2"},
		{Provider: "p3", Model: "m3"},
		{Provider: "p4", Model: "m4"},
		{Provider: "p5", Model: "m5"},
		{Provider: "p6", Model: "m6"},
	}
	for _, e := range entries {
		require.NoError(t, cfg.recordRecentModel(SelectedModelTypeLarge, e))
	}

	// in-memory state
	got := cfg.RecentModels[SelectedModelTypeLarge]
	require.Len(t, got, 5)
	// Newest first, capped at 5: p6..p2
	require.Equal(t, SelectedModel{Provider: "p6", Model: "m6"}, got[0])
	require.Equal(t, SelectedModel{Provider: "p5", Model: "m5"}, got[1])
	require.Equal(t, SelectedModel{Provider: "p4", Model: "m4"}, got[2])
	require.Equal(t, SelectedModel{Provider: "p3", Model: "m3"}, got[3])
	require.Equal(t, SelectedModel{Provider: "p2", Model: "m2"}, got[4])

	// persisted state: verify trimmed to 5 and newest-first order
	rm := readRecentModels(t, cfg.dataConfigDir)
	large, ok := rm[string(SelectedModelTypeLarge)].([]any)
	require.True(t, ok)
	require.Len(t, large, 5)
	// Build provider:model IDs and verify order
	var ids []string
	for _, v := range large {
		m := v.(map[string]any)
		ids = append(ids, m["provider"].(string)+":"+m["model"].(string))
	}
	require.Equal(t, []string{"p6:m6", "p5:m5", "p4:m4", "p3:m3", "p2:m2"}, ids)
}

func TestRecordRecentModel_SkipsEmptyValues(t *testing.T) {
	t.Parallel()

	dir := t.TempDir()
	cfg := &Config{}
	cfg.setDefaults(dir, "")
	cfg.dataConfigDir = filepath.Join(dir, "config.json")

	// Missing provider
	require.NoError(t, cfg.recordRecentModel(SelectedModelTypeLarge, SelectedModel{Provider: "", Model: "m"}))
	// Missing model
	require.NoError(t, cfg.recordRecentModel(SelectedModelTypeLarge, SelectedModel{Provider: "p", Model: ""}))

	_, ok := cfg.RecentModels[SelectedModelTypeLarge]
	// Map may be initialized, but should have no entries
	if ok {
		require.Len(t, cfg.RecentModels[SelectedModelTypeLarge], 0)
	}
	// No file should be written (stat via fs.FS)
	baseDir := filepath.Dir(cfg.dataConfigDir)
	fileName := filepath.Base(cfg.dataConfigDir)
	_, err := fs.Stat(os.DirFS(baseDir), fileName)
	require.True(t, os.IsNotExist(err))
}

func TestRecordRecentModel_NoPersistOnNoop(t *testing.T) {
	t.Parallel()

	dir := t.TempDir()
	cfg := &Config{}
	cfg.setDefaults(dir, "")
	cfg.dataConfigDir = filepath.Join(dir, "config.json")

	entry := SelectedModel{Provider: "openai", Model: "gpt-4o"}
	require.NoError(t, cfg.recordRecentModel(SelectedModelTypeLarge, entry))

	baseDir := filepath.Dir(cfg.dataConfigDir)
	fileName := filepath.Base(cfg.dataConfigDir)
	before, err := fs.ReadFile(os.DirFS(baseDir), fileName)
	require.NoError(t, err)

	// Capture the file's ModTime so we can verify no write occurs
	stBefore, err := fs.Stat(os.DirFS(baseDir), fileName)
	require.NoError(t, err)
	beforeMod := stBefore.ModTime()

	// Re-recording the same entry should be a no-op (no write)
	require.NoError(t, cfg.recordRecentModel(SelectedModelTypeLarge, entry))

	after, err := fs.ReadFile(os.DirFS(baseDir), fileName)
	require.NoError(t, err)
	require.Equal(t, string(before), string(after))

	// Verify the ModTime is unchanged to confirm no write occurred
	stAfter, err := fs.Stat(os.DirFS(baseDir), fileName)
	require.NoError(t, err)
	require.True(t, stAfter.ModTime().Equal(beforeMod), "file ModTime should not change on noop")
}

func TestUpdatePreferredModel_UpdatesRecents(t *testing.T) {
	t.Parallel()

	dir := t.TempDir()
	cfg := &Config{}
	cfg.setDefaults(dir, "")
	cfg.dataConfigDir = filepath.Join(dir, "config.json")

	sel := SelectedModel{Provider: "openai", Model: "gpt-4o"}
	require.NoError(t, cfg.UpdatePreferredModel(SelectedModelTypeSmall, sel))

	// in-memory
	require.Equal(t, sel, cfg.Models[SelectedModelTypeSmall])
	require.Len(t, cfg.RecentModels[SelectedModelTypeSmall], 1)

	// persisted (read via fs.FS)
	rm := readRecentModels(t, cfg.dataConfigDir)
	small, ok := rm[string(SelectedModelTypeSmall)].([]any)
	require.True(t, ok)
	require.Len(t, small, 1)
}

func TestRecordRecentModel_TypeIsolation(t *testing.T) {
	t.Parallel()

	dir := t.TempDir()
	cfg := &Config{}
	cfg.setDefaults(dir, "")
	cfg.dataConfigDir = filepath.Join(dir, "config.json")

	// Add models to both large and small types
	largeModel := SelectedModel{Provider: "openai", Model: "gpt-4o"}
	smallModel := SelectedModel{Provider: "anthropic", Model: "claude"}

	require.NoError(t, cfg.recordRecentModel(SelectedModelTypeLarge, largeModel))
	require.NoError(t, cfg.recordRecentModel(SelectedModelTypeSmall, smallModel))

	// in-memory: verify types maintain separate histories
	require.Len(t, cfg.RecentModels[SelectedModelTypeLarge], 1)
	require.Len(t, cfg.RecentModels[SelectedModelTypeSmall], 1)
	require.Equal(t, largeModel, cfg.RecentModels[SelectedModelTypeLarge][0])
	require.Equal(t, smallModel, cfg.RecentModels[SelectedModelTypeSmall][0])

	// Add another to large, verify small unchanged
	anotherLarge := SelectedModel{Provider: "google", Model: "gemini"}
	require.NoError(t, cfg.recordRecentModel(SelectedModelTypeLarge, anotherLarge))

	require.Len(t, cfg.RecentModels[SelectedModelTypeLarge], 2)
	require.Len(t, cfg.RecentModels[SelectedModelTypeSmall], 1)
	require.Equal(t, smallModel, cfg.RecentModels[SelectedModelTypeSmall][0])

	// persisted state: verify both types exist with correct lengths and contents
	rm := readRecentModels(t, cfg.dataConfigDir)

	large, ok := rm[string(SelectedModelTypeLarge)].([]any)
	require.True(t, ok)
	require.Len(t, large, 2)
	// Verify newest first for large type
	require.Equal(t, "google", large[0].(map[string]any)["provider"])
	require.Equal(t, "gemini", large[0].(map[string]any)["model"])
	require.Equal(t, "openai", large[1].(map[string]any)["provider"])
	require.Equal(t, "gpt-4o", large[1].(map[string]any)["model"])

	small, ok := rm[string(SelectedModelTypeSmall)].([]any)
	require.True(t, ok)
	require.Len(t, small, 1)
	require.Equal(t, "anthropic", small[0].(map[string]any)["provider"])
	require.Equal(t, "claude", small[0].(map[string]any)["model"])
}