// recent_models_test.go

  1package config
  2
  3import (
  4	"encoding/json"
  5	"io/fs"
  6	"os"
  7	"path/filepath"
  8	"testing"
  9
 10	"github.com/stretchr/testify/require"
 11)
 12
 13// readConfigJSON reads and unmarshals the JSON config file at path.
 14func readConfigJSON(t *testing.T, path string) map[string]any {
 15	t.Helper()
 16	baseDir := filepath.Dir(path)
 17	fileName := filepath.Base(path)
 18	b, err := fs.ReadFile(os.DirFS(baseDir), fileName)
 19	require.NoError(t, err)
 20	var out map[string]any
 21	require.NoError(t, json.Unmarshal(b, &out))
 22	return out
 23}
 24
 25// readRecentModels reads the recent_models section from the config file.
 26func readRecentModels(t *testing.T, path string) map[string]any {
 27	t.Helper()
 28	out := readConfigJSON(t, path)
 29	rm, ok := out["recent_models"].(map[string]any)
 30	require.True(t, ok)
 31	return rm
 32}
 33
 34// testStoreWithPath creates a ConfigStore backed by a Config for recent model tests.
 35func testStoreWithPath(cfg *Config, dir string) *ConfigStore {
 36	return &ConfigStore{
 37		config:         cfg,
 38		globalDataPath: filepath.Join(dir, "config.json"),
 39	}
 40}
 41
 42func TestRecordRecentModel_AddsAndPersists(t *testing.T) {
 43	t.Parallel()
 44
 45	dir := t.TempDir()
 46	cfg := &Config{}
 47	cfg.setDefaults(dir, "")
 48	store := testStoreWithPath(cfg, dir)
 49
 50	err := store.recordRecentModel(ScopeGlobal, SelectedModelTypeLarge, SelectedModel{Provider: "openai", Model: "gpt-4o"})
 51	require.NoError(t, err)
 52
 53	// in-memory state
 54	require.Len(t, cfg.RecentModels[SelectedModelTypeLarge], 1)
 55	require.Equal(t, "openai", cfg.RecentModels[SelectedModelTypeLarge][0].Provider)
 56	require.Equal(t, "gpt-4o", cfg.RecentModels[SelectedModelTypeLarge][0].Model)
 57
 58	// persisted state
 59	rm := readRecentModels(t, store.globalDataPath)
 60	large, ok := rm[string(SelectedModelTypeLarge)].([]any)
 61	require.True(t, ok)
 62	require.Len(t, large, 1)
 63	item, ok := large[0].(map[string]any)
 64	require.True(t, ok)
 65	require.Equal(t, "openai", item["provider"])
 66	require.Equal(t, "gpt-4o", item["model"])
 67}
 68
 69func TestRecordRecentModel_DedupeAndMoveToFront(t *testing.T) {
 70	t.Parallel()
 71
 72	dir := t.TempDir()
 73	cfg := &Config{}
 74	cfg.setDefaults(dir, "")
 75	store := testStoreWithPath(cfg, dir)
 76
 77	// Add two entries
 78	require.NoError(t, store.recordRecentModel(ScopeGlobal, SelectedModelTypeLarge, SelectedModel{Provider: "openai", Model: "gpt-4o"}))
 79	require.NoError(t, store.recordRecentModel(ScopeGlobal, SelectedModelTypeLarge, SelectedModel{Provider: "anthropic", Model: "claude"}))
 80	// Re-add first; should move to front and not duplicate
 81	require.NoError(t, store.recordRecentModel(ScopeGlobal, SelectedModelTypeLarge, SelectedModel{Provider: "openai", Model: "gpt-4o"}))
 82
 83	got := cfg.RecentModels[SelectedModelTypeLarge]
 84	require.Len(t, got, 2)
 85	require.Equal(t, SelectedModel{Provider: "openai", Model: "gpt-4o"}, got[0])
 86	require.Equal(t, SelectedModel{Provider: "anthropic", Model: "claude"}, got[1])
 87}
 88
 89func TestRecordRecentModel_TrimsToMax(t *testing.T) {
 90	t.Parallel()
 91
 92	dir := t.TempDir()
 93	cfg := &Config{}
 94	cfg.setDefaults(dir, "")
 95	store := testStoreWithPath(cfg, dir)
 96
 97	// Insert 6 unique models; max is 5
 98	entries := []SelectedModel{
 99		{Provider: "p1", Model: "m1"},
100		{Provider: "p2", Model: "m2"},
101		{Provider: "p3", Model: "m3"},
102		{Provider: "p4", Model: "m4"},
103		{Provider: "p5", Model: "m5"},
104		{Provider: "p6", Model: "m6"},
105	}
106	for _, e := range entries {
107		require.NoError(t, store.recordRecentModel(ScopeGlobal, SelectedModelTypeLarge, e))
108	}
109
110	// in-memory state
111	got := cfg.RecentModels[SelectedModelTypeLarge]
112	require.Len(t, got, 5)
113	// Newest first, capped at 5: p6..p2
114	require.Equal(t, SelectedModel{Provider: "p6", Model: "m6"}, got[0])
115	require.Equal(t, SelectedModel{Provider: "p5", Model: "m5"}, got[1])
116	require.Equal(t, SelectedModel{Provider: "p4", Model: "m4"}, got[2])
117	require.Equal(t, SelectedModel{Provider: "p3", Model: "m3"}, got[3])
118	require.Equal(t, SelectedModel{Provider: "p2", Model: "m2"}, got[4])
119
120	// persisted state: verify trimmed to 5 and newest-first order
121	rm := readRecentModels(t, store.globalDataPath)
122	large, ok := rm[string(SelectedModelTypeLarge)].([]any)
123	require.True(t, ok)
124	require.Len(t, large, 5)
125	// Build provider:model IDs and verify order
126	var ids []string
127	for _, v := range large {
128		m := v.(map[string]any)
129		ids = append(ids, m["provider"].(string)+":"+m["model"].(string))
130	}
131	require.Equal(t, []string{"p6:m6", "p5:m5", "p4:m4", "p3:m3", "p2:m2"}, ids)
132}
133
134func TestRecordRecentModel_SkipsEmptyValues(t *testing.T) {
135	t.Parallel()
136
137	dir := t.TempDir()
138	cfg := &Config{}
139	cfg.setDefaults(dir, "")
140	store := testStoreWithPath(cfg, dir)
141
142	// Missing provider
143	require.NoError(t, store.recordRecentModel(ScopeGlobal, SelectedModelTypeLarge, SelectedModel{Provider: "", Model: "m"}))
144	// Missing model
145	require.NoError(t, store.recordRecentModel(ScopeGlobal, SelectedModelTypeLarge, SelectedModel{Provider: "p", Model: ""}))
146
147	_, ok := cfg.RecentModels[SelectedModelTypeLarge]
148	// Map may be initialized, but should have no entries
149	if ok {
150		require.Len(t, cfg.RecentModels[SelectedModelTypeLarge], 0)
151	}
152	// No file should be written (stat via fs.FS)
153	baseDir := filepath.Dir(store.globalDataPath)
154	fileName := filepath.Base(store.globalDataPath)
155	_, err := fs.Stat(os.DirFS(baseDir), fileName)
156	require.True(t, os.IsNotExist(err))
157}
158
159func TestRecordRecentModel_NoPersistOnNoop(t *testing.T) {
160	t.Parallel()
161
162	dir := t.TempDir()
163	cfg := &Config{}
164	cfg.setDefaults(dir, "")
165	store := testStoreWithPath(cfg, dir)
166
167	entry := SelectedModel{Provider: "openai", Model: "gpt-4o"}
168	require.NoError(t, store.recordRecentModel(ScopeGlobal, SelectedModelTypeLarge, entry))
169
170	baseDir := filepath.Dir(store.globalDataPath)
171	fileName := filepath.Base(store.globalDataPath)
172	before, err := fs.ReadFile(os.DirFS(baseDir), fileName)
173	require.NoError(t, err)
174
175	// Get file ModTime to verify no write occurs
176	stBefore, err := fs.Stat(os.DirFS(baseDir), fileName)
177	require.NoError(t, err)
178	beforeMod := stBefore.ModTime()
179
180	// Re-record same entry should be a no-op (no write)
181	require.NoError(t, store.recordRecentModel(ScopeGlobal, SelectedModelTypeLarge, entry))
182
183	after, err := fs.ReadFile(os.DirFS(baseDir), fileName)
184	require.NoError(t, err)
185	require.Equal(t, string(before), string(after))
186
187	// Verify ModTime unchanged to ensure truly no write occurred
188	stAfter, err := fs.Stat(os.DirFS(baseDir), fileName)
189	require.NoError(t, err)
190	require.True(t, stAfter.ModTime().Equal(beforeMod), "file ModTime should not change on noop")
191}
192
193func TestUpdatePreferredModel_UpdatesRecents(t *testing.T) {
194	t.Parallel()
195
196	dir := t.TempDir()
197	cfg := &Config{}
198	cfg.setDefaults(dir, "")
199	store := testStoreWithPath(cfg, dir)
200
201	sel := SelectedModel{Provider: "openai", Model: "gpt-4o"}
202	require.NoError(t, store.UpdatePreferredModel(ScopeGlobal, SelectedModelTypeSmall, sel))
203
204	// in-memory
205	require.Equal(t, sel, cfg.Models[SelectedModelTypeSmall])
206	require.Len(t, cfg.RecentModels[SelectedModelTypeSmall], 1)
207
208	// persisted (read via fs.FS)
209	rm := readRecentModels(t, store.globalDataPath)
210	small, ok := rm[string(SelectedModelTypeSmall)].([]any)
211	require.True(t, ok)
212	require.Len(t, small, 1)
213}
214
215func TestRecordRecentModel_TypeIsolation(t *testing.T) {
216	t.Parallel()
217
218	dir := t.TempDir()
219	cfg := &Config{}
220	cfg.setDefaults(dir, "")
221	store := testStoreWithPath(cfg, dir)
222
223	// Add models to both large and small types
224	largeModel := SelectedModel{Provider: "openai", Model: "gpt-4o"}
225	smallModel := SelectedModel{Provider: "anthropic", Model: "claude"}
226
227	require.NoError(t, store.recordRecentModel(ScopeGlobal, SelectedModelTypeLarge, largeModel))
228	require.NoError(t, store.recordRecentModel(ScopeGlobal, SelectedModelTypeSmall, smallModel))
229
230	// in-memory: verify types maintain separate histories
231	require.Len(t, cfg.RecentModels[SelectedModelTypeLarge], 1)
232	require.Len(t, cfg.RecentModels[SelectedModelTypeSmall], 1)
233	require.Equal(t, largeModel, cfg.RecentModels[SelectedModelTypeLarge][0])
234	require.Equal(t, smallModel, cfg.RecentModels[SelectedModelTypeSmall][0])
235
236	// Add another to large, verify small unchanged
237	anotherLarge := SelectedModel{Provider: "google", Model: "gemini"}
238	require.NoError(t, store.recordRecentModel(ScopeGlobal, SelectedModelTypeLarge, anotherLarge))
239
240	require.Len(t, cfg.RecentModels[SelectedModelTypeLarge], 2)
241	require.Len(t, cfg.RecentModels[SelectedModelTypeSmall], 1)
242	require.Equal(t, smallModel, cfg.RecentModels[SelectedModelTypeSmall][0])
243
244	// persisted state: verify both types exist with correct lengths and contents
245	rm := readRecentModels(t, store.globalDataPath)
246
247	large, ok := rm[string(SelectedModelTypeLarge)].([]any)
248	require.True(t, ok)
249	require.Len(t, large, 2)
250	// Verify newest first for large type
251	require.Equal(t, "google", large[0].(map[string]any)["provider"])
252	require.Equal(t, "gemini", large[0].(map[string]any)["model"])
253	require.Equal(t, "openai", large[1].(map[string]any)["provider"])
254	require.Equal(t, "gpt-4o", large[1].(map[string]any)["model"])
255
256	small, ok := rm[string(SelectedModelTypeSmall)].([]any)
257	require.True(t, ok)
258	require.Len(t, small, 1)
259	require.Equal(t, "anthropic", small[0].(map[string]any)["provider"])
260	require.Equal(t, "claude", small[0].(map[string]any)["model"])
261}