// conversation_state_test.go

  1package server
  2
  3import (
  4	"context"
  5	"encoding/json"
  6	"net/http"
  7	"net/http/httptest"
  8	"strings"
  9	"testing"
 10	"time"
 11
 12	"shelley.exe.dev/claudetool"
 13	"shelley.exe.dev/db"
 14	"shelley.exe.dev/llm"
 15	"shelley.exe.dev/loop"
 16)
 17
// responseRecorderWithClose wraps httptest.ResponseRecorder to support CloseNotify,
// letting tests simulate a client disconnecting from a streaming (SSE) handler.
type responseRecorderWithClose struct {
	*httptest.ResponseRecorder
	// closeNotify buffers at most one disconnect signal (capacity 1), so
	// Close can signal without blocking even if nobody is receiving yet.
	closeNotify chan bool
}
 23
 24func newResponseRecorderWithClose() *responseRecorderWithClose {
 25	return &responseRecorderWithClose{
 26		ResponseRecorder: httptest.NewRecorder(),
 27		closeNotify:      make(chan bool, 1),
 28	}
 29}
 30
// CloseNotify satisfies http.CloseNotifier; the streaming handler selects on
// this channel to detect that the client went away. The channel receives a
// value when Close is called on the recorder.
func (r *responseRecorderWithClose) CloseNotify() <-chan bool {
	return r.closeNotify
}
 34
// Close simulates the client dropping the connection by signaling the
// CloseNotify channel. The non-blocking send makes Close safe to call more
// than once: if a signal is already buffered, later calls are no-ops.
func (r *responseRecorderWithClose) Close() {
	select {
	case r.closeNotify <- true:
	default:
		// Signal already pending; nothing to do.
	}
}
 41
 42// TestConversationStateAfterServerRestart verifies that when a conversation is
 43// loaded after a server restart (new manager created), the agent is correctly
 44// reported as not working since the loop isn't running.
 45func TestConversationStateAfterServerRestart(t *testing.T) {
 46	database, cleanup := setupTestDB(t)
 47	defer cleanup()
 48
 49	ctx := context.Background()
 50
 51	// Create a conversation with some messages (simulating previous activity)
 52	conv, err := database.CreateConversation(ctx, nil, true, nil, nil)
 53	if err != nil {
 54		t.Fatalf("Failed to create conversation: %v", err)
 55	}
 56
 57	// Add a user message
 58	userMsg := llm.Message{
 59		Role:    llm.MessageRoleUser,
 60		Content: []llm.Content{{Type: llm.ContentTypeText, Text: "Hello"}},
 61	}
 62	_, err = database.CreateMessage(ctx, db.CreateMessageParams{
 63		ConversationID: conv.ConversationID,
 64		Type:           db.MessageTypeUser,
 65		LLMData:        userMsg,
 66	})
 67	if err != nil {
 68		t.Fatalf("Failed to create user message: %v", err)
 69	}
 70
 71	// Add an agent message (without end_of_turn to simulate mid-conversation)
 72	agentMsg := llm.Message{
 73		Role:      llm.MessageRoleAssistant,
 74		Content:   []llm.Content{{Type: llm.ContentTypeText, Text: "Hi there!"}},
 75		EndOfTurn: false,
 76	}
 77	_, err = database.CreateMessage(ctx, db.CreateMessageParams{
 78		ConversationID: conv.ConversationID,
 79		Type:           db.MessageTypeAgent,
 80		LLMData:        agentMsg,
 81	})
 82	if err != nil {
 83		t.Fatalf("Failed to create agent message: %v", err)
 84	}
 85
 86	// Create a NEW server (simulating server restart - no active managers)
 87	predictableService := loop.NewPredictableService()
 88	llmManager := &testLLMManager{service: predictableService}
 89	toolSetConfig := claudetool.ToolSetConfig{EnableBrowser: false}
 90	server := NewServer(database, llmManager, toolSetConfig, nil, true, "", "predictable", "", nil)
 91
 92	mux := http.NewServeMux()
 93	server.RegisterRoutes(mux)
 94
 95	// Make a streaming request with a context that cancels after we read the first message
 96	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
 97	defer cancel()
 98
 99	req := httptest.NewRequest("GET", "/api/conversation/"+conv.ConversationID+"/stream", nil).WithContext(ctx)
100	req.Header.Set("Accept", "text/event-stream")
101
102	w := newResponseRecorderWithClose()
103
104	// Run handler in goroutine and close connection after getting first response
105	done := make(chan struct{})
106	go func() {
107		defer close(done)
108		mux.ServeHTTP(w, req)
109	}()
110
111	// Wait for some data or timeout
112	time.Sleep(500 * time.Millisecond)
113	w.Close()
114	cancel()
115
116	// Wait for handler to finish
117	<-done
118
119	// Parse the first SSE message
120	body := w.Body.String()
121	if !strings.HasPrefix(body, "data: ") {
122		t.Fatalf("Expected SSE data, got: %s", body)
123	}
124
125	jsonData := strings.TrimPrefix(strings.Split(body, "\n")[0], "data: ")
126	var response StreamResponse
127	if err := json.Unmarshal([]byte(jsonData), &response); err != nil {
128		t.Fatalf("Failed to parse response: %v", err)
129	}
130
131	// Verify conversation state shows agent is NOT working
132	// (because after server restart, no loop is running)
133	if response.ConversationState == nil {
134		t.Fatal("Expected ConversationState in response")
135	}
136	if response.ConversationState.ConversationID != conv.ConversationID {
137		t.Errorf("Expected ConversationID %s, got %s", conv.ConversationID, response.ConversationState.ConversationID)
138	}
139	if response.ConversationState.Working {
140		t.Error("Expected Working=false after server restart (no active loop)")
141	}
142
143	// Verify messages were loaded
144	if len(response.Messages) != 2 {
145		t.Errorf("Expected 2 messages, got %d", len(response.Messages))
146	}
147}
148
149// TestModelRestorationAfterServerRestart verifies that when a conversation is
150// resumed after a server restart, the model is correctly loaded from the database
151// and reported in the ConversationState.
152func TestModelRestorationAfterServerRestart(t *testing.T) {
153	database, cleanup := setupTestDB(t)
154	defer cleanup()
155
156	ctx := context.Background()
157
158	// Create a conversation with a specific model
159	modelID := "claude-sonnet-4-20250514"
160	conv, err := database.CreateConversation(ctx, nil, true, nil, &modelID)
161	if err != nil {
162		t.Fatalf("Failed to create conversation: %v", err)
163	}
164
165	// Add a user message
166	userMsg := llm.Message{
167		Role:    llm.MessageRoleUser,
168		Content: []llm.Content{{Type: llm.ContentTypeText, Text: "Hello"}},
169	}
170	_, err = database.CreateMessage(ctx, db.CreateMessageParams{
171		ConversationID: conv.ConversationID,
172		Type:           db.MessageTypeUser,
173		LLMData:        userMsg,
174	})
175	if err != nil {
176		t.Fatalf("Failed to create user message: %v", err)
177	}
178
179	// Add an agent message
180	agentMsg := llm.Message{
181		Role:      llm.MessageRoleAssistant,
182		Content:   []llm.Content{{Type: llm.ContentTypeText, Text: "Hi there!"}},
183		EndOfTurn: true,
184	}
185	_, err = database.CreateMessage(ctx, db.CreateMessageParams{
186		ConversationID: conv.ConversationID,
187		Type:           db.MessageTypeAgent,
188		LLMData:        agentMsg,
189	})
190	if err != nil {
191		t.Fatalf("Failed to create agent message: %v", err)
192	}
193
194	// Create a NEW server (simulating server restart or different browser session)
195	predictableService := loop.NewPredictableService()
196	llmManager := &testLLMManager{service: predictableService}
197	toolSetConfig := claudetool.ToolSetConfig{EnableBrowser: false}
198	server := NewServer(database, llmManager, toolSetConfig, nil, true, "", "predictable", "", nil)
199
200	mux := http.NewServeMux()
201	server.RegisterRoutes(mux)
202
203	// Make a streaming request
204	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
205	defer cancel()
206
207	req := httptest.NewRequest("GET", "/api/conversation/"+conv.ConversationID+"/stream", nil).WithContext(ctx)
208	req.Header.Set("Accept", "text/event-stream")
209
210	w := newResponseRecorderWithClose()
211
212	done := make(chan struct{})
213	go func() {
214		defer close(done)
215		mux.ServeHTTP(w, req)
216	}()
217
218	time.Sleep(500 * time.Millisecond)
219	w.Close()
220	cancel()
221	<-done
222
223	// Parse the first SSE message
224	body := w.Body.String()
225	if !strings.HasPrefix(body, "data: ") {
226		t.Fatalf("Expected SSE data, got: %s", body)
227	}
228
229	jsonData := strings.TrimPrefix(strings.Split(body, "\n")[0], "data: ")
230	var response StreamResponse
231	if err := json.Unmarshal([]byte(jsonData), &response); err != nil {
232		t.Fatalf("Failed to parse response: %v", err)
233	}
234
235	// Verify conversation state includes the model from the database
236	if response.ConversationState == nil {
237		t.Fatal("Expected ConversationState in response")
238	}
239	if response.ConversationState.Model != modelID {
240		t.Errorf("Expected Model='%s', got '%s'", modelID, response.ConversationState.Model)
241	}
242}