Detailed changes
@@ -8,28 +8,26 @@ import (
 // LazySlice is a thread-safe lazy-loaded slice.
 type LazySlice[K any] struct {
 	inner []K
-	mu    sync.Mutex
+	wg    sync.WaitGroup
 }
 
 // NewLazySlice creates a new slice and runs the [load] function in a goroutine
 // to populate it.
 func NewLazySlice[K any](load func() []K) *LazySlice[K] {
 	s := &LazySlice[K]{}
-	s.mu.Lock()
+	s.wg.Add(1)
 	go func() {
 		s.inner = load()
-		s.mu.Unlock()
+		s.wg.Done()
 	}()
 	return s
 }
 
 // Seq returns an iterator that yields elements from the slice.
 func (s *LazySlice[K]) Seq() iter.Seq[K] {
-	s.mu.Lock()
-	inner := s.inner
-	s.mu.Unlock()
+	s.wg.Wait()
 	return func(yield func(K) bool) {
-		for _, v := range inner {
+		for _, v := range s.inner {
 			if !yield(v) {
 				return
 			}
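The WaitGroup states the intent directly: NewLazySlice arms it, the loader goroutine signals Done, and every Seq caller parks on Wait until the data exists. Wait also provides the happens-before edge that makes reading s.inner safe, so Seq no longer copies the slice header under a lock, and no Mutex is locked in one goroutine and unlocked in another. A minimal sketch of how a caller sees this, written as a hypothetical example test in the same package (fmt and time imports assumed; range-over-func needs Go 1.23+):

// Hypothetical example test, not part of the change.
func ExampleLazySlice() {
	s := NewLazySlice(func() []string {
		time.Sleep(50 * time.Millisecond) // stand-in for a slow load
		return []string{"alpha", "beta"}
	})

	// Seq blocks on wg.Wait() until the loader goroutine calls Done; any
	// number of readers may wait at once, and they all iterate the same
	// fully populated slice afterwards.
	for v := range s.Seq() {
		fmt.Println(v)
	}
	// Output:
	// alpha
	// beta
}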
@@ -292,7 +292,7 @@ func (a *agent) generateTitle(ctx context.Context, sessionID string, content str
 				Parts: parts,
 			},
 		},
-		make([]tools.BaseTool, 0),
+		nil,
 	)
 
 	var finalResponse *provider.ProviderResponse
@@ -745,7 +745,7 @@ func (a *agent) Summarize(ctx context.Context, sessionID string) error {
 		response := a.summarizeProvider.StreamResponse(
 			summarizeCtx,
 			msgsWithPrompt,
-			make([]tools.BaseTool, 0),
+			nil,
 		)
 		var finalResponse *provider.ProviderResponse
 		for r := range response {
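Passing nil instead of make([]tools.BaseTool, 0) changes nothing for the callee: a nil slice has length zero, ranging over it runs zero iterations, and append still works, so nil spells "no tools" without constructing an empty slice first. A standalone illustration (the names here are made up, not from the repo):

// Illustrative only: nil and empty slices behave the same for read-only use.
package main

import "fmt"

func main() {
	var none []string          // nil slice
	empty := make([]string, 0) // non-nil, zero length

	fmt.Println(len(none), len(empty)) // 0 0

	for range none { // body never runs for a nil slice
		panic("unreachable")
	}

	// Appending allocates on demand, exactly as it would for the empty slice.
	none = append(none, "shell")
	fmt.Println(none) // [shell]
}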
@@ -188,9 +188,7 @@ func (g *geminiClient) send(ctx context.Context, messages []message.Message, too
 			Parts: []*genai.Part{{Text: g.providerOptions.systemMessage}},
 		},
 	}
-	if len(tools) > 0 {
-		config.Tools = g.convertTools(tools)
-	}
+	config.Tools = g.convertTools(tools)
 	chat, _ := g.client.Chats.Create(ctx, model.ID, config, history)
 
 	attempts := 0
@@ -290,9 +288,7 @@ func (g *geminiClient) stream(ctx context.Context, messages []message.Message, t
 			Parts: []*genai.Part{{Text: g.providerOptions.systemMessage}},
 		},
 	}
-	if len(tools) > 0 {
-		config.Tools = g.convertTools(tools)
-	}
+	config.Tools = g.convertTools(tools)
 	chat, _ := g.client.Chats.Create(ctx, model.ID, config, history)
 
 	attempts := 0
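Setting config.Tools unconditionally assumes convertTools behaves sensibly for a nil or empty input. Its body isn't shown in this diff, but a converter built around a plain range loop has exactly that property: it returns nil when there is nothing to convert, so the assignment leaves the field effectively unset. A hedged, self-contained sketch of that shape (illustrative names, not the repo's actual code):

// Illustrative only: a range-based converter maps empty input to a nil result.
package main

import "fmt"

func convert[T, U any](in []T, f func(T) U) []U {
	var out []U
	for _, v := range in { // zero iterations when in is nil or empty
		out = append(out, f(v))
	}
	return out
}

func main() {
	var none []string
	fmt.Println(convert(none, func(s string) int { return len(s) }) == nil) // true
}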
@@ -11,7 +11,6 @@ import (
 
 	"github.com/charmbracelet/catwalk/pkg/catwalk"
 	"github.com/charmbracelet/crush/internal/config"
-	"github.com/charmbracelet/crush/internal/llm/tools"
 	"github.com/charmbracelet/crush/internal/message"
 	"github.com/openai/openai-go"
 	"github.com/openai/openai-go/option"
@@ -79,7 +78,7 @@ func TestOpenAIClientStreamChoices(t *testing.T) {
 	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
 	defer cancel()
 
-	eventsChan := client.stream(ctx, messages, []tools.BaseTool{})
+	eventsChan := client.stream(ctx, messages, nil)
 
 	// Collect events - this will panic without the bounds check
 	for event := range eventsChan {