client.go

  1package ollama
  2
  3import (
  4	"context"
  5	"fmt"
  6	"strings"
  7
  8	"github.com/charmbracelet/crush/internal/fur/provider"
  9)
 10
 11// IsRunning checks if Ollama is running by attempting to run a CLI command
 12func IsRunning(ctx context.Context) bool {
 13	_, err := CLIListModels(ctx)
 14	return err == nil
 15}
 16
 17// GetModels retrieves available models from Ollama using CLI
 18func GetModels(ctx context.Context) ([]provider.Model, error) {
 19	ollamaModels, err := CLIListModels(ctx)
 20	if err != nil {
 21		return nil, err
 22	}
 23
 24	models := make([]provider.Model, len(ollamaModels))
 25	for i, ollamaModel := range ollamaModels {
 26		family := extractModelFamily(ollamaModel.Name)
 27		models[i] = provider.Model{
 28			ID:                 ollamaModel.Name,
 29			Model:              ollamaModel.Name,
 30			CostPer1MIn:        0, // Local models have no cost
 31			CostPer1MOut:       0,
 32			CostPer1MInCached:  0,
 33			CostPer1MOutCached: 0,
 34			ContextWindow:      getContextWindow(family),
 35			DefaultMaxTokens:   4096,
 36			CanReason:          false,
 37			HasReasoningEffort: false,
 38			SupportsImages:     supportsImages(family),
 39		}
 40	}
 41
 42	return models, nil
 43}
 44
 45// GetRunningModels returns models that are currently loaded in memory using CLI
 46func GetRunningModels(ctx context.Context) ([]OllamaRunningModel, error) {
 47	runningModelNames, err := CLIListRunningModels(ctx)
 48	if err != nil {
 49		return nil, err
 50	}
 51
 52	var runningModels []OllamaRunningModel
 53	for _, name := range runningModelNames {
 54		runningModels = append(runningModels, OllamaRunningModel{
 55			Name: name,
 56		})
 57	}
 58
 59	return runningModels, nil
 60}
 61
 62// IsModelLoaded checks if a specific model is currently loaded in memory using CLI
 63func IsModelLoaded(ctx context.Context, modelName string) (bool, error) {
 64	return CLIIsModelRunning(ctx, modelName)
 65}
 66
 67// GetProvider returns a provider.Provider for Ollama if it's running
 68func GetProvider(ctx context.Context) (*provider.Provider, error) {
 69	if !IsRunning(ctx) {
 70		return nil, fmt.Errorf("Ollama is not running")
 71	}
 72
 73	models, err := GetModels(ctx)
 74	if err != nil {
 75		return nil, fmt.Errorf("failed to get models: %w", err)
 76	}
 77
 78	return &provider.Provider{
 79		Name:   "Ollama",
 80		ID:     "ollama",
 81		Models: models,
 82	}, nil
 83}
 84
 85// extractModelFamily extracts the model family from a model name
 86func extractModelFamily(modelName string) string {
 87	// Extract the family from model names like "llama3.2:3b" -> "llama"
 88	parts := strings.Split(modelName, ":")
 89	if len(parts) > 0 {
 90		name := parts[0]
 91		// Handle cases like "llama3.2" -> "llama"
 92		if strings.HasPrefix(name, "llama") {
 93			return "llama"
 94		}
 95		if strings.HasPrefix(name, "mistral") {
 96			return "mistral"
 97		}
 98		if strings.HasPrefix(name, "gemma") {
 99			return "gemma"
100		}
101		if strings.HasPrefix(name, "qwen") {
102			return "qwen"
103		}
104		if strings.HasPrefix(name, "phi") {
105			return "phi"
106		}
107		if strings.HasPrefix(name, "codellama") {
108			return "codellama"
109		}
110		if strings.Contains(name, "llava") {
111			return "llava"
112		}
113		if strings.Contains(name, "vision") {
114			return "llama-vision"
115		}
116	}
117	return "unknown"
118}
119
// getContextWindow returns an estimated context window (in tokens) for a
// model family; families with no known estimate get a conservative 8K.
func getContextWindow(family string) int64 {
	switch family {
	case "llama", "qwen", "qwen2", "phi":
		// These families all get the 128K estimate (Llama 3.x etc.).
		return 131072
	case "mistral":
		return 32768
	case "codellama":
		return 16384
	case "gemma":
		return 8192
	}
	return 8192 // Conservative default
}
139
// supportsImages reports whether a model family accepts image inputs; only
// the vision-oriented families ("llava", "llama-vision") do.
func supportsImages(family string) bool {
	return family == "llava" || family == "llama-vision"
}