package ollama

import (
	"context"
	"fmt"
	"os/exec"
	"syscall"
	"time"
)
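
// NOTE: this file relies on package-level state and helpers defined elsewhere
// in the ollama package (IsRunning, IsModelLoaded, setupProcessCleanup, and
// processManager). The declaration below is not part of this file; it is a
// rough sketch, inferred only from how processManager is used here, of the
// shape that shared state is assumed to have:
//
//	var processManager = struct {
//		setupOnce          sync.Once
//		mu                 sync.Mutex
//		ollamaServer       *exec.Cmd
//		crushStartedOllama bool
//		processes          map[string]*exec.Cmd
//	}{processes: make(map[string]*exec.Cmd)}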

// StartOllamaService starts the Ollama service if it's not already running
func StartOllamaService(ctx context.Context) error {
	if IsRunning(ctx) {
		return nil // Already running
	}

	// Set up signal handling for cleanup
	processManager.setupOnce.Do(func() {
		setupProcessCleanup()
	})

	// Start ollama serve
	cmd := exec.CommandContext(ctx, "ollama", "serve")
	cmd.Stdout = nil // Suppress output
	cmd.Stderr = nil // Suppress errors
	cmd.SysProcAttr = &syscall.SysProcAttr{
		Setpgid: true, // Create new process group so we can kill it and all children
	}

	if err := cmd.Start(); err != nil {
		return fmt.Errorf("failed to start Ollama service: %w", err)
	}

	// Store the process for cleanup
	processManager.mu.Lock()
	processManager.ollamaServer = cmd
	processManager.crushStartedOllama = true
	processManager.mu.Unlock()

	// Wait for Ollama to be ready (with timeout)
	timeout := time.After(10 * time.Second)
	ticker := time.NewTicker(500 * time.Millisecond)
	defer ticker.Stop()

	for {
		select {
		case <-timeout:
			return fmt.Errorf("timeout waiting for Ollama service to start")
		case <-ticker.C:
			if IsRunning(ctx) {
				return nil // Ollama is now running
			}
		case <-ctx.Done():
			return ctx.Err()
		}
	}
}
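
// The Setpgid flag above puts `ollama serve` in its own process group so the
// whole group can be signalled at once during cleanup. setupProcessCleanup is
// defined elsewhere in this package; the hypothetical helper below is only a
// sketch of how such a teardown might signal the group (a negative PID targets
// the whole process group on Unix), not the package's actual cleanup code.
func exampleKillProcessGroup(cmd *exec.Cmd) error {
	if cmd == nil || cmd.Process == nil {
		return nil // Nothing was started
	}
	pgid, err := syscall.Getpgid(cmd.Process.Pid)
	if err != nil {
		return fmt.Errorf("failed to look up process group: %w", err)
	}
	// Signal the entire group (the server plus any children it spawned).
	return syscall.Kill(-pgid, syscall.SIGTERM)
}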

// StartModel starts a model using `ollama run` and keeps it loaded
func StartModel(ctx context.Context, modelName string) error {
	// Check if model is already running
	if loaded, err := IsModelLoaded(ctx, modelName); err != nil {
		return fmt.Errorf("failed to check if model is loaded: %w", err)
	} else if loaded {
		return nil // Model is already running
	}

	// Set up signal handling for cleanup
	processManager.setupOnce.Do(func() {
		setupProcessCleanup()
	})

	// Start the model in the background
	cmd := exec.CommandContext(ctx, "ollama", "run", modelName)
	cmd.Stdin = nil  // No interactive input
	cmd.Stdout = nil // Suppress output
	cmd.Stderr = nil // Suppress errors

	if err := cmd.Start(); err != nil {
		return fmt.Errorf("failed to start model %s: %w", modelName, err)
	}

	// Store the process for cleanup
	processManager.mu.Lock()
	processManager.processes[modelName] = cmd
	processManager.mu.Unlock()

	// Wait for the model to be loaded (with timeout)
	timeout := time.After(30 * time.Second)
	ticker := time.NewTicker(1 * time.Second)
	defer ticker.Stop()

	for {
		select {
		case <-timeout:
			return fmt.Errorf("timeout waiting for model %s to load", modelName)
		case <-ticker.C:
			if loaded, err := IsModelLoaded(ctx, modelName); err != nil {
				return fmt.Errorf("failed to check if model is loaded: %w", err)
			} else if loaded {
				return nil // Model is now running
			}
		case <-ctx.Done():
			return ctx.Err()
		}
	}
}

// EnsureOllamaRunning ensures the Ollama service is running, starting it if necessary
func EnsureOllamaRunning(ctx context.Context) error {
	return StartOllamaService(ctx)
}

// EnsureModelRunning ensures a model is running, starting it if necessary
func EnsureModelRunning(ctx context.Context, modelName string) error {
	return StartModel(ctx, modelName)
}
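
// ensureLocalModel is a usage sketch, not part of the original package API: it
// shows how a caller might bring the service and a specific model up before
// issuing requests. The model name "llama3.2" is illustrative only.
func ensureLocalModel(ctx context.Context) error {
	// Give the startup sequence an overall deadline rather than relying only
	// on the internal 10s/30s polling timeouts.
	ctx, cancel := context.WithTimeout(ctx, time.Minute)
	defer cancel()

	if err := EnsureOllamaRunning(ctx); err != nil {
		return fmt.Errorf("ollama service not available: %w", err)
	}
	if err := EnsureModelRunning(ctx, "llama3.2"); err != nil {
		return fmt.Errorf("model not available: %w", err)
	}
	return nil
}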