feat: configure context paths (#86)

Created by Garrett Ladley

Change summary

cmd/schema/main.go            |  22 ++++++
internal/config/config.go     |  32 +++++++--
internal/llm/prompt/prompt.go | 113 ++++++++++++++++++++++--------------
opencode-schema.json          | 100 +++++++++++++++++++-------------
4 files changed, 175 insertions(+), 92 deletions(-)

Detailed changes

cmd/schema/main.go

@@ -77,6 +77,27 @@ func generateSchema() map[string]any {
 		"default":     false,
 	}
 
+	schema["properties"].(map[string]any)["contextPaths"] = map[string]any{
+		"type":        "array",
+		"description": "Context paths for the application",
+		"items": map[string]any{
+			"type": "string",
+		},
+		"default": []string{
+			".github/copilot-instructions.md",
+			".cursorrules",
+			".cursor/rules/",
+			"CLAUDE.md",
+			"CLAUDE.local.md",
+			"opencode.md",
+			"opencode.local.md",
+			"OpenCode.md",
+			"OpenCode.local.md",
+			"OPENCODE.md",
+			"OPENCODE.local.md",
+		},
+	}
+
 	// Add MCP servers
 	schema["properties"].(map[string]any)["mcpServers"] = map[string]any{
 		"type":        "object",
@@ -259,4 +280,3 @@ func generateSchema() map[string]any {
 
 	return schema
 }
-

internal/config/config.go

@@ -67,14 +67,15 @@ type LSPConfig struct {
 
 // Config is the main configuration structure for the application.
 type Config struct {
-	Data       Data                              `json:"data"`
-	WorkingDir string                            `json:"wd,omitempty"`
-	MCPServers map[string]MCPServer              `json:"mcpServers,omitempty"`
-	Providers  map[models.ModelProvider]Provider `json:"providers,omitempty"`
-	LSP        map[string]LSPConfig              `json:"lsp,omitempty"`
-	Agents     map[AgentName]Agent               `json:"agents"`
-	Debug      bool                              `json:"debug,omitempty"`
-	DebugLSP   bool                              `json:"debugLSP,omitempty"`
+	Data         Data                              `json:"data"`
+	WorkingDir   string                            `json:"wd,omitempty"`
+	MCPServers   map[string]MCPServer              `json:"mcpServers,omitempty"`
+	Providers    map[models.ModelProvider]Provider `json:"providers,omitempty"`
+	LSP          map[string]LSPConfig              `json:"lsp,omitempty"`
+	Agents       map[AgentName]Agent               `json:"agents"`
+	Debug        bool                              `json:"debug,omitempty"`
+	DebugLSP     bool                              `json:"debugLSP,omitempty"`
+	ContextPaths []string                          `json:"contextPaths,omitempty"`
 }
 
 // Application constants
@@ -84,6 +85,20 @@ const (
 	appName              = "opencode"
 )
 
+var defaultContextPaths = []string{
+	".github/copilot-instructions.md",
+	".cursorrules",
+	".cursor/rules/",
+	"CLAUDE.md",
+	"CLAUDE.local.md",
+	"opencode.md",
+	"opencode.local.md",
+	"OpenCode.md",
+	"OpenCode.local.md",
+	"OPENCODE.md",
+	"OPENCODE.local.md",
+}
+
 // Global configuration instance
 var cfg *Config
 
@@ -185,6 +200,7 @@ func configureViper() {
 // setDefaults configures default values for configuration options.
 func setDefaults(debug bool) {
 	viper.SetDefault("data.directory", defaultDataDirectory)
+	viper.SetDefault("contextPaths", defaultContextPaths)
 
 	if debug {
 		viper.SetDefault("debug", true)

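With the new struct field and the viper.SetDefault call above, the context paths become an ordinary config key: a user config that sets "contextPaths" should replace the built-in list, while one that omits it falls back to defaultContextPaths. A minimal sketch of the key's shape, using a trimmed-down struct (the real Config lives in internal/config and carries many more fields) and a hypothetical extra path, docs/agent-notes.md, that is not one of the shipped defaults:

	package main

	import (
		"encoding/json"
		"fmt"
	)

	// Trimmed-down mirror of the relevant field; the real Config struct
	// lives in internal/config and carries many more fields.
	type cfg struct {
		ContextPaths []string `json:"contextPaths,omitempty"`
	}

	func main() {
		// Hypothetical user config: "docs/agent-notes.md" is illustrative,
		// not one of the shipped defaults.
		raw := []byte(`{"contextPaths": ["CLAUDE.md", "docs/agent-notes.md"]}`)

		var c cfg
		if err := json.Unmarshal(raw, &c); err != nil {
			panic(err)
		}
		fmt.Println(c.ContextPaths) // [CLAUDE.md docs/agent-notes.md]
	}
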
internal/llm/prompt/prompt.go

@@ -5,26 +5,12 @@ import (
 	"os"
 	"path/filepath"
 	"strings"
+	"sync"
 
 	"github.com/opencode-ai/opencode/internal/config"
 	"github.com/opencode-ai/opencode/internal/llm/models"
 )
 
-// contextFiles is a list of potential context files to check for
-var contextFiles = []string{
-	".github/copilot-instructions.md",
-	".cursorrules",
-	".cursor/rules/", // Directory containing multiple rule files
-	"CLAUDE.md",
-	"CLAUDE.local.md",
-	"opencode.md",
-	"opencode.local.md",
-	"OpenCode.md",
-	"OpenCode.local.md",
-	"OPENCODE.md",
-	"OPENCODE.local.md",
-}
-
 func GetAgentPrompt(agentName config.AgentName, provider models.ModelProvider) string {
 	basePrompt := ""
 	switch agentName {
@@ -40,45 +26,86 @@ func GetAgentPrompt(agentName config.AgentName, provider models.ModelProvider) s
 
 	if agentName == config.AgentCoder || agentName == config.AgentTask {
 		// Add context from project-specific instruction files if they exist
-		contextContent := getContextFromFiles()
+		contextContent := getContextFromPaths()
 		if contextContent != "" {
-			return fmt.Sprintf("%s\n\n# Project-Specific Context\n%s", basePrompt, contextContent)
+			return fmt.Sprintf("%s\n\n# Project-Specific Context\n Make sure to follow the instructions in the context below\n%s", basePrompt, contextContent)
 		}
 	}
 	return basePrompt
 }
 
-// getContextFromFiles checks for the existence of context files and returns their content
-func getContextFromFiles() string {
-	workDir := config.WorkingDirectory()
-	var contextContent string
+var (
+	onceContext    sync.Once
+	contextContent string
+)
+
+func getContextFromPaths() string {
+	onceContext.Do(func() {
+		var (
+			cfg          = config.Get()
+			workDir      = cfg.WorkingDir
+			contextPaths = cfg.ContextPaths
+		)
+
+		contextContent = processContextPaths(workDir, contextPaths)
+	})
+
+	return contextContent
+}
+
+func processContextPaths(workDir string, paths []string) string {
+	var (
+		wg       sync.WaitGroup
+		resultCh = make(chan string)
+	)
+
+	for _, path := range paths {
+		wg.Add(1)
+		go func(p string) {
+			defer wg.Done()
 
-	for _, path := range contextFiles {
-		// Check if path ends with a slash (indicating a directory)
-		if strings.HasSuffix(path, "/") {
-			// Handle directory - read all files within it
-			dirPath := filepath.Join(workDir, path)
-			files, err := os.ReadDir(dirPath)
-			if err == nil {
-				for _, file := range files {
-					if !file.IsDir() {
-						filePath := filepath.Join(dirPath, file.Name())
-						content, err := os.ReadFile(filePath)
-						if err == nil {
-							contextContent += fmt.Sprintf("\n# From %s\n%s\n", file.Name(), string(content))
+			if strings.HasSuffix(p, "/") {
+				filepath.WalkDir(filepath.Join(workDir, p), func(path string, d os.DirEntry, err error) error {
+					if err != nil {
+						return err
+					}
+					if !d.IsDir() {
+						if result := processFile(path); result != "" {
+							resultCh <- result
 						}
 					}
+					return nil
+				})
+			} else {
+				result := processFile(filepath.Join(workDir, p))
+				if result != "" {
+					resultCh <- result
 				}
 			}
-		} else {
-			// Handle individual file as before
-			filePath := filepath.Join(workDir, path)
-			content, err := os.ReadFile(filePath)
-			if err == nil {
-				contextContent += fmt.Sprintf("\n%s\n", string(content))
-			}
-		}
+		}(path)
 	}
 
-	return contextContent
+	go func() {
+		wg.Wait()
+		close(resultCh)
+	}()
+
+	var (
+		results = make([]string, len(resultCh))
+		i       int
+	)
+	for result := range resultCh {
+		results[i] = result
+		i++
+	}
+
+	return strings.Join(results, "\n")
 }
+
+func processFile(filePath string) string {
+	content, err := os.ReadFile(filePath)
+	if err != nil {
+		return ""
+	}
+	return "# From:" + filePath + "\n" + string(content)
+}

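The rewritten prompt.go fans out one goroutine per configured path, streams non-empty file contents into a channel, and joins them once every reader finishes; sync.Once then caches the assembled context for the life of the process. Below is a condensed, self-contained sketch of that fan-out/fan-in pattern with the same "# From:" labelling; the directory-walk branch for paths ending in "/" is omitted for brevity, and it is a sketch of the pattern rather than the committed code.

	package main

	import (
		"fmt"
		"os"
		"path/filepath"
		"strings"
		"sync"
	)

	// readIfExists returns a labelled snippet for one file, or "" if it cannot be read.
	func readIfExists(path string) string {
		content, err := os.ReadFile(path)
		if err != nil {
			return ""
		}
		return "# From:" + path + "\n" + string(content)
	}

	// collectContext reads every path concurrently and joins the non-empty results.
	func collectContext(workDir string, paths []string) string {
		var wg sync.WaitGroup
		resultCh := make(chan string)

		for _, p := range paths {
			wg.Add(1)
			go func(p string) {
				defer wg.Done()
				if r := readIfExists(filepath.Join(workDir, p)); r != "" {
					resultCh <- r
				}
			}(p)
		}

		// Close the channel once every reader goroutine is done.
		go func() {
			wg.Wait()
			close(resultCh)
		}()

		// Drain into a slice; append avoids pre-sizing it.
		var results []string
		for r := range resultCh {
			results = append(results, r)
		}
		return strings.Join(results, "\n")
	}

	func main() {
		fmt.Println(collectContext(".", []string{"CLAUDE.md", "opencode.md"}))
	}

Collecting with append sidesteps knowing the result count up front: len of an unbuffered channel reports buffered elements (always zero here), not how many sends are still pending.
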
opencode-schema.json

@@ -12,33 +12,33 @@
         "model": {
           "description": "Model ID for the agent",
           "enum": [
+            "bedrock.claude-3.7-sonnet",
+            "claude-3-haiku",
             "claude-3.7-sonnet",
-            "claude-3-opus",
-            "gpt-4.1-mini",
-            "gpt-4o",
-            "gpt-4o-mini",
-            "gemini-2.0-flash-lite",
-            "meta-llama/llama-4-maverick-17b-128e-instruct",
-            "gpt-4.1",
+            "claude-3.5-haiku",
+            "o3",
             "gpt-4.5-preview",
-            "o1",
-            "gpt-4.1-nano",
+            "o1-pro",
+            "o4-mini",
+            "gpt-4.1",
             "o3-mini",
+            "gpt-4.1-nano",
+            "gpt-4o-mini",
+            "o1",
             "gemini-2.5-flash",
-            "gemini-2.0-flash",
-            "meta-llama/llama-4-scout-17b-16e-instruct",
-            "bedrock.claude-3.7-sonnet",
-            "o1-pro",
-            "o3",
-            "gemini-2.5",
             "qwen-qwq",
-            "llama-3.3-70b-versatile",
+            "meta-llama/llama-4-maverick-17b-128e-instruct",
+            "claude-3-opus",
+            "gpt-4o",
+            "gemini-2.0-flash-lite",
+            "gemini-2.0-flash",
             "deepseek-r1-distill-llama-70b",
+            "llama-3.3-70b-versatile",
             "claude-3.5-sonnet",
-            "claude-3-haiku",
-            "claude-3.5-haiku",
-            "o4-mini",
-            "o1-mini"
+            "o1-mini",
+            "gpt-4.1-mini",
+            "gemini-2.5",
+            "meta-llama/llama-4-scout-17b-16e-instruct"
           ],
           "type": "string"
         },
@@ -72,33 +72,33 @@
           "model": {
             "description": "Model ID for the agent",
             "enum": [
+              "bedrock.claude-3.7-sonnet",
+              "claude-3-haiku",
               "claude-3.7-sonnet",
-              "claude-3-opus",
-              "gpt-4.1-mini",
-              "gpt-4o",
-              "gpt-4o-mini",
-              "gemini-2.0-flash-lite",
-              "meta-llama/llama-4-maverick-17b-128e-instruct",
-              "gpt-4.1",
+              "claude-3.5-haiku",
+              "o3",
               "gpt-4.5-preview",
-              "o1",
-              "gpt-4.1-nano",
+              "o1-pro",
+              "o4-mini",
+              "gpt-4.1",
               "o3-mini",
+              "gpt-4.1-nano",
+              "gpt-4o-mini",
+              "o1",
               "gemini-2.5-flash",
-              "gemini-2.0-flash",
-              "meta-llama/llama-4-scout-17b-16e-instruct",
-              "bedrock.claude-3.7-sonnet",
-              "o1-pro",
-              "o3",
-              "gemini-2.5",
               "qwen-qwq",
-              "llama-3.3-70b-versatile",
+              "meta-llama/llama-4-maverick-17b-128e-instruct",
+              "claude-3-opus",
+              "gpt-4o",
+              "gemini-2.0-flash-lite",
+              "gemini-2.0-flash",
               "deepseek-r1-distill-llama-70b",
+              "llama-3.3-70b-versatile",
               "claude-3.5-sonnet",
-              "claude-3-haiku",
-              "claude-3.5-haiku",
-              "o4-mini",
-              "o1-mini"
+              "o1-mini",
+              "gpt-4.1-mini",
+              "gemini-2.5",
+              "meta-llama/llama-4-scout-17b-16e-instruct"
             ],
             "type": "string"
           },
@@ -131,6 +131,26 @@
       },
       "type": "object"
     },
+    "contextPaths": {
+      "default": [
+        ".github/copilot-instructions.md",
+        ".cursorrules",
+        ".cursor/rules/",
+        "CLAUDE.md",
+        "CLAUDE.local.md",
+        "opencode.md",
+        "opencode.local.md",
+        "OpenCode.md",
+        "OpenCode.local.md",
+        "OPENCODE.md",
+        "OPENCODE.local.md"
+      ],
+      "description": "Context paths for the application",
+      "items": {
+        "type": "string"
+      },
+      "type": "array"
+    },
     "data": {
       "description": "Storage configuration",
       "properties": {