Update readme & use provided logging package

Pietjan Oostra created

Change summary

README.md                    | 30 ++++++++++++++++++++++++++++++
internal/llm/models/local.go | 33 ++++++++++++++++++++++++---------
2 files changed, 54 insertions(+), 9 deletions(-)

Detailed changes

README.md 🔗

@@ -100,6 +100,7 @@ You can configure OpenCode using environment variables:
 | `AZURE_OPENAI_ENDPOINT`    | For Azure OpenAI models                                |
 | `AZURE_OPENAI_API_KEY`     | For Azure OpenAI models (optional when using Entra ID) |
 | `AZURE_OPENAI_API_VERSION` | For Azure OpenAI models                                |
+| `LOCAL_ENDPOINT`           | For self-hosted models                                 |
 | `SHELL`                    | Default shell to use (if not specified in config)      |
 
 ### Shell Configuration
@@ -566,6 +567,35 @@ The AI assistant can access LSP features through the `diagnostics` tool, allowin
 
 While the LSP client implementation supports the full LSP protocol (including completions, hover, definition, etc.), currently only diagnostics are exposed to the AI assistant.
 
+## Using a self-hosted model provider
+
+OpenCode can also load and use models from a self-hosted (OpenAI-compatible) provider.
+This is useful for developers who want to experiment with custom models.
+
+### Configuring a self-hosted provider
+
+You can use a self-hosted model by setting the `LOCAL_ENDPOINT` environment variable.
+This will cause OpenCode to load and use the models from the specified endpoint.
+
+```bash
+export LOCAL_ENDPOINT=http://localhost:1235/v1
+```
+
+### Configuring a self-hosted model
+
+You can also configure a self-hosted model in the configuration file under the `agents` section:
+
+```json
+{
+  "agents": {
+    "coder": {
+      "model": "local.granite-3.3-2b-instruct@q8_0",
+      "reasoningEffort": "high"
+    }
+  }
+}
+```
+
 ## Development
 
 ### Prerequisites

internal/llm/models/local.go 🔗

@@ -3,7 +3,6 @@ package models
 import (
 	"cmp"
 	"encoding/json"
-	"log/slog"
 	"net/http"
 	"net/url"
 	"os"
@@ -11,6 +10,7 @@ import (
 	"strings"
 	"unicode"
 
+	"github.com/opencode-ai/opencode/internal/logging"
 	"github.com/spf13/viper"
 )
 
@@ -25,7 +25,7 @@ func init() {
 	if endpoint := os.Getenv("LOCAL_ENDPOINT"); endpoint != "" {
 		localEndpoint, err := url.Parse(endpoint)
 		if err != nil {
-			slog.Debug("Failed to parse local endpoint",
+			logging.Debug("Failed to parse local endpoint",
 				"error", err,
 				"endpoint", endpoint,
 			)
@@ -44,7 +44,7 @@ func init() {
 		}
 
 		if len(models) == 0 {
-			slog.Debug("No local models found",
+			logging.Debug("No local models found",
 				"endpoint", endpoint,
 			)
 			return
@@ -77,7 +77,7 @@ type localModel struct {
 func listLocalModels(modelsEndpoint string) []localModel {
 	res, err := http.Get(modelsEndpoint)
 	if err != nil {
-		slog.Debug("Failed to list local models",
+		logging.Debug("Failed to list local models",
 			"error", err,
 			"endpoint", modelsEndpoint,
 		)
@@ -85,7 +85,7 @@ func listLocalModels(modelsEndpoint string) []localModel {
 	defer res.Body.Close()
 
 	if res.StatusCode != http.StatusOK {
-		slog.Debug("Failed to list local models",
+		logging.Debug("Failed to list local models",
 			"status", res.StatusCode,
 			"endpoint", modelsEndpoint,
 		)
@@ -93,7 +93,7 @@ func listLocalModels(modelsEndpoint string) []localModel {
 
 	var modelList localModelList
 	if err = json.NewDecoder(res.Body).Decode(&modelList); err != nil {
-		slog.Debug("Failed to list local models",
+		logging.Debug("Failed to list local models",
 			"error", err,
 			"endpoint", modelsEndpoint,
 		)
@@ -103,7 +103,7 @@ func listLocalModels(modelsEndpoint string) []localModel {
 	for _, model := range modelList.Data {
 		if strings.HasSuffix(modelsEndpoint, lmStudioBetaModelsPath) {
 			if model.Object != "model" || model.Type != "llm" {
-				slog.Debug("Skipping unsupported LMStudio model",
+				logging.Debug("Skipping unsupported LMStudio model",
 					"endpoint", modelsEndpoint,
 					"id", model.ID,
 					"object", model.Object,
@@ -125,7 +125,7 @@ func loadLocalModels(models []localModel) {
 		model := convertLocalModel(m)
 		SupportedModels[model.ID] = model
 
-		if i == 1 || m.State == "loaded" {
+		if i == 0 || m.State == "loaded" {
 			viper.SetDefault("agents.coder.model", model.ID)
 			viper.SetDefault("agents.summarizer.model", model.ID)
 			viper.SetDefault("agents.task.model", model.ID)
@@ -150,7 +150,19 @@ func convertLocalModel(model localModel) Model {
 var modelInfoRegex = regexp.MustCompile(`(?i)^([a-z0-9]+)(?:[-_]?([rv]?\d[\.\d]*))?(?:[-_]?([a-z]+))?.*`)
 
 func friendlyModelName(modelID string) string {
-	match := modelInfoRegex.FindStringSubmatch(modelID)
+	mainID := modelID
+	tag := ""
+
+	if slash := strings.LastIndex(mainID, "/"); slash != -1 {
+		mainID = mainID[slash+1:]
+	}
+
+	if at := strings.Index(modelID, "@"); at != -1 {
+		mainID = modelID[:at]
+		tag = modelID[at+1:]
+	}
+
+	match := modelInfoRegex.FindStringSubmatch(mainID)
 	if match == nil {
 		return modelID
 	}
@@ -186,6 +198,9 @@ func friendlyModelName(modelID string) string {
 	if label != "" {
 		parts = append(parts, label)
 	}
+	if tag != "" {
+		parts = append(parts, tag)
+	}
 
 	return strings.Join(parts, " ")
 }