From 12a98d6940d9dbe46d889a0bf60d881ffc56ba66 Mon Sep 17 00:00:00 2001
From: Pietjan Oostra
Date: Fri, 16 May 2025 19:38:53 +0200
Subject: [PATCH] Update readme & use provided logging package

---
 README.md                    | 30 ++++++++++++++++++++++++++++++
 internal/llm/models/local.go | 33 ++++++++++++++++++++++++---------
 2 files changed, 54 insertions(+), 9 deletions(-)

diff --git a/README.md b/README.md
index 39c301872c076fd44249d4166558cf3b3e8d991d..c3e6a95a3ea32c84c695874287a0964c6b066e44 100644
--- a/README.md
+++ b/README.md
@@ -100,6 +100,7 @@ You can configure OpenCode using environment variables:
 | `AZURE_OPENAI_ENDPOINT` | For Azure OpenAI models |
 | `AZURE_OPENAI_API_KEY` | For Azure OpenAI models (optional when using Entra ID) |
 | `AZURE_OPENAI_API_VERSION` | For Azure OpenAI models |
+| `LOCAL_ENDPOINT` | For self-hosted models |
 | `SHELL` | Default shell to use (if not specified in config) |
 
 ### Shell Configuration
@@ -566,6 +567,35 @@ The AI assistant can access LSP features through the `diagnostics` tool, allowin
 
 While the LSP client implementation supports the full LSP protocol (including completions, hover, definition, etc.), currently only diagnostics are exposed to the AI assistant.
 
+## Using a self-hosted model provider
+
+OpenCode can also load and use models from a self-hosted (OpenAI-compatible) provider.
+This is useful for developers who want to experiment with custom models.
+
+### Configuring a self-hosted provider
+
+You can use a self-hosted model by setting the `LOCAL_ENDPOINT` environment variable.
+OpenCode will then load and use the models served at that endpoint.
+
+```bash
+export LOCAL_ENDPOINT=http://localhost:1235/v1
+```
+
+### Configuring a self-hosted model
+
+You can also configure a specific self-hosted model in the configuration file, under the `agents` section:
+
+```json
+{
+  "agents": {
+    "coder": {
+      "model": "local.granite-3.3-2b-instruct@q8_0",
+      "reasoningEffort": "high"
+    }
+  }
+}
+```
+
 ## Development
 
 ### Prerequisites

diff --git a/internal/llm/models/local.go b/internal/llm/models/local.go
index 252f6a9f95a8f023b4f23275e37b08b1d66f8862..5d8412c86a0f3f4ccf305763171f6acfdaea6eb1 100644
--- a/internal/llm/models/local.go
+++ b/internal/llm/models/local.go
@@ -3,7 +3,6 @@ package models
 import (
 	"cmp"
 	"encoding/json"
-	"log/slog"
 	"net/http"
 	"net/url"
 	"os"
@@ -11,6 +10,7 @@ import (
 	"strings"
 	"unicode"
 
+	"github.com/opencode-ai/opencode/internal/logging"
 	"github.com/spf13/viper"
 )
 
@@ -25,7 +25,7 @@ func init() {
 	if endpoint := os.Getenv("LOCAL_ENDPOINT"); endpoint != "" {
 		localEndpoint, err := url.Parse(endpoint)
 		if err != nil {
-			slog.Debug("Failed to parse local endpoint",
+			logging.Debug("Failed to parse local endpoint",
 				"error", err,
 				"endpoint", endpoint,
 			)
@@ -44,7 +44,7 @@ func init() {
 		}
 
 		if len(models) == 0 {
-			slog.Debug("No local models found",
+			logging.Debug("No local models found",
 				"endpoint", endpoint,
 			)
 			return
@@ -77,7 +77,7 @@ type localModel struct {
 func listLocalModels(modelsEndpoint string) []localModel {
 	res, err := http.Get(modelsEndpoint)
 	if err != nil {
-		slog.Debug("Failed to list local models",
+		logging.Debug("Failed to list local models",
 			"error", err,
 			"endpoint", modelsEndpoint,
 		)
@@ -85,7 +85,7 @@ func listLocalModels(modelsEndpoint string) []localModel {
 	defer res.Body.Close()
 
 	if res.StatusCode != http.StatusOK {
-		slog.Debug("Failed to list local models",
+		logging.Debug("Failed to list local models",
 			"status", res.StatusCode,
 			"endpoint", modelsEndpoint,
 		)
@@ -93,7 +93,7 @@ func listLocalModels(modelsEndpoint string) []localModel {
 
 	var modelList localModelList
 	if err = json.NewDecoder(res.Body).Decode(&modelList); err != nil {
-		slog.Debug("Failed to list local models",
+		logging.Debug("Failed to list local models",
 			"error", err,
 			"endpoint", modelsEndpoint,
 		)
@@ -103,7 +103,7 @@ func listLocalModels(modelsEndpoint string) []localModel {
 	for _, model := range modelList.Data {
 		if strings.HasSuffix(modelsEndpoint, lmStudioBetaModelsPath) {
 			if model.Object != "model" || model.Type != "llm" {
-				slog.Debug("Skipping unsupported LMStudio model",
+				logging.Debug("Skipping unsupported LMStudio model",
 					"endpoint", modelsEndpoint,
 					"id", model.ID,
 					"object", model.Object,
@@ -125,7 +125,7 @@ func loadLocalModels(models []localModel) {
 		model := convertLocalModel(m)
 		SupportedModels[model.ID] = model
 
-		if i == 1 || m.State == "loaded" {
+		if i == 0 || m.State == "loaded" {
 			viper.SetDefault("agents.coder.model", model.ID)
 			viper.SetDefault("agents.summarizer.model", model.ID)
 			viper.SetDefault("agents.task.model", model.ID)
@@ -150,7 +150,19 @@ func convertLocalModel(model localModel) Model {
 var modelInfoRegex = regexp.MustCompile(`(?i)^([a-z0-9]+)(?:[-_]?([rv]?\d[\.\d]*))?(?:[-_]?([a-z]+))?.*`)
 
 func friendlyModelName(modelID string) string {
-	match := modelInfoRegex.FindStringSubmatch(modelID)
+	mainID := modelID
+	tag := ""
+
+	if slash := strings.LastIndex(mainID, "/"); slash != -1 {
+		mainID = mainID[slash+1:]
+	}
+
+	if at := strings.Index(mainID, "@"); at != -1 {
+		tag = mainID[at+1:]
+		mainID = mainID[:at]
+	}
+
+	match := modelInfoRegex.FindStringSubmatch(mainID)
 	if match == nil {
 		return modelID
 	}
@@ -186,6 +198,9 @@ func friendlyModelName(modelID string) string {
 	if label != "" {
 		parts = append(parts, label)
 	}
+	if tag != "" {
+		parts = append(parts, tag)
+	}
 
 	return strings.Join(parts, " ")
 }
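The `friendlyModelName` change above works in two steps: before the regex runs, it strips any vendor path (everything up to the last `/`) and splits off a quantization tag (everything after `@`); once the regex-derived name parts are assembled, the tag is re-appended at the end. Below is a minimal standalone sketch of just the splitting step, assuming the same semantics as the patch; `splitModelID` and the sample model IDs are illustrative only, and the regex-based family/version/label assembly is elided.

```go
package main

import (
	"fmt"
	"strings"
)

// splitModelID mirrors the prologue added to friendlyModelName: it drops a
// vendor/namespace prefix up to the last "/" and separates a quantization
// tag following "@". (splitModelID is a hypothetical helper name; the patch
// inlines this logic directly in friendlyModelName.)
func splitModelID(modelID string) (mainID, tag string) {
	mainID = modelID

	// "lmstudio-community/granite-3.3-2b-instruct" -> "granite-3.3-2b-instruct"
	if slash := strings.LastIndex(mainID, "/"); slash != -1 {
		mainID = mainID[slash+1:]
	}

	// "granite-3.3-2b-instruct@q8_0" -> ("granite-3.3-2b-instruct", "q8_0")
	if at := strings.Index(mainID, "@"); at != -1 {
		tag = mainID[at+1:]
		mainID = mainID[:at]
	}

	return mainID, tag
}

func main() {
	for _, id := range []string{
		"granite-3.3-2b-instruct@q8_0",
		"lmstudio-community/qwen2.5-7b-instruct@q4_K_M", // hypothetical ID for illustration
		"plain-model",
	} {
		mainID, tag := splitModelID(id)
		fmt.Printf("%-50s main=%q tag=%q\n", id, mainID, tag)
	}
}
```

For `granite-3.3-2b-instruct@q8_0` this yields main ID `granite-3.3-2b-instruct` and tag `q8_0`, so the quantization tag survives into the friendly name, matching the `local.granite-3.3-2b-instruct@q8_0` model ID used in the README example.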