From 8940ba1ddf3d686e2e8b3f29a04ab904ec2e8c66 Mon Sep 17 00:00:00 2001
From: Kujtim Hoxha
Date: Sat, 2 Aug 2025 09:16:05 +0200
Subject: [PATCH 1/9] fix: fix openai compatible provider

some providers expect simple content differently

Co-authored-by: Peter Steinberger
---
 internal/llm/provider/openai.go | 27 ++++++++++++++++++---------
 1 file changed, 18 insertions(+), 9 deletions(-)

diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index b001353c9d94acebdf3eba9707c1525b65a38098..7075ddcc4dd8bceb14e8fa6837d2df391e9a1298 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -70,8 +70,9 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
 		systemMessage = o.providerOptions.systemPromptPrefix + "\n" + systemMessage
 	}
 
-	systemTextBlock := openai.ChatCompletionContentPartTextParam{Text: systemMessage}
+	system := openai.SystemMessage(systemMessage)
 	if isAnthropicModel && !o.providerOptions.disableCache {
+		systemTextBlock := openai.ChatCompletionContentPartTextParam{Text: systemMessage}
 		systemTextBlock.SetExtraFields(
 			map[string]any{
 				"cache_control": map[string]string{
@@ -79,10 +80,10 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
 				},
 			},
 		)
+		var content []openai.ChatCompletionContentPartTextParam
+		content = append(content, systemTextBlock)
+		system = openai.SystemMessage(content)
 	}
-	var content []openai.ChatCompletionContentPartTextParam
-	content = append(content, systemTextBlock)
-	system := openai.SystemMessage(content)
 	openaiMessages = append(openaiMessages, system)
 
 	for i, msg := range messages {
@@ -93,9 +94,12 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
 		switch msg.Role {
 		case message.User:
 			var content []openai.ChatCompletionContentPartUnionParam
+
 			textBlock := openai.ChatCompletionContentPartTextParam{Text: msg.Content().String()}
 			content = append(content, openai.ChatCompletionContentPartUnionParam{OfText: &textBlock})
 
+			hasBinaryContent := false
 			for _, binaryContent := range msg.BinaryContent() {
+				hasBinaryContent = true
 				imageURL := openai.ChatCompletionContentPartImageImageURLParam{URL: binaryContent.String(catwalk.InferenceProviderOpenAI)}
 				imageBlock := openai.ChatCompletionContentPartImageParam{ImageURL: imageURL}
@@ -108,8 +112,11 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
 					},
 				})
 			}
-
-			openaiMessages = append(openaiMessages, openai.UserMessage(content))
+			if hasBinaryContent || (isAnthropicModel && !o.providerOptions.disableCache) {
+				openaiMessages = append(openaiMessages, openai.UserMessage(content))
+			} else {
+				openaiMessages = append(openaiMessages, openai.UserMessage(msg.Content().String()))
+			}
 
 		case message.Assistant:
 			assistantMsg := openai.ChatCompletionAssistantMessageParam{
@@ -134,13 +141,15 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
 						},
 					},
 				}
+				if !isAnthropicModel {
+					assistantMsg.Content = openai.ChatCompletionAssistantMessageParamContentUnion{
+						OfString: param.NewOpt(msg.Content().String()),
+					}
+				}
 			}
 
 			if len(msg.ToolCalls()) > 0 {
 				hasContent = true
-				assistantMsg.Content = openai.ChatCompletionAssistantMessageParamContentUnion{
-					OfString: param.NewOpt(msg.Content().String()),
-				}
 				assistantMsg.ToolCalls = make([]openai.ChatCompletionMessageToolCallParam, len(msg.ToolCalls()))
 				for i, call := range msg.ToolCalls() {
 					assistantMsg.ToolCalls[i] = openai.ChatCompletionMessageToolCallParam{
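The change above hinges on how a user message's "content" field is serialized. The following is a minimal, self-contained sketch of the two shapes; it is an illustrative aside rather than part of the patch series, and it only relies on the openai-go constructors already used in the diff (the exact JSON the SDK emits may differ in detail):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/openai/openai-go"
)

func main() {
	// Plain-string content — the shape strict OpenAI-compatible servers expect
	// for text-only messages; this is what the patch now sends when a user
	// message has no attachments and prompt caching is not in play.
	simple := openai.UserMessage("hello")

	// Array-of-parts content — still used when images are attached or when
	// Anthropic-style cache_control metadata has to ride along.
	textBlock := openai.ChatCompletionContentPartTextParam{Text: "hello"}
	parts := openai.UserMessage([]openai.ChatCompletionContentPartUnionParam{{OfText: &textBlock}})

	a, _ := json.Marshal(simple)
	b, _ := json.Marshal(parts)
	fmt.Println(string(a)) // content as a bare string
	fmt.Println(string(b)) // content as [{"type":"text","text":"hello"}]
}

The branch introduced by the patch simply picks the first shape unless binary content or the Anthropic cache path forces the second.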
From 7f7749601a0792f772c0fffdbc73e05121b5055e Mon Sep 17 00:00:00 2001
From: Kujtim Hoxha
Date: Sat, 2 Aug 2025 10:42:08 +0200
Subject: [PATCH 2/9] fix: handle nil body in http log

---
 internal/log/http.go | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/internal/log/http.go b/internal/log/http.go
index 1091e5706c09be374e6775f8906c91505e10b33f..2c74bd05201ad9cbd8d60c6e3c3db3f637fb99b3 100644
--- a/internal/log/http.go
+++ b/internal/log/http.go
@@ -79,6 +79,9 @@ func (h *HTTPRoundTripLogger) RoundTrip(req *http.Request) (*http.Response, erro
 }
 
 func bodyToString(body io.ReadCloser) string {
+	if body == nil {
+		return ""
+	}
 	src, err := io.ReadAll(body)
 	if err != nil {
 		slog.Error("Failed to read body", "error", err)

From 5177982f3ff60f98cf55e5cb33215cc742e48927 Mon Sep 17 00:00:00 2001
From: Kujtim Hoxha
Date: Sat, 2 Aug 2025 14:23:41 +0200
Subject: [PATCH 3/9] chore: fix models

---
 internal/tui/components/chat/splash/splash.go    | 12 ++----------
 internal/tui/components/dialogs/models/models.go | 12 ++----------
 2 files changed, 4 insertions(+), 20 deletions(-)

diff --git a/internal/tui/components/chat/splash/splash.go b/internal/tui/components/chat/splash/splash.go
index 9a74e79b30bbdcc9e0049f9fea55c23607fbc00a..c87f965f47298a9de726ce20a16587ffde7971ca 100644
--- a/internal/tui/components/chat/splash/splash.go
+++ b/internal/tui/components/chat/splash/splash.go
@@ -3,7 +3,6 @@ package splash
 import (
 	"fmt"
 	"os"
-	"slices"
 	"strings"
 	"time"
 
@@ -107,16 +106,9 @@ func (s *splashCmp) SetOnboarding(onboarding bool) {
 			return
 		}
 		filteredProviders := []catwalk.Provider{}
-		simpleProviders := []string{
-			"anthropic",
-			"openai",
-			"gemini",
-			"xai",
-			"groq",
-			"openrouter",
-		}
+
 		for _, p := range providers {
-			if slices.Contains(simpleProviders, string(p.ID)) {
+			if strings.HasPrefix(p.APIKey, "$") && p.ID != catwalk.InferenceProviderAzure {
 				filteredProviders = append(filteredProviders, p)
 			}
 		}
diff --git a/internal/tui/components/dialogs/models/models.go b/internal/tui/components/dialogs/models/models.go
index d863e1998ad91164822d11253c960d50b8d8199a..bdeaae4f3c7d86cfc7c68605574132c653022407 100644
--- a/internal/tui/components/dialogs/models/models.go
+++ b/internal/tui/components/dialogs/models/models.go
@@ -2,7 +2,7 @@ package models
 
 import (
 	"fmt"
-	"slices"
+	"strings"
 	"time"
 
 	"github.com/charmbracelet/bubbles/v2/help"
@@ -99,16 +99,8 @@ func (m *modelDialogCmp) Init() tea.Cmd {
 	providers, err := config.Providers()
 	if err == nil {
 		filteredProviders := []catwalk.Provider{}
-		simpleProviders := []string{
-			"anthropic",
-			"openai",
-			"gemini",
-			"xai",
-			"groq",
-			"openrouter",
-		}
 		for _, p := range providers {
-			if slices.Contains(simpleProviders, string(p.ID)) {
+			if strings.HasPrefix(p.APIKey, "$") && p.ID != catwalk.InferenceProviderAzure {
 				filteredProviders = append(filteredProviders, p)
 			}
 		}
"github.com/charmbracelet/crush/internal/llm/tools" "github.com/charmbracelet/crush/internal/log" "github.com/charmbracelet/crush/internal/message" + "github.com/google/uuid" "github.com/openai/openai-go" "github.com/openai/openai-go/option" "github.com/openai/openai-go/packages/param" @@ -338,21 +340,16 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t acc := openai.ChatCompletionAccumulator{} currentContent := "" toolCalls := make([]message.ToolCall, 0) - - var currentToolCallID string - var currentToolCall openai.ChatCompletionMessageToolCall var msgToolCalls []openai.ChatCompletionMessageToolCall - currentToolIndex := 0 for openaiStream.Next() { chunk := openaiStream.Current() // Kujtim: this is an issue with openrouter qwen, its sending -1 for the tool index if len(chunk.Choices) > 0 && len(chunk.Choices[0].Delta.ToolCalls) > 0 && chunk.Choices[0].Delta.ToolCalls[0].Index == -1 { - chunk.Choices[0].Delta.ToolCalls[0].Index = int64(currentToolIndex) - currentToolIndex++ + chunk.Choices[0].Delta.ToolCalls[0].Index = 0 } acc.AddChunk(chunk) // This fixes multiple tool calls for some providers - for _, choice := range chunk.Choices { + for i, choice := range chunk.Choices { if choice.Delta.Content != "" { eventChan <- ProviderEvent{ Type: EventContentDelta, @@ -361,63 +358,50 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t currentContent += choice.Delta.Content } else if len(choice.Delta.ToolCalls) > 0 { toolCall := choice.Delta.ToolCalls[0] - // Detect tool use start - if currentToolCallID == "" { - if toolCall.ID != "" { - currentToolCallID = toolCall.ID - eventChan <- ProviderEvent{ - Type: EventToolUseStart, - ToolCall: &message.ToolCall{ - ID: toolCall.ID, - Name: toolCall.Function.Name, - Finished: false, - }, + newToolCall := false + if len(msgToolCalls)-1 >= int(toolCall.Index) { // tool call exists + existingToolCall := msgToolCalls[toolCall.Index] + if toolCall.ID != "" && toolCall.ID != existingToolCall.ID { + found := false + // try to find the tool based on the ID + for i, tool := range msgToolCalls { + if tool.ID == toolCall.ID { + msgToolCalls[i].Function.Arguments += toolCall.Function.Arguments + found = true + } } - currentToolCall = openai.ChatCompletionMessageToolCall{ - ID: toolCall.ID, - Type: "function", - Function: openai.ChatCompletionMessageToolCallFunction{ - Name: toolCall.Function.Name, - Arguments: toolCall.Function.Arguments, - }, + if !found { + newToolCall = true } - } - } else { - // Delta tool use - if toolCall.ID == "" || toolCall.ID == currentToolCallID { - currentToolCall.Function.Arguments += toolCall.Function.Arguments } else { - // Detect new tool use - if toolCall.ID != currentToolCallID { - msgToolCalls = append(msgToolCalls, currentToolCall) - currentToolCallID = toolCall.ID - eventChan <- ProviderEvent{ - Type: EventToolUseStart, - ToolCall: &message.ToolCall{ - ID: toolCall.ID, - Name: toolCall.Function.Name, - Finished: false, - }, - } - currentToolCall = openai.ChatCompletionMessageToolCall{ - ID: toolCall.ID, - Type: "function", - Function: openai.ChatCompletionMessageToolCallFunction{ - Name: toolCall.Function.Name, - Arguments: toolCall.Function.Arguments, - }, - } - } + msgToolCalls[toolCall.Index].Function.Arguments += toolCall.Function.Arguments } + } else { + newToolCall = true } - } - // Kujtim: some models send finish stop even for tool calls - if choice.FinishReason == "tool_calls" || (choice.FinishReason == "stop" && currentToolCallID != "") { - msgToolCalls = 
From d9b0062208beee9e71cfb3e606a7de377b8c9493 Mon Sep 17 00:00:00 2001
From: Kujtim Hoxha
Date: Mon, 4 Aug 2025 14:21:53 +0200
Subject: [PATCH 5/9] chore: simplify model filtering

---
 internal/tui/components/chat/splash/splash.go    | 14 --------------
 internal/tui/components/dialogs/models/list.go   | 16 ++++++++++++----
 internal/tui/components/dialogs/models/models.go | 11 -----------
 3 files changed, 12 insertions(+), 29 deletions(-)

diff --git a/internal/tui/components/chat/splash/splash.go b/internal/tui/components/chat/splash/splash.go
index c87f965f47298a9de726ce20a16587ffde7971ca..b0681c798631ee8cf3960fda26bfc2416aff2692 100644
--- a/internal/tui/components/chat/splash/splash.go
+++ b/internal/tui/components/chat/splash/splash.go
@@ -100,20 +100,6 @@ func New() Splash {
 
 func (s *splashCmp) SetOnboarding(onboarding bool) {
 	s.isOnboarding = onboarding
-	if onboarding {
-		providers, err := config.Providers()
-		if err != nil {
-			return
-		}
-		filteredProviders := []catwalk.Provider{}
-
-		for _, p := range providers {
-			if strings.HasPrefix(p.APIKey, "$") && p.ID != catwalk.InferenceProviderAzure {
-				filteredProviders = append(filteredProviders, p)
-			}
-		}
-		s.modelList.SetProviders(filteredProviders)
-	}
 }
 
 func (s *splashCmp) SetProjectInit(needsInit bool) {
diff --git a/internal/tui/components/dialogs/models/list.go b/internal/tui/components/dialogs/models/list.go
index 4d2986d2192568c9aac4db76f9011552ac61ee34..8c22094323ca2274a421ba50d668beaa42616041 100644
--- a/internal/tui/components/dialogs/models/list.go
+++ b/internal/tui/components/dialogs/models/list.go
@@ -3,10 +3,12 @@ package models
 import (
 	"fmt"
 	"slices"
+	"strings"
 
 	tea "github.com/charmbracelet/bubbletea/v2"
 	"github.com/charmbracelet/catwalk/pkg/catwalk"
 	"github.com/charmbracelet/crush/internal/config"
+	"github.com/charmbracelet/crush/internal/env"
 	"github.com/charmbracelet/crush/internal/tui/exp/list"
 	"github.com/charmbracelet/crush/internal/tui/styles"
 	"github.com/charmbracelet/crush/internal/tui/util"
@@ -49,6 +51,16 @@ func (m *ModelListComponent) Init() tea.Cmd {
 	var cmds []tea.Cmd
 	if len(m.providers) == 0 {
 		providers, err := config.Providers()
+		filteredProviders := []catwalk.Provider{}
+		for _, p := range providers {
+			hasApiKeyEnv := strings.HasPrefix(p.APIKey, "$")
+			resolver := config.NewEnvironmentVariableResolver(env.New())
+			endpoint, _ := resolver.ResolveValue(p.APIEndpoint)
+			if endpoint != "" && hasApiKeyEnv {
+				filteredProviders = append(filteredProviders, p)
+			}
+		}
+		m.providers = providers
 		if err != nil {
 			cmds = append(cmds, util.ReportError(err))
 		}
@@ -242,7 +254,3 @@ func (m *ModelListComponent) GetModelType() int {
 func (m *ModelListComponent) SetInputPlaceholder(placeholder string) {
 	m.list.SetInputPlaceholder(placeholder)
 }
-
-func (m *ModelListComponent) SetProviders(providers []catwalk.Provider) {
-	m.providers = providers
-}
diff --git a/internal/tui/components/dialogs/models/models.go b/internal/tui/components/dialogs/models/models.go
index bdeaae4f3c7d86cfc7c68605574132c653022407..e09b040a52ebf911ceefc455b0892c7c9ceba754 100644
--- a/internal/tui/components/dialogs/models/models.go
+++ b/internal/tui/components/dialogs/models/models.go
@@ -2,7 +2,6 @@ package models
 
 import (
 	"fmt"
-	"strings"
 	"time"
 
 	"github.com/charmbracelet/bubbles/v2/help"
@@ -96,16 +95,6 @@ func NewModelDialogCmp() ModelDialog {
 }
 
 func (m *modelDialogCmp) Init() tea.Cmd {
-	providers, err := config.Providers()
-	if err == nil {
-		filteredProviders := []catwalk.Provider{}
-		for _, p := range providers {
-			if strings.HasPrefix(p.APIKey, "$") && p.ID != catwalk.InferenceProviderAzure {
-				filteredProviders = append(filteredProviders, p)
-			}
-		}
-		m.modelList.SetProviders(filteredProviders)
-	}
 	return tea.Batch(m.modelList.Init(), m.apiKeyInput.Init())
 }
 
From bfd089d4193cbd3da9d81a4ac52852ab08eeee89 Mon Sep 17 00:00:00 2001
From: Kujtim Hoxha
Date: Mon, 4 Aug 2025 14:54:55 +0200
Subject: [PATCH 6/9] chore: fix missing assignment

---
 internal/tui/components/dialogs/models/list.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/internal/tui/components/dialogs/models/list.go b/internal/tui/components/dialogs/models/list.go
index 8c22094323ca2274a421ba50d668beaa42616041..bcec9e981228202d8eb10c86e34642043b499634 100644
--- a/internal/tui/components/dialogs/models/list.go
+++ b/internal/tui/components/dialogs/models/list.go
@@ -61,7 +61,7 @@ func (m *ModelListComponent) Init() tea.Cmd {
 			}
 		}
-		m.providers = providers
+		m.providers = filteredProviders
 		if err != nil {
 			cmds = append(cmds, util.ReportError(err))
 		}

From 1b50cb61c81702d0a94bded3b0007ec3d4c348c1 Mon Sep 17 00:00:00 2001
From: Kujtim Hoxha
Date: Mon, 4 Aug 2025 16:05:09 +0200
Subject: [PATCH 7/9] chore: reasoning for openai providers that support it

---
 internal/llm/provider/openai.go                   | 13 ++++++++++++-
 internal/tui/components/chat/messages/messages.go |  3 +++
 2 files changed, 15 insertions(+), 1 deletion(-)

diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index 70bbe128663ce6163a93a2eb172e6d23f5873af3..9b612b64d63a55d278a686ba5b3f7bda7f973c69 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -2,6 +2,7 @@ package provider
 
 import (
 	"context"
+	"encoding/json"
 	"errors"
 	"fmt"
 	"io"
@@ -348,8 +349,18 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
 				chunk.Choices[0].Delta.ToolCalls[0].Index = 0
 			}
 			acc.AddChunk(chunk)
-			// This fixes multiple tool calls for some providers
 			for i, choice := range chunk.Choices {
+				reasoning, ok := choice.Delta.JSON.ExtraFields["reasoning"]
+				if ok && reasoning.Raw() != "" {
+					reasoningStr := ""
+					json.Unmarshal([]byte(reasoning.Raw()), &reasoningStr)
+					if reasoningStr != "" {
+						eventChan <- ProviderEvent{
+							Type:     EventThinkingDelta,
+							Thinking: reasoningStr,
+						}
+					}
+				}
 				if choice.Delta.Content != "" {
 					eventChan <- ProviderEvent{
 						Type: EventContentDelta,
diff --git a/internal/tui/components/chat/messages/messages.go b/internal/tui/components/chat/messages/messages.go
index 17bb582dcadbea1f314b976bc31a31639f8d9609..17c157df5292280c6f094ec4e0f95bee82c6a77b 100644
--- a/internal/tui/components/chat/messages/messages.go
+++ b/internal/tui/components/chat/messages/messages.go
@@ -274,6 +274,9 @@ func (m *messageCmp) renderThinkingContent() string {
 	if reasoningContent.StartedAt > 0 {
 		duration := m.message.ThinkingDuration()
 		if reasoningContent.FinishedAt > 0 {
+			if duration.String() == "0s" {
+				return ""
+			}
 			m.anim.SetLabel("")
 			opts := core.StatusOpts{
 				Title: "Thought for",
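Patch 7 surfaces reasoning text that some OpenAI-compatible providers attach as a non-standard "reasoning" field on the streamed delta. The raw value exposed by the SDK's ExtraFields accessor is a quoted JSON string, so it has to be unquoted with encoding/json before it is emitted as a thinking delta. A minimal sketch of that decoding step follows; it is an illustrative aside with an assumed input value, not part of the patch:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// What reasoning.Raw() might return for one chunk: a JSON-encoded string.
	raw := `"Let me check the file first."`

	var reasoning string
	if err := json.Unmarshal([]byte(raw), &reasoning); err == nil && reasoning != "" {
		fmt.Println("thinking delta:", reasoning)
	}
}

The companion TUI change suppresses the "Thought for" status line when the accumulated thinking duration rounds to 0s, so providers that emit no reasoning do not show an empty header.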
From 75fd9510edbf29acf9507ff82dcf8230bd33c257 Mon Sep 17 00:00:00 2001
From: Kujtim Hoxha
Date: Mon, 4 Aug 2025 23:14:44 +0200
Subject: [PATCH 8/9] chore: update catwalk

---
 go.mod | 4 ++--
 go.sum | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/go.mod b/go.mod
index 7e41d27cb2c2b8c7382ebfefaeab5188c5dc1e67..6f12853c9f7f70b78544049f950b1264af918c68 100644
--- a/go.mod
+++ b/go.mod
@@ -14,7 +14,7 @@ require (
 	github.com/charlievieth/fastwalk v1.0.11
 	github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250716191546-1e2ffbbcf5c5
 	github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250730165737-56ff7146d52d
-	github.com/charmbracelet/catwalk v0.3.5
+	github.com/charmbracelet/catwalk v0.4.5
 	github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674
 	github.com/charmbracelet/glamour/v2 v2.0.0-20250516160903-6f1e2c8f9ebe
 	github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0
@@ -56,7 +56,7 @@ require (
 	github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
 	go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0 // indirect
 	golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 // indirect
-	golang.org/x/oauth2 v0.25.0 // indirect
+	golang.org/x/oauth2 v0.30.0 // indirect
 	golang.org/x/time v0.8.0 // indirect
 	google.golang.org/api v0.211.0 // indirect
 )
diff --git a/go.sum b/go.sum
index 37c3c7ed4509a19f5732262c904cf01ad329a51b..7913df3dee8435f583171fc1d4ec404ca9ab5134 100644
--- a/go.sum
+++ b/go.sum
@@ -78,8 +78,8 @@ github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250716191546-1e2ffbbcf5c5
 github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250716191546-1e2ffbbcf5c5/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw=
 github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250730165737-56ff7146d52d h1:YMXLZHSo8DjytVY/b5dK8LDuyQsVUmBK3ydQMpu2Ui4=
 github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250730165737-56ff7146d52d/go.mod h1:XIQ1qQfRph6Z5o2EikCydjumo0oDInQySRHuPATzbZc=
-github.com/charmbracelet/catwalk v0.3.5 h1:ChMvA5ooTNZhDKFagmGNQgIZvZp8XjpdaJ+cDmhgCgA=
-github.com/charmbracelet/catwalk v0.3.5/go.mod h1:gUUCqqZ8bk4D7ZzGTu3I77k7cC2x4exRuJBN1H2u2pc=
+github.com/charmbracelet/catwalk v0.4.5 h1:Kv3PadDe8IF8gpcYTfAJdCee5Bv4HufvtNT61FXtq5g=
+github.com/charmbracelet/catwalk v0.4.5/go.mod h1:WnKgNPmQHuMyk7GtwAQwl+ezHusfH40IvzML2qwUGwc=
 github.com/charmbracelet/colorprofile v0.3.1 h1:k8dTHMd7fgw4bnFd7jXTLZrSU/CQrKnL3m+AxCzDz40=
 github.com/charmbracelet/colorprofile v0.3.1/go.mod h1:/GkGusxNs8VB/RSOh3fu0TJmQ4ICMMPApIIVn0KszZ0=
 github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674 h1:+Cz+VfxD5DO+JT1LlswXWhre0HYLj6l2HW8HVGfMuC0=
@@ -332,8 +332,8 @@ golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
 golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
 golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY=
 golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds=
-golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
-golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
+golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
+golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
From ec4c6f7898169a9948340617ddbacd25e978b760 Mon Sep 17 00:00:00 2001
From: Kujtim Hoxha
Date: Mon, 4 Aug 2025 23:26:17 +0200
Subject: [PATCH 9/9] chore: small fix

---
 internal/tui/components/dialogs/models/list.go | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/internal/tui/components/dialogs/models/list.go b/internal/tui/components/dialogs/models/list.go
index bcec9e981228202d8eb10c86e34642043b499634..66b55d85b299cb0bacb4cc2466c7b4146248ba05 100644
--- a/internal/tui/components/dialogs/models/list.go
+++ b/internal/tui/components/dialogs/models/list.go
@@ -8,7 +8,6 @@ import (
 	tea "github.com/charmbracelet/bubbletea/v2"
 	"github.com/charmbracelet/catwalk/pkg/catwalk"
 	"github.com/charmbracelet/crush/internal/config"
-	"github.com/charmbracelet/crush/internal/env"
 	"github.com/charmbracelet/crush/internal/tui/exp/list"
 	"github.com/charmbracelet/crush/internal/tui/styles"
 	"github.com/charmbracelet/crush/internal/tui/util"
@@ -53,10 +52,8 @@ func (m *ModelListComponent) Init() tea.Cmd {
 		providers, err := config.Providers()
 		filteredProviders := []catwalk.Provider{}
 		for _, p := range providers {
-			hasApiKeyEnv := strings.HasPrefix(p.APIKey, "$")
-			resolver := config.NewEnvironmentVariableResolver(env.New())
-			endpoint, _ := resolver.ResolveValue(p.APIEndpoint)
-			if endpoint != "" && hasApiKeyEnv {
+			hasAPIKeyEnv := strings.HasPrefix(p.APIKey, "$")
+			if hasAPIKeyEnv && p.ID != catwalk.InferenceProviderAzure {
 				filteredProviders = append(filteredProviders, p)
 			}
 		}
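Taken together, patches 3, 5, 6 and 9 settle on a single provider-filtering rule: a catalog provider is offered in the model list when its APIKey entry is an environment-variable reference (a value starting with "$"), with Azure excluded because it needs additional endpoint configuration. A standalone sketch of that rule with simplified stand-in types follows; the real code uses catwalk.Provider and catwalk.InferenceProviderAzure, so this is illustrative only:

package main

import (
	"fmt"
	"strings"
)

type provider struct {
	ID     string
	APIKey string
}

// filterProviders keeps providers whose API key is configured as a "$ENV_VAR"
// reference, skipping Azure, mirroring the check in the final patch.
func filterProviders(all []provider) []provider {
	var filtered []provider
	for _, p := range all {
		hasAPIKeyEnv := strings.HasPrefix(p.APIKey, "$")
		if hasAPIKeyEnv && p.ID != "azure" {
			filtered = append(filtered, p)
		}
	}
	return filtered
}

func main() {
	all := []provider{
		{ID: "anthropic", APIKey: "$ANTHROPIC_API_KEY"},
		{ID: "azure", APIKey: "$AZURE_OPENAI_API_KEY"},
		{ID: "local", APIKey: ""},
	}
	fmt.Println(filterProviders(all)) // only the anthropic entry passes
}

Compared with the intermediate endpoint-resolution check introduced in patch 5, this keeps the decision purely on how the catalog declares the key, which is why the env resolver import could be dropped again.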