fix: lint issues, server timeout, etc.

Created by Carlos Alexandro Becker

Change summary

cmd/openrouter/main.go          | 35 +++++++++++++++++++++++------------
internal/providers/providers.go |  6 +++++-
main.go                         | 20 ++++++++++++++++----
pkg/client/client.go            | 17 +++++++++--------
pkg/provider/provider.go        | 32 ++++++++++++++++++--------------
5 files changed, 71 insertions(+), 39 deletions(-)

Detailed changes

cmd/openrouter/main.go 🔗

@@ -1,6 +1,9 @@
+// Package main provides a command-line tool to fetch models from OpenRouter
+// and generate a configuration file for the provider.
 package main
 
 import (
+	"context"
 	"encoding/json"
 	"fmt"
 	"io"
@@ -14,7 +17,7 @@ import (
 	"github.com/charmbracelet/fur/pkg/provider"
 )
 
-// Model represents the complete model configuration
+// Model represents the complete model configuration.
 type Model struct {
 	ID              string       `json:"id"`
 	CanonicalSlug   string       `json:"canonical_slug"`
@@ -29,7 +32,7 @@ type Model struct {
 	SupportedParams []string     `json:"supported_parameters"`
 }
 
-// Architecture defines the model's architecture details
+// Architecture defines the model's architecture details.
 type Architecture struct {
 	Modality         string   `json:"modality"`
 	InputModalities  []string `json:"input_modalities"`
@@ -38,7 +41,7 @@ type Architecture struct {
 	InstructType     *string  `json:"instruct_type"`
 }
 
-// Pricing contains the pricing information for different operations
+// Pricing contains the pricing information for different operations.
 type Pricing struct {
 	Prompt            string `json:"prompt"`
 	Completion        string `json:"completion"`
@@ -50,16 +53,20 @@ type Pricing struct {
 	InputCacheWrite   string `json:"input_cache_write"`
 }
 
-// TopProvider describes the top provider's capabilities
+// TopProvider describes the top provider's capabilities.
 type TopProvider struct {
 	ContextLength       int64  `json:"context_length"`
 	MaxCompletionTokens *int64 `json:"max_completion_tokens"`
 	IsModerated         bool   `json:"is_moderated"`
 }
 
+// ModelsResponse is the response structure for the models API.
 type ModelsResponse struct {
 	Data []Model `json:"data"`
 }
+
+// ModelPricing is the pricing structure for a model, detailing costs per
+// million tokens for input and output, both cached and uncached.
 type ModelPricing struct {
 	CostPer1MIn        float64 `json:"cost_per_1m_in"`
 	CostPer1MOut       float64 `json:"cost_per_1m_out"`
@@ -95,25 +102,30 @@ func getPricing(model Model) ModelPricing {
 
 func fetchOpenRouterModels() (*ModelsResponse, error) {
 	client := &http.Client{Timeout: 30 * time.Second}
-	req, _ := http.NewRequest("GET", "https://openrouter.ai/api/v1/models", nil)
+	req, _ := http.NewRequestWithContext(
+		context.Background(),
+		"GET",
+		"https://openrouter.ai/api/v1/models",
+		nil,
+	)
 	req.Header.Set("User-Agent", "Crush-Client/1.0")
 	resp, err := client.Do(req)
 	if err != nil {
-		return nil, err
+		return nil, err //nolint:wrapcheck
 	}
-	defer resp.Body.Close()
+	defer resp.Body.Close() //nolint:errcheck
 	if resp.StatusCode != 200 {
 		body, _ := io.ReadAll(resp.Body)
 		return nil, fmt.Errorf("status %d: %s", resp.StatusCode, body)
 	}
 	var mr ModelsResponse
 	if err := json.NewDecoder(resp.Body).Decode(&mr); err != nil {
-		return nil, err
+		return nil, err //nolint:wrapcheck
 	}
 	return &mr, nil
 }
 
-// This is used to generate the openrouter.json config file
+// This is used to generate the openrouter.json config file.
 func main() {
 	modelsResp, err := fetchOpenRouterModels()
 	if err != nil {
@@ -125,7 +137,7 @@ func main() {
 		ID:             "openrouter",
 		APIKey:         "$OPENROUTER_API_KEY",
 		APIEndpoint:    "https://openrouter.ai/api/v1",
-		Type:           provider.ProviderTypeOpenAI,
+		Type:           provider.TypeOpenAI,
 		DefaultModelID: "anthropic/claude-sonnet-4",
 		Models:         []provider.Model{},
 	}
@@ -165,8 +177,7 @@ func main() {
 		log.Fatal("Error marshaling OpenRouter provider:", err)
 	}
 	// write to file
-	err = os.WriteFile("internal/providers/configs/openrouter.json", data, 0o644)
-	if err != nil {
+	if err := os.WriteFile("internal/providers/configs/openrouter.json", data, 0o600); err != nil {
 		log.Fatal("Error writing OpenRouter provider config:", err)
 	}
 }

internal/providers/providers.go 🔗

@@ -1,3 +1,4 @@
+// Package providers provides a registry of inference providers.
 package providers
 
 import (
@@ -32,7 +33,7 @@ var xAIConfig []byte
 //go:embed configs/bedrock.json
 var bedrockConfig []byte
 
-// ProviderFunc is a function that returns a Provider
+// ProviderFunc is a function that returns a Provider.
 type ProviderFunc func() provider.Provider
 
 var providerRegistry = map[provider.InferenceProvider]ProviderFunc{
@@ -46,6 +47,7 @@ var providerRegistry = map[provider.InferenceProvider]ProviderFunc{
 	provider.InferenceProviderOpenRouter: openRouterProvider,
 }
 
+// GetAll returns all registered providers.
 func GetAll() []provider.Provider {
 	providers := make([]provider.Provider, 0, len(providerRegistry))
 	for _, providerFunc := range providerRegistry {
@@ -54,6 +56,7 @@ func GetAll() []provider.Provider {
 	return providers
 }
 
+// GetByID returns a provider by its ID.
 func GetByID(id provider.InferenceProvider) (provider.Provider, bool) {
 	providerFunc, exists := providerRegistry[id]
 	if !exists {
@@ -62,6 +65,7 @@ func GetByID(id provider.InferenceProvider) (provider.Provider, bool) {
 	return providerFunc(), true
 }
 
+// GetAvailableIDs returns a slice of all available provider IDs.
 func GetAvailableIDs() []provider.InferenceProvider {
 	ids := make([]provider.InferenceProvider, 0, len(providerRegistry))
 	for id := range providerRegistry {

main.go 🔗

@@ -1,9 +1,12 @@
+// Package main is the main entry point for the HTTP server that serves
+// inference providers.
 package main
 
 import (
 	"encoding/json"
 	"log"
 	"net/http"
+	"time"
 
 	"github.com/charmbracelet/fur/internal/providers"
 )
@@ -24,14 +27,23 @@ func providersHandler(w http.ResponseWriter, r *http.Request) {
 }
 
 func main() {
-	http.HandleFunc("/providers", providersHandler)
-	http.HandleFunc("/healthz", func(w http.ResponseWriter, r *http.Request) {
-		_, _ = w.Write([]byte("OK"))
+	mux := http.NewServeMux()
+	mux.HandleFunc("/providers", providersHandler)
+	mux.HandleFunc("/healthz", func(w http.ResponseWriter, _ *http.Request) {
 		w.WriteHeader(http.StatusOK)
+		_, _ = w.Write([]byte("OK"))
 	})
 
+	server := &http.Server{
+		Addr:         ":8080",
+		Handler:      mux,
+		ReadTimeout:  15 * time.Second,
+		WriteTimeout: 15 * time.Second,
+		IdleTimeout:  60 * time.Second,
+	}
+
 	log.Println("Server starting on :8080")
-	if err := http.ListenAndServe(":8080", nil); err != nil {
+	if err := server.ListenAndServe(); err != nil {
 		log.Fatal("Server failed to start:", err)
 	}
 }

pkg/client/client.go 🔗

@@ -1,3 +1,4 @@
+// Package client provides a client for interacting with the fur service.
 package client
 
 import (
@@ -11,14 +12,14 @@ import (
 
 const defaultURL = "http://localhost:8080"
 
-// Client represents a client for the fur service
+// Client represents a client for the fur service.
 type Client struct {
 	baseURL    string
 	httpClient *http.Client
 }
 
 // New creates a new client instance
-// Uses FUR_URL environment variable or falls back to localhost:8080
+// Uses FUR_URL environment variable or falls back to localhost:8080.
 func New() *Client {
 	baseURL := os.Getenv("FUR_URL")
 	if baseURL == "" {
@@ -31,7 +32,7 @@ func New() *Client {
 	}
 }
 
-// NewWithURL creates a new client with a specific URL
+// NewWithURL creates a new client with a specific URL.
 func NewWithURL(url string) *Client {
 	return &Client{
 		baseURL:    url,
@@ -39,15 +40,15 @@ func NewWithURL(url string) *Client {
 	}
 }
 
-// GetProviders retrieves all available providers from the service
+// GetProviders retrieves all available providers from the service.
 func (c *Client) GetProviders() ([]provider.Provider, error) {
 	url := fmt.Sprintf("%s/providers", c.baseURL)
-	
-	resp, err := c.httpClient.Get(url)
+
+	resp, err := c.httpClient.Get(url) //nolint:noctx
 	if err != nil {
 		return nil, fmt.Errorf("failed to make request: %w", err)
 	}
-	defer resp.Body.Close()
+	defer resp.Body.Close() //nolint:errcheck
 
 	if resp.StatusCode != http.StatusOK {
 		return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
@@ -59,4 +60,4 @@ func (c *Client) GetProviders() ([]provider.Provider, error) {
 	}
 
 	return providers, nil
-}
+}

pkg/provider/provider.go 🔗

@@ -1,22 +1,25 @@
+// Package provider provides types and constants for AI providers.
 package provider
 
-// ProviderType represents the type of AI provider
-type ProviderType string
+// Type represents the type of AI provider.
+type Type string
 
+// All the supported AI provider types.
 const (
-	ProviderTypeOpenAI     ProviderType = "openai"
-	ProviderTypeAnthropic  ProviderType = "anthropic"
-	ProviderTypeGemini     ProviderType = "gemini"
-	ProviderTypeAzure      ProviderType = "azure"
-	ProviderTypeBedrock    ProviderType = "bedrock"
-	ProviderTypeVertexAI   ProviderType = "vertexai"
-	ProviderTypeXAI        ProviderType = "xai"
-	ProviderTypeOpenRouter ProviderType = "openrouter"
+	TypeOpenAI     Type = "openai"
+	TypeAnthropic  Type = "anthropic"
+	TypeGemini     Type = "gemini"
+	TypeAzure      Type = "azure"
+	TypeBedrock    Type = "bedrock"
+	TypeVertexAI   Type = "vertexai"
+	TypeXAI        Type = "xai"
+	TypeOpenRouter Type = "openrouter"
 )
 
-// InferenceProvider represents the inference provider identifier
+// InferenceProvider represents the inference provider identifier.
 type InferenceProvider string
 
+// All the inference providers supported by the system.
 const (
 	InferenceProviderOpenAI     InferenceProvider = "openai"
 	InferenceProviderAnthropic  InferenceProvider = "anthropic"
@@ -28,18 +31,18 @@ const (
 	InferenceProviderOpenRouter InferenceProvider = "openrouter"
 )
 
-// Provider represents an AI provider configuration
+// Provider represents an AI provider configuration.
 type Provider struct {
 	Name           string            `json:"name"`
 	ID             InferenceProvider `json:"id"`
 	APIKey         string            `json:"api_key,omitempty"`
 	APIEndpoint    string            `json:"api_endpoint,omitempty"`
-	Type           ProviderType      `json:"type,omitempty"`
+	Type           Type              `json:"type,omitempty"`
 	DefaultModelID string            `json:"default_model_id,omitempty"`
 	Models         []Model           `json:"models,omitempty"`
 }
 
-// Model represents an AI model configuration
+// Model represents an AI model configuration.
 type Model struct {
 	ID                 string  `json:"id"`
 	Name               string  `json:"model"`
@@ -53,6 +56,7 @@ type Model struct {
 	SupportsImages     bool    `json:"supports_attachments"`
 }
 
+// KnownProviders returns all the known inference providers.
 func KnownProviders() []InferenceProvider {
 	return []InferenceProvider{
 		InferenceProviderOpenAI,