Detailed changes
@@ -28,6 +28,15 @@ const (
ProviderBuiltIn Provider = "builtin"
)
+// ModelSource describes where a model's configuration comes from
+type ModelSource string
+
+const (
+ SourceGateway ModelSource = "exe.dev gateway"
+ SourceEnvVar ModelSource = "env" // NOTE(review): not referenced by Source(), which emits the literal "$VAR_NAME" string directly — confirm this constant is still needed
+ SourceCustom ModelSource = "custom" // User-configured custom model
+)
+
// Model represents a configured LLM model in Shelley
type Model struct {
// ID is the user-facing identifier for this model
@@ -46,6 +55,48 @@ type Model struct {
Factory func(config *Config, httpc *http.Client) (llm.Service, error)
}
+// Source returns a human-readable description of where this model's configuration comes from.
+// For example: "exe.dev gateway", "$ANTHROPIC_API_KEY", etc.
+func (m Model) Source(cfg *Config) string {
+ // Predictable model has no source
+ if m.ID == "predictable" {
+ return ""
+ }
+
+ // Check if using gateway with implicit keys
+ if cfg.Gateway != "" {
+ // Gateway is configured - check if this model is using gateway (implicit key)
+ switch m.Provider {
+ case ProviderAnthropic:
+ if cfg.AnthropicAPIKey == "implicit" {
+ return string(SourceGateway)
+ }
+ return "$ANTHROPIC_API_KEY"
+ case ProviderOpenAI:
+ if cfg.OpenAIAPIKey == "implicit" {
+ return string(SourceGateway)
+ }
+ return "$OPENAI_API_KEY"
+ case ProviderFireworks:
+ if cfg.FireworksAPIKey == "implicit" {
+ return string(SourceGateway)
+ }
+ return "$FIREWORKS_API_KEY"
+ case ProviderGemini:
+ if cfg.GeminiAPIKey == "implicit" {
+ return string(SourceGateway)
+ }
+ return "$GEMINI_API_KEY"
+ }
+ }
+
+ // No gateway configured (or provider not handled above) - derive the source from RequiredEnvVars
+ if len(m.RequiredEnvVars) > 0 {
+ return "$" + m.RequiredEnvVars[0]
+ }
+ return ""
+}
+
// Config holds the configuration needed to create LLM services
type Config struct {
// API keys for each provider
@@ -266,16 +317,21 @@ func Default() Model {
// Manager manages LLM services for all configured models
type Manager struct {
- services map[string]serviceEntry
- logger *slog.Logger
- db *db.DB // for custom models and LLM request recording
- httpc *http.Client // HTTP client with recording middleware
+ services map[string]serviceEntry
+ modelOrder []string // ordered list of model IDs (built-in first, then custom)
+ logger *slog.Logger
+ db *db.DB // for custom models and LLM request recording
+ httpc *http.Client // HTTP client with recording middleware
+ cfg *Config // retained for refreshing custom models
}
type serviceEntry struct {
- service llm.Service
- provider Provider
- modelID string
+ service llm.Service
+ provider Provider
+ modelID string
+ source string // Human-readable source (e.g., "exe.dev gateway", "$ANTHROPIC_API_KEY")
+ displayName string // For custom models, the user-provided display name
+ tags string // For custom models, user-provided tags
}
// ConfigInfo is an optional interface that services can implement to provide configuration details for logging
@@ -439,125 +495,148 @@ func NewManager(cfg *Config) (*Manager, error) {
httpc = llmhttp.NewClient(nil, nil)
}
- // Store the HTTP client for use with custom models
+ // Store the HTTP client and config for use with custom models
manager.httpc = httpc
+ manager.cfg = cfg
+ // Load built-in models first
for _, model := range All() {
svc, err := model.Factory(cfg, httpc)
if err != nil {
// Model not available (e.g., missing API key) - skip it
continue
}
+
manager.services[model.ID] = serviceEntry{
- service: svc,
- provider: model.Provider,
- modelID: model.ID,
+ service: svc,
+ provider: model.Provider,
+ modelID: model.ID,
+ source: model.Source(cfg),
+ displayName: model.ID, // built-in models use ID as display name
}
+ manager.modelOrder = append(manager.modelOrder, model.ID)
+ }
+
+ // Load custom models from database (best-effort: the error is logged if a logger is configured, otherwise silently dropped)
+ if err := manager.loadCustomModels(); err != nil && cfg.Logger != nil {
+ cfg.Logger.Warn("Failed to load custom models", "error", err)
}
return manager, nil
}
-// GetService returns the LLM service for the given model ID, wrapped with logging
-func (m *Manager) GetService(modelID string) (llm.Service, error) {
- // Check custom models first if we have a database
- if m.db != nil {
- dbModels, err := m.db.GetModels(context.Background())
- if err == nil && len(dbModels) > 0 {
- // Custom models exist - only serve custom models, not built-in ones
- for _, model := range dbModels {
- if model.ModelID == modelID {
- svc := m.createServiceFromModel(&model)
- if svc != nil {
- if m.logger != nil {
- return &loggingService{
- service: svc,
- logger: m.logger,
- modelID: modelID,
- provider: Provider(model.ProviderType),
- db: m.db,
- }, nil
- }
- return svc, nil
- }
- }
- }
- // Custom models exist but this model ID wasn't found among them
- return nil, fmt.Errorf("unsupported model: %s", modelID)
- }
+// loadCustomModels loads custom models from the database into the manager.
+// They are appended after the built-in models in modelOrder.
+func (m *Manager) loadCustomModels() error {
+ if m.db == nil {
+ return nil
}
- // No custom models - fall back to built-in models
- if entry, ok := m.services[modelID]; ok {
- // Wrap with logging if we have a logger
- if m.logger != nil {
- return &loggingService{
- service: entry.service,
- logger: m.logger,
- modelID: entry.modelID,
- provider: entry.provider,
- db: m.db,
- }, nil
+ dbModels, err := m.db.GetModels(context.Background())
+ if err != nil {
+ return err
+ }
+
+ for _, model := range dbModels {
+ // Skip if this model ID is already registered (built-in takes precedence)
+ if _, exists := m.services[model.ModelID]; exists {
+ continue
+ }
+
+ svc := m.createServiceFromModel(&model)
+ if svc == nil {
+ continue
+ }
+
+ m.services[model.ModelID] = serviceEntry{
+ service: svc,
+ provider: Provider(model.ProviderType),
+ modelID: model.ModelID,
+ source: string(SourceCustom),
+ displayName: model.DisplayName,
+ tags: model.Tags,
}
- return entry.service, nil
+ m.modelOrder = append(m.modelOrder, model.ModelID)
}
- return nil, fmt.Errorf("unsupported model: %s", modelID)
+
+ return nil
}
-// GetAvailableModels returns a list of available model IDs in the same order as All()
-func (m *Manager) GetAvailableModels() []string {
- var ids []string
+// RefreshCustomModels reloads custom models from the database.
+// Call this after adding or removing custom models via the UI.
+func (m *Manager) RefreshCustomModels() error {
+ if m.db == nil {
+ return nil
+ }
- // If we have custom models in the database, use ONLY those
- if m.db != nil {
- if dbModels, err := m.db.GetModels(context.Background()); err == nil && len(dbModels) > 0 {
- for _, model := range dbModels {
- ids = append(ids, model.ModelID)
- }
- return ids
+ // Remove existing custom models from services and modelOrder
+ newOrder := make([]string, 0, len(m.modelOrder))
+ for _, id := range m.modelOrder {
+ entry, ok := m.services[id]
+ if ok && entry.source != string(SourceCustom) {
+ newOrder = append(newOrder, id)
+ } else {
+ delete(m.services, id)
}
}
+ m.modelOrder = newOrder
- // No custom models - fall back to built-in models in the same order as All()
- all := All()
- for _, model := range all {
- if _, ok := m.services[model.ID]; ok {
- ids = append(ids, model.ID)
- }
+ // Reload custom models
+ return m.loadCustomModels()
+}
+
+// GetService returns the LLM service for the given model ID, wrapped with logging
+func (m *Manager) GetService(modelID string) (llm.Service, error) {
+ entry, ok := m.services[modelID]
+ if !ok {
+ return nil, fmt.Errorf("unsupported model: %s", modelID)
}
- return ids
+
+ // Wrap with logging if we have a logger
+ if m.logger != nil {
+ return &loggingService{
+ service: entry.service,
+ logger: m.logger,
+ modelID: entry.modelID,
+ provider: entry.provider,
+ db: m.db,
+ }, nil
+ }
+ return entry.service, nil
+}
+
+// GetAvailableModels returns a list of available model IDs.
+// Returns union of built-in models (in order) followed by custom models.
+func (m *Manager) GetAvailableModels() []string {
+ // Return a copy to prevent external modification
+ result := make([]string, len(m.modelOrder))
+ copy(result, m.modelOrder)
+ return result
}
// HasModel reports whether the manager has a service for the given model ID
func (m *Manager) HasModel(modelID string) bool {
- // Check custom models first
- if m.db != nil {
- if model, err := m.db.GetModel(context.Background(), modelID); err == nil && model != nil {
- return true
- }
- }
_, ok := m.services[modelID]
return ok
}
-// ModelInfo contains display name and tags for a model
+// ModelInfo contains display name, tags, and source for a model
type ModelInfo struct {
DisplayName string
Tags string
+ Source string // Human-readable source (e.g., "exe.dev gateway", "$ANTHROPIC_API_KEY", "custom")
}
-// GetModelInfo returns the display name and tags for a model
+// GetModelInfo returns the display name, tags, and source for a model
func (m *Manager) GetModelInfo(modelID string) *ModelInfo {
- if m.db == nil {
- return nil
- }
- model, err := m.db.GetModel(context.Background(), modelID)
- if err != nil {
+ entry, ok := m.services[modelID]
+ if !ok {
return nil
}
return &ModelInfo{
- DisplayName: model.DisplayName,
- Tags: model.Tags,
+ DisplayName: entry.displayName,
+ Tags: entry.tags,
+ Source: entry.source,
}
}
@@ -404,3 +404,110 @@ func TestHTTPClientPassedToFactory(t *testing.T) {
t.Fatal("Factory returned nil service")
}
}
+
+func TestGetModelSource(t *testing.T) {
+ tests := []struct {
+ name string
+ cfg *Config
+ modelID string
+ want string
+ }{
+ {
+ name: "anthropic with env var only",
+ cfg: &Config{AnthropicAPIKey: "test-key"},
+ modelID: "claude-opus-4.5",
+ want: "$ANTHROPIC_API_KEY",
+ },
+ {
+ name: "anthropic with gateway implicit key",
+ cfg: &Config{Gateway: "https://gateway.example.com", AnthropicAPIKey: "implicit"},
+ modelID: "claude-opus-4.5",
+ want: "exe.dev gateway",
+ },
+ {
+ name: "anthropic with gateway but explicit key",
+ cfg: &Config{Gateway: "https://gateway.example.com", AnthropicAPIKey: "actual-key"},
+ modelID: "claude-opus-4.5",
+ want: "$ANTHROPIC_API_KEY",
+ },
+ {
+ name: "fireworks with env var only",
+ cfg: &Config{FireworksAPIKey: "test-key"},
+ modelID: "qwen3-coder-fireworks",
+ want: "$FIREWORKS_API_KEY",
+ },
+ {
+ name: "fireworks with gateway implicit key",
+ cfg: &Config{Gateway: "https://gateway.example.com", FireworksAPIKey: "implicit"},
+ modelID: "qwen3-coder-fireworks",
+ want: "exe.dev gateway",
+ },
+ {
+ name: "openai with env var only",
+ cfg: &Config{OpenAIAPIKey: "test-key"},
+ modelID: "gpt-5.2-codex",
+ want: "$OPENAI_API_KEY",
+ },
+ {
+ name: "gemini with env var only",
+ cfg: &Config{GeminiAPIKey: "test-key"},
+ modelID: "gemini-3-pro",
+ want: "$GEMINI_API_KEY",
+ },
+ {
+ name: "predictable has no source",
+ cfg: &Config{},
+ modelID: "predictable",
+ want: "",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ manager, err := NewManager(tt.cfg)
+ if err != nil {
+ t.Fatalf("NewManager failed: %v", err)
+ }
+
+ info := manager.GetModelInfo(tt.modelID)
+ if info == nil {
+ t.Fatalf("GetModelInfo(%q) returned nil", tt.modelID)
+ }
+ if info.Source != tt.want {
+ t.Errorf("GetModelInfo(%q).Source = %q, want %q", tt.modelID, info.Source, tt.want)
+ }
+ })
+ }
+}
+
+func TestGetAvailableModelsUnion(t *testing.T) {
+ // Verify GetAvailableModels returns the union of built-in and custom models.
+ // With no database configured, only built-in models are exercised here;
+ // the custom-model half of the union requires a database and is covered elsewhere.
+ cfg := &Config{
+ AnthropicAPIKey: "test-key",
+ FireworksAPIKey: "test-key",
+ }
+
+ manager, err := NewManager(cfg)
+ if err != nil {
+ t.Fatalf("NewManager failed: %v", err)
+ }
+
+ models := manager.GetAvailableModels()
+
+ // Should have anthropic models and fireworks models, plus predictable
+ expectedModels := []string{"claude-opus-4.5", "qwen3-coder-fireworks", "glm-4p6-fireworks", "claude-sonnet-4.5", "claude-haiku-4.5", "predictable"}
+ for _, expected := range expectedModels {
+ found := false
+ for _, m := range models {
+ if m == expected {
+ found = true
+ break
+ }
+ }
+ if !found {
+ t.Errorf("Expected model %q not found in available models: %v", expected, models)
+ }
+ }
+}
@@ -552,6 +552,10 @@ func (m *claudeLLMManager) GetModelInfo(modelID string) *models.ModelInfo {
return nil
}
+func (m *claudeLLMManager) RefreshCustomModels() error {
+ return nil
+}
+
// TestClaudeCancelDuringToolCall tests cancellation during tool execution with Claude
func TestClaudeCancelDuringToolCall(t *testing.T) {
h := NewClaudeTestHarness(t)
@@ -411,3 +411,7 @@ func (m *testLLMManager) HasModel(modelID string) bool {
func (m *testLLMManager) GetModelInfo(modelID string) *models.ModelInfo {
return nil
}
+
+func (m *testLLMManager) RefreshCustomModels() error {
+ return nil
+}
@@ -141,6 +141,11 @@ func (s *Server) handleCreateModel(w http.ResponseWriter, r *http.Request) {
return
}
+ // Refresh the model manager's cache
+ if err := s.llmManager.RefreshCustomModels(); err != nil {
+ s.logger.Warn("Failed to refresh custom models cache", "error", err)
+ }
+
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusCreated)
json.NewEncoder(w).Encode(toModelAPI(*model))
@@ -234,6 +239,11 @@ func (s *Server) handleUpdateModel(w http.ResponseWriter, r *http.Request, model
return
}
+ // Refresh the model manager's cache
+ if err := s.llmManager.RefreshCustomModels(); err != nil {
+ s.logger.Warn("Failed to refresh custom models cache", "error", err)
+ }
+
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(toModelAPI(*model))
}
@@ -245,6 +255,11 @@ func (s *Server) handleDeleteModel(w http.ResponseWriter, r *http.Request, model
return
}
+ // Refresh the model manager's cache
+ if err := s.llmManager.RefreshCustomModels(); err != nil {
+ s.logger.Warn("Failed to refresh custom models cache", "error", err)
+ }
+
w.WriteHeader(http.StatusNoContent)
}
@@ -292,6 +307,11 @@ func (s *Server) handleDuplicateModel(w http.ResponseWriter, r *http.Request, mo
return
}
+ // Refresh the model manager's cache
+ if err := s.llmManager.RefreshCustomModels(); err != nil {
+ s.logger.Warn("Failed to refresh custom models cache", "error", err)
+ }
+
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusCreated)
json.NewEncoder(w).Encode(toModelAPI(*model))
@@ -361,25 +361,29 @@ func (s *Server) serveIndexWithInit(w http.ResponseWriter, r *http.Request, fs h
modelList := s.getModelList()
// Select default model - use configured default if available, otherwise first ready model
- defaultModel := s.defaultModel
- if defaultModel == "" {
- defaultModel = models.Default().ID
- }
- defaultModelAvailable := false
- for _, m := range modelList {
- if m.ID == defaultModel && m.Ready {
- defaultModelAvailable = true
- break
+ // If no models are available, default_model should be empty
+ defaultModel := ""
+ if len(modelList) > 0 {
+ defaultModel = s.defaultModel
+ if defaultModel == "" {
+ defaultModel = models.Default().ID
}
- }
- if !defaultModelAvailable {
- // Fall back to first ready model
+ defaultModelAvailable := false
for _, m := range modelList {
- if m.Ready {
- defaultModel = m.ID
+ if m.ID == defaultModel && m.Ready {
+ defaultModelAvailable = true
break
}
}
+ if !defaultModelAvailable {
+ // Fall back to first ready model
+ for _, m := range modelList {
+ if m.Ready {
+ defaultModel = m.ID
+ break
+ }
+ }
+ }
}
// Get hostname (add .exe.xyz suffix if no dots, matching system_prompt.go)
@@ -1082,6 +1086,7 @@ func (s *Server) handleVersion(w http.ResponseWriter, r *http.Request) {
type ModelInfo struct {
ID string `json:"id"`
DisplayName string `json:"display_name,omitempty"`
+ Source string `json:"source,omitempty"` // Human-readable source (e.g., "exe.dev gateway", "$ANTHROPIC_API_KEY")
Ready bool `json:"ready"`
MaxContextTokens int `json:"max_context_tokens,omitempty"`
}
@@ -1104,9 +1109,10 @@ func (s *Server) getModelList() []ModelInfo {
maxCtx = svc.TokenContextWindow()
}
info := ModelInfo{ID: id, Ready: err == nil, MaxContextTokens: maxCtx}
- // Add display name from model info
+ // Add display name and source from model info
if modelInfo := s.llmManager.GetModelInfo(id); modelInfo != nil {
info.DisplayName = modelInfo.DisplayName
+ info.Source = modelInfo.Source
}
modelList = append(modelList, info)
}
@@ -71,6 +71,7 @@ type LLMProvider interface {
GetAvailableModels() []string
HasModel(modelID string) bool
GetModelInfo(modelID string) *models.ModelInfo
+ RefreshCustomModels() error
}
// NewLLMServiceManager creates a new LLM service manager from config
@@ -912,6 +912,10 @@ func (m *inspectableLLMManager) GetModelInfo(modelID string) *models.ModelInfo {
return nil
}
+func (m *inspectableLLMManager) RefreshCustomModels() error {
+ return nil
+}
+
func TestVersionEndpoint(t *testing.T) {
// Create temp DB-backed server
ctx := context.Background()
@@ -489,7 +489,13 @@ function ChatInterface({
const [sending, setSending] = useState(false);
const [error, setError] = useState<string | null>(null);
const [models, setModels] = useState<
- Array<{ id: string; display_name?: string; ready: boolean; max_context_tokens?: number }>
+ Array<{
+ id: string;
+ display_name?: string;
+ source?: string;
+ ready: boolean;
+ max_context_tokens?: number;
+ }>
>(window.__SHELLEY_INIT__?.models || []);
const [selectedModel, setSelectedModelState] = useState<string>(() => {
// First check localStorage for a sticky model preference
@@ -1208,7 +1214,11 @@ function ChatInterface({
{models.length === 0 ? (
<div className="add-model-hint">
<p className="text-sm" style={{ color: "var(--text-secondary)" }}>
- No AI models configured. Press <kbd>Ctrl</kbd><span>+</span><kbd>K</kbd> or <kbd>⌘</kbd><span>+</span><kbd>K</kbd> to add a model.
+ No AI models configured. Press <kbd>Ctrl</kbd>
+ <span>+</span>
+ <kbd>K</kbd> or <kbd>⌘</kbd>
+ <span>+</span>
+ <kbd>K</kbd> to add a model.
</p>
</div>
) : (
@@ -61,6 +61,10 @@ function ModelPicker({
const selectedModelObj = models.find((m) => m.id === selectedModel);
const displayName = selectedModelObj?.display_name || selectedModel;
+ const displayWithSource =
+ selectedModelObj?.source && selectedModelObj.source !== "custom"
+ ? `${displayName} (${selectedModelObj.source})`
+ : displayName;
const handleSelect = (modelId: string) => {
onSelectModel(modelId);
@@ -80,7 +84,7 @@ function ModelPicker({
disabled={disabled}
type="button"
>
- <span className="model-picker-value">{displayName}</span>
+ <span className="model-picker-value">{displayWithSource}</span>
<svg
className={`model-picker-chevron ${isOpen ? "open" : ""}`}
width="12"
@@ -108,7 +112,12 @@ function ModelPicker({
disabled={!model.ready}
type="button"
>
- <span className="model-picker-option-name">{model.display_name || model.id}</span>
+ <div className="model-picker-option-content">
+ <span className="model-picker-option-name">{model.display_name || model.id}</span>
+ {model.source && (
+ <span className="model-picker-option-source">{model.source}</span>
+ )}
+ </div>
{!model.ready && <span className="model-picker-option-badge">not ready</span>}
{model.id === selectedModel && (
<svg
@@ -43,6 +43,14 @@ const DEFAULT_MODELS: Record<ProviderType, { name: string; model_name: string }[
],
};
+// Built-in model info from init data
+interface BuiltInModel {
+ id: string;
+ display_name?: string;
+ source?: string;
+ ready: boolean;
+}
+
interface FormData {
display_name: string;
provider_type: ProviderType;
@@ -69,7 +77,7 @@ function ModelsModal({ isOpen, onClose, onModelsChanged }: ModelsModalProps) {
const [models, setModels] = useState<CustomModel[]>([]);
const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
- const [builtInModels, setBuiltInModels] = useState<string[]>([]);
+ const [builtInModels, setBuiltInModels] = useState<BuiltInModel[]>([]);
// Form state
const [showForm, setShowForm] = useState(false);
@@ -99,10 +107,13 @@ function ModelsModal({ isOpen, onClose, onModelsChanged }: ModelsModalProps) {
useEffect(() => {
if (isOpen) {
loadModels();
- // Get built-in models from init data
+ // Get built-in models from init data (those with non-custom source)
const initData = window.__SHELLEY_INIT__;
if (initData?.models) {
- setBuiltInModels(initData.models.map((m) => m.id));
+ const builtIn = initData.models.filter(
+ (m: BuiltInModel) => m.source && m.source !== "custom",
+ );
+ setBuiltInModels(builtIn);
}
}
}, [isOpen, loadModels]);
@@ -485,104 +496,114 @@ function ModelsModal({ isOpen, onClose, onModelsChanged }: ModelsModalProps) {
) : (
// Model List
<>
- {models.length === 0 &&
- builtInModels.length > 0 &&
- builtInModels[0] !== "predictable" && (
- <div className="models-info">
- <p>Built-in models available:</p>
- <ul className="builtin-list">
- {builtInModels
- .filter((m) => m !== "predictable")
- .map((model) => (
- <li key={model}>{model}</li>
- ))}
- </ul>
- </div>
- )}
-
- {models.length > 0 && (
- <div className="models-list">
- {models.map((model) => (
- <div key={model.model_id} className="model-card">
+ <div className="models-list">
+ {/* Built-in models (from env vars or gateway) - read only */}
+ {builtInModels
+ .filter((m) => m.id !== "predictable")
+ .map((model) => (
+ <div key={model.id} className="model-card model-card-builtin">
<div className="model-header">
<div className="model-info">
- <span className="model-name">{model.display_name}</span>
- <span className="model-provider">
- {PROVIDER_LABELS[model.provider_type]}
- </span>
- {model.tags && (
- <span className="model-badge" title={model.tags}>
- {model.tags.split(",")[0]}
- </span>
- )}
- </div>
- <div className="model-actions">
- <button
- className="btn-icon"
- onClick={() => handleDuplicate(model)}
- title="Duplicate"
- >
- <svg
- fill="none"
- stroke="currentColor"
- viewBox="0 0 24 24"
- width="16"
- height="16"
- >
- <path
- strokeLinecap="round"
- strokeLinejoin="round"
- strokeWidth={2}
- d="M8 16H6a2 2 0 01-2-2V6a2 2 0 012-2h8a2 2 0 012 2v2m-6 12h8a2 2 0 002-2v-8a2 2 0 00-2-2h-8a2 2 0 00-2 2v8a2 2 0 002 2z"
- />
- </svg>
- </button>
- <button className="btn-icon" onClick={() => handleEdit(model)} title="Edit">
- <svg
- fill="none"
- stroke="currentColor"
- viewBox="0 0 24 24"
- width="16"
- height="16"
- >
- <path
- strokeLinecap="round"
- strokeLinejoin="round"
- strokeWidth={2}
- d="M11 5H6a2 2 0 00-2 2v11a2 2 0 002 2h11a2 2 0 002-2v-5m-1.414-9.414a2 2 0 112.828 2.828L11.828 15H9v-2.828l8.586-8.586z"
- />
- </svg>
- </button>
- <button
- className="btn-icon btn-danger"
- onClick={() => handleDelete(model.model_id)}
- title="Delete"
- >
- <svg
- fill="none"
- stroke="currentColor"
- viewBox="0 0 24 24"
- width="16"
- height="16"
- >
- <path
- strokeLinecap="round"
- strokeLinejoin="round"
- strokeWidth={2}
- d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16"
- />
- </svg>
- </button>
+ <span className="model-name">{model.display_name || model.id}</span>
+ <span className="model-source">{model.source}</span>
</div>
</div>
<div className="model-details">
- <span className="model-api-name">{model.model_name}</span>
- <span className="model-endpoint">{model.endpoint}</span>
+ <span className="model-api-name">{model.id}</span>
</div>
</div>
))}
- </div>
- )}
+
+ {/* Custom models - editable */}
+ {models.map((model) => (
+ <div key={model.model_id} className="model-card">
+ <div className="model-header">
+ <div className="model-info">
+ <span className="model-name">{model.display_name}</span>
+ <span className="model-provider">{PROVIDER_LABELS[model.provider_type]}</span>
+ {model.tags && (
+ <span className="model-badge" title={model.tags}>
+ {model.tags.split(",")[0]}
+ </span>
+ )}
+ </div>
+ <div className="model-actions">
+ <button
+ className="btn-icon"
+ onClick={() => handleDuplicate(model)}
+ title="Duplicate"
+ >
+ <svg
+ fill="none"
+ stroke="currentColor"
+ viewBox="0 0 24 24"
+ width="16"
+ height="16"
+ >
+ <path
+ strokeLinecap="round"
+ strokeLinejoin="round"
+ strokeWidth={2}
+ d="M8 16H6a2 2 0 01-2-2V6a2 2 0 012-2h8a2 2 0 012 2v2m-6 12h8a2 2 0 002-2v-8a2 2 0 00-2-2h-8a2 2 0 00-2 2v8a2 2 0 002 2z"
+ />
+ </svg>
+ </button>
+ <button className="btn-icon" onClick={() => handleEdit(model)} title="Edit">
+ <svg
+ fill="none"
+ stroke="currentColor"
+ viewBox="0 0 24 24"
+ width="16"
+ height="16"
+ >
+ <path
+ strokeLinecap="round"
+ strokeLinejoin="round"
+ strokeWidth={2}
+ d="M11 5H6a2 2 0 00-2 2v11a2 2 0 002 2h11a2 2 0 002-2v-5m-1.414-9.414a2 2 0 112.828 2.828L11.828 15H9v-2.828l8.586-8.586z"
+ />
+ </svg>
+ </button>
+ <button
+ className="btn-icon btn-danger"
+ onClick={() => handleDelete(model.model_id)}
+ title="Delete"
+ >
+ <svg
+ fill="none"
+ stroke="currentColor"
+ viewBox="0 0 24 24"
+ width="16"
+ height="16"
+ >
+ <path
+ strokeLinecap="round"
+ strokeLinejoin="round"
+ strokeWidth={2}
+ d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16"
+ />
+ </svg>
+ </button>
+ </div>
+ </div>
+ <div className="model-details">
+ <span className="model-api-name">{model.model_name}</span>
+ <span className="model-endpoint">{model.endpoint}</span>
+ </div>
+ </div>
+ ))}
+
+ {/* Empty state when no models at all */}
+ {builtInModels.length === 0 && models.length === 0 && (
+ <div className="models-empty">
+ <p>No models configured.</p>
+ <p className="models-empty-hint">
+ Set environment variables like ANTHROPIC_API_KEY, or use the -gateway flag, or
+ add a custom model below.
+ </p>
+ </div>
+ )}
+ </div>
</>
)}
</div>
@@ -28,7 +28,13 @@ class ApiService {
}
async getModels(): Promise<
- Array<{ id: string; display_name?: string; ready: boolean; max_context_tokens?: number }>
+ Array<{
+ id: string;
+ display_name?: string;
+ source?: string;
+ ready: boolean;
+ max_context_tokens?: number;
+ }>
> {
const response = await fetch(`${this.baseUrl}/models`);
if (!response.ok) {
@@ -2848,8 +2848,23 @@ svg {
cursor: not-allowed;
}
-.model-picker-option-name {
+.model-picker-option-content {
flex: 1;
+ display: flex;
+ flex-direction: column;
+ gap: 0.125rem;
+ min-width: 0;
+}
+
+.model-picker-option-name {
+ overflow: hidden;
+ text-overflow: ellipsis;
+ white-space: nowrap;
+}
+
+.model-picker-option-source {
+ font-size: 0.65rem;
+ color: var(--text-secondary);
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
@@ -4273,6 +4288,31 @@ svg {
color: var(--text-secondary);
}
+.model-source {
+ font-size: 0.75rem;
+ padding: 0.125rem 0.5rem;
+ background: var(--bg-tertiary);
+ border-radius: 0.25rem;
+ color: var(--text-secondary);
+}
+
+.model-card-builtin {
+ opacity: 0.8;
+ background: var(--bg-secondary);
+}
+
+.models-empty {
+ padding: 2rem;
+ text-align: center;
+ color: var(--text-secondary);
+}
+
+.models-empty-hint {
+ font-size: 0.875rem;
+ margin-top: 0.5rem;
+ color: var(--text-tertiary);
+}
+
.model-badge {
font-size: 0.625rem;
text-transform: uppercase;
@@ -50,6 +50,7 @@ export interface LLMContent {
export interface Model {
id: string;
display_name?: string;
+ source?: string; // Human-readable source (e.g., "exe.dev gateway", "$ANTHROPIC_API_KEY")
ready: boolean;
max_context_tokens?: number;
}