add support for images (#144)

phantomreactor created

Change summary

go.mod                                       |  10 
go.sum                                       |  11 
internal/llm/agent/agent.go                  |  45 +
internal/llm/models/anthropic.go             | 107 ++--
internal/llm/models/azure.go                 | 241 +++++-----
internal/llm/models/gemini.go                |  84 ++-
internal/llm/models/groq.go                  |  81 +-
internal/llm/models/models.go                |  23 
internal/llm/models/openai.go                | 260 ++++++-----
internal/llm/prompt/title.go                 |   3 
internal/llm/provider/anthropic.go           |  16 
internal/llm/provider/bedrock.go             |   9 
internal/llm/provider/gemini.go              |   9 
internal/llm/provider/openai.go              |  13 
internal/message/attachment.go               |   8 
internal/message/content.go                  |   9 
internal/message/message.go                  |   1 
internal/tui/components/chat/chat.go         |   4 
internal/tui/components/chat/editor.go       | 125 +++++
internal/tui/components/chat/list.go         |  16 
internal/tui/components/chat/message.go      |  24 +
internal/tui/components/dialog/filepicker.go | 477 ++++++++++++++++++++++
internal/tui/components/dialog/help.go       |   2 
internal/tui/components/dialog/models.go     |  13 
internal/tui/components/dialog/permission.go |   3 
internal/tui/image/images.go                 |  72 +++
internal/tui/page/chat.go                    |  11 
internal/tui/styles/icons.go                 |  17 
internal/tui/styles/styles.go                |   5 
internal/tui/tui.go                          | 111 ++++-
30 files changed, 1,326 insertions(+), 484 deletions(-)

Detailed changes

go.mod 🔗

@@ -2,8 +2,6 @@ module github.com/opencode-ai/opencode
 
 go 1.24.0
 
-toolchain go1.24.2
-
 require (
 	github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0
 	github.com/JohannesKaufmann/html-to-markdown v1.6.0
@@ -16,7 +14,6 @@ require (
 	github.com/charmbracelet/bubbles v0.20.0
 	github.com/charmbracelet/bubbletea v1.3.4
 	github.com/charmbracelet/glamour v0.9.1
-	github.com/charmbracelet/huh v0.6.0
 	github.com/charmbracelet/lipgloss v1.1.0
 	github.com/charmbracelet/x/ansi v0.8.0
 	github.com/fsnotify/fsnotify v1.8.0
@@ -68,11 +65,10 @@ require (
 	github.com/aymerick/douceur v0.2.0 // indirect
 	github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
 	github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect
-	github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0 // indirect
 	github.com/charmbracelet/x/term v0.2.1 // indirect
 	github.com/davecgh/go-spew v1.1.1 // indirect
+	github.com/disintegration/imaging v1.6.2
 	github.com/dlclark/regexp2 v1.11.4 // indirect
-	github.com/dustin/go-humanize v1.0.1 // indirect
 	github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
 	github.com/felixge/httpsnoop v1.0.4 // indirect
 	github.com/go-logr/logr v1.4.2 // indirect
@@ -85,13 +81,12 @@ require (
 	github.com/gorilla/css v1.0.1 // indirect
 	github.com/inconshreveable/mousetrap v1.1.0 // indirect
 	github.com/kylelemons/godebug v1.1.0 // indirect
-	github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
+	github.com/lucasb-eyer/go-colorful v1.2.0
 	github.com/mattn/go-isatty v0.0.20 // indirect
 	github.com/mattn/go-localereader v0.0.1 // indirect
 	github.com/mattn/go-runewidth v0.0.16 // indirect
 	github.com/mfridman/interpolate v0.0.2 // indirect
 	github.com/microcosm-cc/bluemonday v1.0.27 // indirect
-	github.com/mitchellh/hashstructure/v2 v2.0.2 // indirect
 	github.com/muesli/cancelreader v0.2.2 // indirect
 	github.com/ncruces/julianday v1.0.0 // indirect
 	github.com/pelletier/go-toml/v2 v2.2.3 // indirect
@@ -123,6 +118,7 @@ require (
 	go.opentelemetry.io/otel/trace v1.35.0 // indirect
 	go.uber.org/multierr v1.11.0 // indirect
 	golang.org/x/crypto v0.37.0 // indirect
+	golang.org/x/image v0.26.0 // indirect
 	golang.org/x/net v0.39.0 // indirect
 	golang.org/x/oauth2 v0.25.0 // indirect
 	golang.org/x/sync v0.13.0 // indirect

go.sum 🔗

@@ -82,8 +82,6 @@ github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4p
 github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk=
 github.com/charmbracelet/glamour v0.9.1 h1:11dEfiGP8q1BEqvGoIjivuc2rBk+5qEXdPtaQ2WoiCM=
 github.com/charmbracelet/glamour v0.9.1/go.mod h1:+SHvIS8qnwhgTpVMiXwn7OfGomSqff1cHBCI8jLOetk=
-github.com/charmbracelet/huh v0.6.0 h1:mZM8VvZGuE0hoDXq6XLxRtgfWyTI3b2jZNKh0xWmax8=
-github.com/charmbracelet/huh v0.6.0/go.mod h1:GGNKeWCeNzKpEOh/OJD8WBwTQjV3prFAtQPpLv+AVwU=
 github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY=
 github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30=
 github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE=
@@ -92,14 +90,14 @@ github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd h1:vy0G
 github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs=
 github.com/charmbracelet/x/exp/golden v0.0.0-20240815200342-61de596daa2b h1:MnAMdlwSltxJyULnrYbkZpp4k58Co7Tah3ciKhSNo0Q=
 github.com/charmbracelet/x/exp/golden v0.0.0-20240815200342-61de596daa2b/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
-github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0 h1:qko3AQ4gK1MTS/de7F5hPGx6/k1u0w4TeYmBFwzYVP4=
-github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0/go.mod h1:pBhA0ybfXv6hDjQUZ7hk1lVxBiUbupdw5R31yPUViVQ=
 github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
 github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
 github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
+github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
 github.com/dlclark/regexp2 v1.11.4 h1:rPYF9/LECdNymJufQKmri9gV604RvvABwgOA8un7yAo=
 github.com/dlclark/regexp2 v1.11.4/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
 github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
@@ -169,8 +167,6 @@ github.com/mfridman/interpolate v0.0.2 h1:pnuTK7MQIxxFz1Gr+rjSIx9u7qVjf5VOoM/u6B
 github.com/mfridman/interpolate v0.0.2/go.mod h1:p+7uk6oE07mpE/Ik1b8EckO0O4ZXiGAfshKBWLUM9Xg=
 github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk=
 github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA=
-github.com/mitchellh/hashstructure/v2 v2.0.2 h1:vGKWl0YJqUNxE8d+h8f6NJLcCJrgbhC4NcD46KavDd4=
-github.com/mitchellh/hashstructure/v2 v2.0.2/go.mod h1:MG3aRVU/N29oo/V/IhBX8GR/zz4kQkprJgF2EVszyDE=
 github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI=
 github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo=
 github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
@@ -283,6 +279,9 @@ golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE=
 golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc=
 golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw=
 golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM=
+golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/image v0.26.0 h1:4XjIFEZWQmCZi6Wv8BoxsDhRU3RVnLX04dToTDAEPlY=
+golang.org/x/image v0.26.0/go.mod h1:lcxbMFAovzpnJxzXS3nyL83K27tmqtKzIJpctK8YO5c=
 golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
 golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
 golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=

internal/llm/agent/agent.go 🔗

@@ -38,7 +38,7 @@ func (e *AgentEvent) Response() message.Message {
 }
 
 type Service interface {
-	Run(ctx context.Context, sessionID string, content string) (<-chan AgentEvent, error)
+	Run(ctx context.Context, sessionID string, content string, attachments ...message.Attachment) (<-chan AgentEvent, error)
 	Cancel(sessionID string)
 	IsSessionBusy(sessionID string) bool
 	IsBusy() bool
@@ -117,6 +117,9 @@ func (a *agent) IsSessionBusy(sessionID string) bool {
 }
 
 func (a *agent) generateTitle(ctx context.Context, sessionID string, content string) error {
+	if content == "" {
+		return nil
+	}
 	if a.titleProvider == nil {
 		return nil
 	}
@@ -124,16 +127,13 @@ func (a *agent) generateTitle(ctx context.Context, sessionID string, content str
 	if err != nil {
 		return err
 	}
+	parts := []message.ContentPart{message.TextContent{Text: content}}
 	response, err := a.titleProvider.SendMessages(
 		ctx,
 		[]message.Message{
 			{
-				Role: message.User,
-				Parts: []message.ContentPart{
-					message.TextContent{
-						Text: content,
-					},
-				},
+				Role:  message.User,
+				Parts: parts,
 			},
 		},
 		make([]tools.BaseTool, 0),
@@ -158,7 +158,10 @@ func (a *agent) err(err error) AgentEvent {
 	}
 }
 
-func (a *agent) Run(ctx context.Context, sessionID string, content string) (<-chan AgentEvent, error) {
+func (a *agent) Run(ctx context.Context, sessionID string, content string, attachments ...message.Attachment) (<-chan AgentEvent, error) {
+	if !a.provider.Model().SupportsAttachments && attachments != nil {
+		attachments = nil
+	}
 	events := make(chan AgentEvent)
 	if a.IsSessionBusy(sessionID) {
 		return nil, ErrSessionBusy
@@ -172,10 +175,13 @@ func (a *agent) Run(ctx context.Context, sessionID string, content string) (<-ch
 		defer logging.RecoverPanic("agent.Run", func() {
 			events <- a.err(fmt.Errorf("panic while running the agent"))
 		})
-
-		result := a.processGeneration(genCtx, sessionID, content)
+		var attachmentParts []message.ContentPart
+		for _, attachment := range attachments {
+			attachmentParts = append(attachmentParts, message.BinaryContent{Path: attachment.FilePath, MIMEType: attachment.MimeType, Data: attachment.Content})
+		}
+		result := a.processGeneration(genCtx, sessionID, content, attachmentParts)
 		if result.Err() != nil && !errors.Is(result.Err(), ErrRequestCancelled) && !errors.Is(result.Err(), context.Canceled) {
-			logging.ErrorPersist(fmt.Sprintf("Generation error for session %s: %v", sessionID, result))
+			logging.ErrorPersist(result.Err().Error())
 		}
 		logging.Debug("Request completed", "sessionID", sessionID)
 		a.activeRequests.Delete(sessionID)
@@ -186,7 +192,7 @@ func (a *agent) Run(ctx context.Context, sessionID string, content string) (<-ch
 	return events, nil
 }
 
-func (a *agent) processGeneration(ctx context.Context, sessionID, content string) AgentEvent {
+func (a *agent) processGeneration(ctx context.Context, sessionID, content string, attachmentParts []message.ContentPart) AgentEvent {
 	// List existing messages; if none, start title generation asynchronously.
 	msgs, err := a.messages.List(ctx, sessionID)
 	if err != nil {
@@ -204,13 +210,13 @@ func (a *agent) processGeneration(ctx context.Context, sessionID, content string
 		}()
 	}
 
-	userMsg, err := a.createUserMessage(ctx, sessionID, content)
+	userMsg, err := a.createUserMessage(ctx, sessionID, content, attachmentParts)
 	if err != nil {
 		return a.err(fmt.Errorf("failed to create user message: %w", err))
 	}
-
 	// Append the new user message to the conversation history.
 	msgHistory := append(msgs, userMsg)
+
 	for {
 		// Check for cancellation before each iteration
 		select {
@@ -240,12 +246,12 @@ func (a *agent) processGeneration(ctx context.Context, sessionID, content string
 	}
 }
 
-func (a *agent) createUserMessage(ctx context.Context, sessionID, content string) (message.Message, error) {
+func (a *agent) createUserMessage(ctx context.Context, sessionID, content string, attachmentParts []message.ContentPart) (message.Message, error) {
+	parts := []message.ContentPart{message.TextContent{Text: content}}
+	parts = append(parts, attachmentParts...)
 	return a.messages.Create(ctx, sessionID, message.CreateMessageParams{
-		Role: message.User,
-		Parts: []message.ContentPart{
-			message.TextContent{Text: content},
-		},
+		Role:  message.User,
+		Parts: parts,
 	})
 }
 
@@ -310,7 +316,6 @@ func (a *agent) streamAndHandleEvents(ctx context.Context, sessionID string, msg
 				}
 				continue
 			}
-
 			toolResult, toolErr := tool.Run(ctx, tools.ToolCall{
 				ID:    toolCall.ID,
 				Name:  toolCall.Name,
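
The variadic attachments parameter keeps existing Run call sites compiling while letting the TUI pass images through; note that Run silently drops the attachments when the active model lacks SupportsAttachments. A minimal caller sketch (the prompt, session, and image bytes are illustrative):

	events, err := app.CoderAgent.Run(ctx, session.ID, "What does this diagram show?",
		message.Attachment{
			FilePath: "/tmp/diagram.png",
			FileName: "diagram.png",
			MimeType: "image/png",
			Content:  pngBytes, // raw file bytes read earlier
		},
	)
	if err != nil {
		return err // e.g. ErrSessionBusy when the session already has an active request
	}
	for event := range events {
		_ = event // stream of AgentEvent updates until generation completes
	}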

internal/llm/models/anthropic.go 🔗

@@ -14,64 +14,69 @@ const (
 // https://docs.anthropic.com/en/docs/about-claude/models/all-models
 var AnthropicModels = map[ModelID]Model{
 	Claude35Sonnet: {
-		ID:                 Claude35Sonnet,
-		Name:               "Claude 3.5 Sonnet",
-		Provider:           ProviderAnthropic,
-		APIModel:           "claude-3-5-sonnet-latest",
-		CostPer1MIn:        3.0,
-		CostPer1MInCached:  3.75,
-		CostPer1MOutCached: 0.30,
-		CostPer1MOut:       15.0,
-		ContextWindow:      200000,
-		DefaultMaxTokens:   5000,
+		ID:                  Claude35Sonnet,
+		Name:                "Claude 3.5 Sonnet",
+		Provider:            ProviderAnthropic,
+		APIModel:            "claude-3-5-sonnet-latest",
+		CostPer1MIn:         3.0,
+		CostPer1MInCached:   3.75,
+		CostPer1MOutCached:  0.30,
+		CostPer1MOut:        15.0,
+		ContextWindow:       200000,
+		DefaultMaxTokens:    5000,
+		SupportsAttachments: true,
 	},
 	Claude3Haiku: {
-		ID:                 Claude3Haiku,
-		Name:               "Claude 3 Haiku",
-		Provider:           ProviderAnthropic,
-		APIModel:           "claude-3-haiku-20240307", // doesn't support "-latest"
-		CostPer1MIn:        0.25,
-		CostPer1MInCached:  0.30,
-		CostPer1MOutCached: 0.03,
-		CostPer1MOut:       1.25,
-		ContextWindow:      200000,
-		DefaultMaxTokens:   4096,
+		ID:                  Claude3Haiku,
+		Name:                "Claude 3 Haiku",
+		Provider:            ProviderAnthropic,
+		APIModel:            "claude-3-haiku-20240307", // doesn't support "-latest"
+		CostPer1MIn:         0.25,
+		CostPer1MInCached:   0.30,
+		CostPer1MOutCached:  0.03,
+		CostPer1MOut:        1.25,
+		ContextWindow:       200000,
+		DefaultMaxTokens:    4096,
+		SupportsAttachments: true,
 	},
 	Claude37Sonnet: {
-		ID:                 Claude37Sonnet,
-		Name:               "Claude 3.7 Sonnet",
-		Provider:           ProviderAnthropic,
-		APIModel:           "claude-3-7-sonnet-latest",
-		CostPer1MIn:        3.0,
-		CostPer1MInCached:  3.75,
-		CostPer1MOutCached: 0.30,
-		CostPer1MOut:       15.0,
-		ContextWindow:      200000,
-		DefaultMaxTokens:   50000,
-		CanReason:          true,
+		ID:                  Claude37Sonnet,
+		Name:                "Claude 3.7 Sonnet",
+		Provider:            ProviderAnthropic,
+		APIModel:            "claude-3-7-sonnet-latest",
+		CostPer1MIn:         3.0,
+		CostPer1MInCached:   3.75,
+		CostPer1MOutCached:  0.30,
+		CostPer1MOut:        15.0,
+		ContextWindow:       200000,
+		DefaultMaxTokens:    50000,
+		CanReason:           true,
+		SupportsAttachments: true,
 	},
 	Claude35Haiku: {
-		ID:                 Claude35Haiku,
-		Name:               "Claude 3.5 Haiku",
-		Provider:           ProviderAnthropic,
-		APIModel:           "claude-3-5-haiku-latest",
-		CostPer1MIn:        0.80,
-		CostPer1MInCached:  1.0,
-		CostPer1MOutCached: 0.08,
-		CostPer1MOut:       4.0,
-		ContextWindow:      200000,
-		DefaultMaxTokens:   4096,
+		ID:                  Claude35Haiku,
+		Name:                "Claude 3.5 Haiku",
+		Provider:            ProviderAnthropic,
+		APIModel:            "claude-3-5-haiku-latest",
+		CostPer1MIn:         0.80,
+		CostPer1MInCached:   1.0,
+		CostPer1MOutCached:  0.08,
+		CostPer1MOut:        4.0,
+		ContextWindow:       200000,
+		DefaultMaxTokens:    4096,
+		SupportsAttachments: true,
 	},
 	Claude3Opus: {
-		ID:                 Claude3Opus,
-		Name:               "Claude 3 Opus",
-		Provider:           ProviderAnthropic,
-		APIModel:           "claude-3-opus-latest",
-		CostPer1MIn:        15.0,
-		CostPer1MInCached:  18.75,
-		CostPer1MOutCached: 1.50,
-		CostPer1MOut:       75.0,
-		ContextWindow:      200000,
-		DefaultMaxTokens:   4096,
+		ID:                  Claude3Opus,
+		Name:                "Claude 3 Opus",
+		Provider:            ProviderAnthropic,
+		APIModel:            "claude-3-opus-latest",
+		CostPer1MIn:         15.0,
+		CostPer1MInCached:   18.75,
+		CostPer1MOutCached:  1.50,
+		CostPer1MOut:        75.0,
+		ContextWindow:       200000,
+		DefaultMaxTokens:    4096,
+		SupportsAttachments: true,
 	},
 }

internal/llm/models/azure.go 🔗

@@ -18,140 +18,151 @@ const (
 
 var AzureModels = map[ModelID]Model{
 	AzureGPT41: {
-		ID:                 AzureGPT41,
-		Name:               "Azure OpenAI – GPT 4.1",
-		Provider:           ProviderAzure,
-		APIModel:           "gpt-4.1",
-		CostPer1MIn:        OpenAIModels[GPT41].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[GPT41].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[GPT41].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[GPT41].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[GPT41].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[GPT41].DefaultMaxTokens,
+		ID:                  AzureGPT41,
+		Name:                "Azure OpenAI – GPT 4.1",
+		Provider:            ProviderAzure,
+		APIModel:            "gpt-4.1",
+		CostPer1MIn:         OpenAIModels[GPT41].CostPer1MIn,
+		CostPer1MInCached:   OpenAIModels[GPT41].CostPer1MInCached,
+		CostPer1MOut:        OpenAIModels[GPT41].CostPer1MOut,
+		CostPer1MOutCached:  OpenAIModels[GPT41].CostPer1MOutCached,
+		ContextWindow:       OpenAIModels[GPT41].ContextWindow,
+		DefaultMaxTokens:    OpenAIModels[GPT41].DefaultMaxTokens,
+		SupportsAttachments: true,
 	},
 	AzureGPT41Mini: {
-		ID:                 AzureGPT41Mini,
-		Name:               "Azure OpenAI – GPT 4.1 mini",
-		Provider:           ProviderAzure,
-		APIModel:           "gpt-4.1-mini",
-		CostPer1MIn:        OpenAIModels[GPT41Mini].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[GPT41Mini].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[GPT41Mini].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[GPT41Mini].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[GPT41Mini].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[GPT41Mini].DefaultMaxTokens,
+		ID:                  AzureGPT41Mini,
+		Name:                "Azure OpenAI – GPT 4.1 mini",
+		Provider:            ProviderAzure,
+		APIModel:            "gpt-4.1-mini",
+		CostPer1MIn:         OpenAIModels[GPT41Mini].CostPer1MIn,
+		CostPer1MInCached:   OpenAIModels[GPT41Mini].CostPer1MInCached,
+		CostPer1MOut:        OpenAIModels[GPT41Mini].CostPer1MOut,
+		CostPer1MOutCached:  OpenAIModels[GPT41Mini].CostPer1MOutCached,
+		ContextWindow:       OpenAIModels[GPT41Mini].ContextWindow,
+		DefaultMaxTokens:    OpenAIModels[GPT41Mini].DefaultMaxTokens,
+		SupportsAttachments: true,
 	},
 	AzureGPT41Nano: {
-		ID:                 AzureGPT41Nano,
-		Name:               "Azure OpenAI – GPT 4.1 nano",
-		Provider:           ProviderAzure,
-		APIModel:           "gpt-4.1-nano",
-		CostPer1MIn:        OpenAIModels[GPT41Nano].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[GPT41Nano].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[GPT41Nano].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[GPT41Nano].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[GPT41Nano].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[GPT41Nano].DefaultMaxTokens,
+		ID:                  AzureGPT41Nano,
+		Name:                "Azure OpenAI – GPT 4.1 nano",
+		Provider:            ProviderAzure,
+		APIModel:            "gpt-4.1-nano",
+		CostPer1MIn:         OpenAIModels[GPT41Nano].CostPer1MIn,
+		CostPer1MInCached:   OpenAIModels[GPT41Nano].CostPer1MInCached,
+		CostPer1MOut:        OpenAIModels[GPT41Nano].CostPer1MOut,
+		CostPer1MOutCached:  OpenAIModels[GPT41Nano].CostPer1MOutCached,
+		ContextWindow:       OpenAIModels[GPT41Nano].ContextWindow,
+		DefaultMaxTokens:    OpenAIModels[GPT41Nano].DefaultMaxTokens,
+		SupportsAttachments: true,
 	},
 	AzureGPT45Preview: {
-		ID:                 AzureGPT45Preview,
-		Name:               "Azure OpenAI – GPT 4.5 preview",
-		Provider:           ProviderAzure,
-		APIModel:           "gpt-4.5-preview",
-		CostPer1MIn:        OpenAIModels[GPT45Preview].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[GPT45Preview].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[GPT45Preview].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[GPT45Preview].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[GPT45Preview].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[GPT45Preview].DefaultMaxTokens,
+		ID:                  AzureGPT45Preview,
+		Name:                "Azure OpenAI – GPT 4.5 preview",
+		Provider:            ProviderAzure,
+		APIModel:            "gpt-4.5-preview",
+		CostPer1MIn:         OpenAIModels[GPT45Preview].CostPer1MIn,
+		CostPer1MInCached:   OpenAIModels[GPT45Preview].CostPer1MInCached,
+		CostPer1MOut:        OpenAIModels[GPT45Preview].CostPer1MOut,
+		CostPer1MOutCached:  OpenAIModels[GPT45Preview].CostPer1MOutCached,
+		ContextWindow:       OpenAIModels[GPT45Preview].ContextWindow,
+		DefaultMaxTokens:    OpenAIModels[GPT45Preview].DefaultMaxTokens,
+		SupportsAttachments: true,
 	},
 	AzureGPT4o: {
-		ID:                 AzureGPT4o,
-		Name:               "Azure OpenAI – GPT-4o",
-		Provider:           ProviderAzure,
-		APIModel:           "gpt-4o",
-		CostPer1MIn:        OpenAIModels[GPT4o].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[GPT4o].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[GPT4o].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[GPT4o].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[GPT4o].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[GPT4o].DefaultMaxTokens,
+		ID:                  AzureGPT4o,
+		Name:                "Azure OpenAI – GPT-4o",
+		Provider:            ProviderAzure,
+		APIModel:            "gpt-4o",
+		CostPer1MIn:         OpenAIModels[GPT4o].CostPer1MIn,
+		CostPer1MInCached:   OpenAIModels[GPT4o].CostPer1MInCached,
+		CostPer1MOut:        OpenAIModels[GPT4o].CostPer1MOut,
+		CostPer1MOutCached:  OpenAIModels[GPT4o].CostPer1MOutCached,
+		ContextWindow:       OpenAIModels[GPT4o].ContextWindow,
+		DefaultMaxTokens:    OpenAIModels[GPT4o].DefaultMaxTokens,
+		SupportsAttachments: true,
 	},
 	AzureGPT4oMini: {
-		ID:                 AzureGPT4oMini,
-		Name:               "Azure OpenAI – GPT-4o mini",
-		Provider:           ProviderAzure,
-		APIModel:           "gpt-4o-mini",
-		CostPer1MIn:        OpenAIModels[GPT4oMini].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[GPT4oMini].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[GPT4oMini].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[GPT4oMini].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[GPT4oMini].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[GPT4oMini].DefaultMaxTokens,
+		ID:                  AzureGPT4oMini,
+		Name:                "Azure OpenAI – GPT-4o mini",
+		Provider:            ProviderAzure,
+		APIModel:            "gpt-4o-mini",
+		CostPer1MIn:         OpenAIModels[GPT4oMini].CostPer1MIn,
+		CostPer1MInCached:   OpenAIModels[GPT4oMini].CostPer1MInCached,
+		CostPer1MOut:        OpenAIModels[GPT4oMini].CostPer1MOut,
+		CostPer1MOutCached:  OpenAIModels[GPT4oMini].CostPer1MOutCached,
+		ContextWindow:       OpenAIModels[GPT4oMini].ContextWindow,
+		DefaultMaxTokens:    OpenAIModels[GPT4oMini].DefaultMaxTokens,
+		SupportsAttachments: true,
 	},
 	AzureO1: {
-		ID:                 AzureO1,
-		Name:               "Azure OpenAI – O1",
-		Provider:           ProviderAzure,
-		APIModel:           "o1",
-		CostPer1MIn:        OpenAIModels[O1].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[O1].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[O1].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[O1].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[O1].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[O1].DefaultMaxTokens,
-		CanReason:          OpenAIModels[O1].CanReason,
+		ID:                  AzureO1,
+		Name:                "Azure OpenAI – O1",
+		Provider:            ProviderAzure,
+		APIModel:            "o1",
+		CostPer1MIn:         OpenAIModels[O1].CostPer1MIn,
+		CostPer1MInCached:   OpenAIModels[O1].CostPer1MInCached,
+		CostPer1MOut:        OpenAIModels[O1].CostPer1MOut,
+		CostPer1MOutCached:  OpenAIModels[O1].CostPer1MOutCached,
+		ContextWindow:       OpenAIModels[O1].ContextWindow,
+		DefaultMaxTokens:    OpenAIModels[O1].DefaultMaxTokens,
+		CanReason:           OpenAIModels[O1].CanReason,
+		SupportsAttachments: true,
 	},
 	AzureO1Mini: {
-		ID:                 AzureO1Mini,
-		Name:               "Azure OpenAI – O1 mini",
-		Provider:           ProviderAzure,
-		APIModel:           "o1-mini",
-		CostPer1MIn:        OpenAIModels[O1Mini].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[O1Mini].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[O1Mini].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[O1Mini].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[O1Mini].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[O1Mini].DefaultMaxTokens,
-		CanReason:          OpenAIModels[O1Mini].CanReason,
+		ID:                  AzureO1Mini,
+		Name:                "Azure OpenAI – O1 mini",
+		Provider:            ProviderAzure,
+		APIModel:            "o1-mini",
+		CostPer1MIn:         OpenAIModels[O1Mini].CostPer1MIn,
+		CostPer1MInCached:   OpenAIModels[O1Mini].CostPer1MInCached,
+		CostPer1MOut:        OpenAIModels[O1Mini].CostPer1MOut,
+		CostPer1MOutCached:  OpenAIModels[O1Mini].CostPer1MOutCached,
+		ContextWindow:       OpenAIModels[O1Mini].ContextWindow,
+		DefaultMaxTokens:    OpenAIModels[O1Mini].DefaultMaxTokens,
+		CanReason:           OpenAIModels[O1Mini].CanReason,
+		SupportsAttachments: true,
 	},
 	AzureO3: {
-		ID:                 AzureO3,
-		Name:               "Azure OpenAI – O3",
-		Provider:           ProviderAzure,
-		APIModel:           "o3",
-		CostPer1MIn:        OpenAIModels[O3].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[O3].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[O3].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[O3].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[O3].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[O3].DefaultMaxTokens,
-		CanReason:          OpenAIModels[O3].CanReason,
+		ID:                  AzureO3,
+		Name:                "Azure OpenAI – O3",
+		Provider:            ProviderAzure,
+		APIModel:            "o3",
+		CostPer1MIn:         OpenAIModels[O3].CostPer1MIn,
+		CostPer1MInCached:   OpenAIModels[O3].CostPer1MInCached,
+		CostPer1MOut:        OpenAIModels[O3].CostPer1MOut,
+		CostPer1MOutCached:  OpenAIModels[O3].CostPer1MOutCached,
+		ContextWindow:       OpenAIModels[O3].ContextWindow,
+		DefaultMaxTokens:    OpenAIModels[O3].DefaultMaxTokens,
+		CanReason:           OpenAIModels[O3].CanReason,
+		SupportsAttachments: true,
 	},
 	AzureO3Mini: {
-		ID:                 AzureO3Mini,
-		Name:               "Azure OpenAI – O3 mini",
-		Provider:           ProviderAzure,
-		APIModel:           "o3-mini",
-		CostPer1MIn:        OpenAIModels[O3Mini].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[O3Mini].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[O3Mini].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[O3Mini].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[O3Mini].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[O3Mini].DefaultMaxTokens,
-		CanReason:          OpenAIModels[O3Mini].CanReason,
+		ID:                  AzureO3Mini,
+		Name:                "Azure OpenAI – O3 mini",
+		Provider:            ProviderAzure,
+		APIModel:            "o3-mini",
+		CostPer1MIn:         OpenAIModels[O3Mini].CostPer1MIn,
+		CostPer1MInCached:   OpenAIModels[O3Mini].CostPer1MInCached,
+		CostPer1MOut:        OpenAIModels[O3Mini].CostPer1MOut,
+		CostPer1MOutCached:  OpenAIModels[O3Mini].CostPer1MOutCached,
+		ContextWindow:       OpenAIModels[O3Mini].ContextWindow,
+		DefaultMaxTokens:    OpenAIModels[O3Mini].DefaultMaxTokens,
+		CanReason:           OpenAIModels[O3Mini].CanReason,
+		SupportsAttachments: false,
 	},
 	AzureO4Mini: {
-		ID:                 AzureO4Mini,
-		Name:               "Azure OpenAI – O4 mini",
-		Provider:           ProviderAzure,
-		APIModel:           "o4-mini",
-		CostPer1MIn:        OpenAIModels[O4Mini].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[O4Mini].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[O4Mini].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[O4Mini].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[O4Mini].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[O4Mini].DefaultMaxTokens,
-		CanReason:          OpenAIModels[O4Mini].CanReason,
+		ID:                  AzureO4Mini,
+		Name:                "Azure OpenAI – O4 mini",
+		Provider:            ProviderAzure,
+		APIModel:            "o4-mini",
+		CostPer1MIn:         OpenAIModels[O4Mini].CostPer1MIn,
+		CostPer1MInCached:   OpenAIModels[O4Mini].CostPer1MInCached,
+		CostPer1MOut:        OpenAIModels[O4Mini].CostPer1MOut,
+		CostPer1MOutCached:  OpenAIModels[O4Mini].CostPer1MOutCached,
+		ContextWindow:       OpenAIModels[O4Mini].ContextWindow,
+		DefaultMaxTokens:    OpenAIModels[O4Mini].DefaultMaxTokens,
+		CanReason:           OpenAIModels[O4Mini].CanReason,
+		SupportsAttachments: true,
 	},
 }

internal/llm/models/gemini.go 🔗

@@ -12,52 +12,56 @@ const (
 
 var GeminiModels = map[ModelID]Model{
 	Gemini25Flash: {
-		ID:                 Gemini25Flash,
-		Name:               "Gemini 2.5 Flash",
-		Provider:           ProviderGemini,
-		APIModel:           "gemini-2.5-flash-preview-04-17",
-		CostPer1MIn:        0.15,
-		CostPer1MInCached:  0,
-		CostPer1MOutCached: 0,
-		CostPer1MOut:       0.60,
-		ContextWindow:      1000000,
-		DefaultMaxTokens:   50000,
+		ID:                  Gemini25Flash,
+		Name:                "Gemini 2.5 Flash",
+		Provider:            ProviderGemini,
+		APIModel:            "gemini-2.5-flash-preview-04-17",
+		CostPer1MIn:         0.15,
+		CostPer1MInCached:   0,
+		CostPer1MOutCached:  0,
+		CostPer1MOut:        0.60,
+		ContextWindow:       1000000,
+		DefaultMaxTokens:    50000,
+		SupportsAttachments: true,
 	},
 	Gemini25: {
-		ID:                 Gemini25,
-		Name:               "Gemini 2.5 Pro",
-		Provider:           ProviderGemini,
-		APIModel:           "gemini-2.5-pro-preview-03-25",
-		CostPer1MIn:        1.25,
-		CostPer1MInCached:  0,
-		CostPer1MOutCached: 0,
-		CostPer1MOut:       10,
-		ContextWindow:      1000000,
-		DefaultMaxTokens:   50000,
+		ID:                  Gemini25,
+		Name:                "Gemini 2.5 Pro",
+		Provider:            ProviderGemini,
+		APIModel:            "gemini-2.5-pro-preview-03-25",
+		CostPer1MIn:         1.25,
+		CostPer1MInCached:   0,
+		CostPer1MOutCached:  0,
+		CostPer1MOut:        10,
+		ContextWindow:       1000000,
+		DefaultMaxTokens:    50000,
+		SupportsAttachments: true,
 	},
 
 	Gemini20Flash: {
-		ID:                 Gemini20Flash,
-		Name:               "Gemini 2.0 Flash",
-		Provider:           ProviderGemini,
-		APIModel:           "gemini-2.0-flash",
-		CostPer1MIn:        0.10,
-		CostPer1MInCached:  0,
-		CostPer1MOutCached: 0,
-		CostPer1MOut:       0.40,
-		ContextWindow:      1000000,
-		DefaultMaxTokens:   6000,
+		ID:                  Gemini20Flash,
+		Name:                "Gemini 2.0 Flash",
+		Provider:            ProviderGemini,
+		APIModel:            "gemini-2.0-flash",
+		CostPer1MIn:         0.10,
+		CostPer1MInCached:   0,
+		CostPer1MOutCached:  0,
+		CostPer1MOut:        0.40,
+		ContextWindow:       1000000,
+		DefaultMaxTokens:    6000,
+		SupportsAttachments: true,
 	},
 	Gemini20FlashLite: {
-		ID:                 Gemini20FlashLite,
-		Name:               "Gemini 2.0 Flash Lite",
-		Provider:           ProviderGemini,
-		APIModel:           "gemini-2.0-flash-lite",
-		CostPer1MIn:        0.05,
-		CostPer1MInCached:  0,
-		CostPer1MOutCached: 0,
-		CostPer1MOut:       0.30,
-		ContextWindow:      1000000,
-		DefaultMaxTokens:   6000,
+		ID:                  Gemini20FlashLite,
+		Name:                "Gemini 2.0 Flash Lite",
+		Provider:            ProviderGemini,
+		APIModel:            "gemini-2.0-flash-lite",
+		CostPer1MIn:         0.05,
+		CostPer1MInCached:   0,
+		CostPer1MOutCached:  0,
+		CostPer1MOut:        0.30,
+		ContextWindow:       1000000,
+		DefaultMaxTokens:    6000,
+		SupportsAttachments: true,
 	},
 }

internal/llm/models/groq.go 🔗

@@ -28,55 +28,60 @@ var GroqModels = map[ModelID]Model{
 		ContextWindow:      128_000,
 		DefaultMaxTokens:   50000,
 		// for some reason, the groq api doesn't like the reasoningEffort parameter
-		CanReason: false,
+		CanReason:           false,
+		SupportsAttachments: false,
 	},
 
 	Llama4Scout: {
-		ID:                 Llama4Scout,
-		Name:               "Llama4Scout",
-		Provider:           ProviderGROQ,
-		APIModel:           "meta-llama/llama-4-scout-17b-16e-instruct",
-		CostPer1MIn:        0.11,
-		CostPer1MInCached:  0,
-		CostPer1MOutCached: 0,
-		CostPer1MOut:       0.34,
-		ContextWindow:      128_000, // 10M when?
+		ID:                  Llama4Scout,
+		Name:                "Llama4Scout",
+		Provider:            ProviderGROQ,
+		APIModel:            "meta-llama/llama-4-scout-17b-16e-instruct",
+		CostPer1MIn:         0.11,
+		CostPer1MInCached:   0,
+		CostPer1MOutCached:  0,
+		CostPer1MOut:        0.34,
+		ContextWindow:       128_000, // 10M when?
+		SupportsAttachments: true,
 	},
 
 	Llama4Maverick: {
-		ID:                 Llama4Maverick,
-		Name:               "Llama4Maverick",
-		Provider:           ProviderGROQ,
-		APIModel:           "meta-llama/llama-4-maverick-17b-128e-instruct",
-		CostPer1MIn:        0.20,
-		CostPer1MInCached:  0,
-		CostPer1MOutCached: 0,
-		CostPer1MOut:       0.20,
-		ContextWindow:      128_000,
+		ID:                  Llama4Maverick,
+		Name:                "Llama4Maverick",
+		Provider:            ProviderGROQ,
+		APIModel:            "meta-llama/llama-4-maverick-17b-128e-instruct",
+		CostPer1MIn:         0.20,
+		CostPer1MInCached:   0,
+		CostPer1MOutCached:  0,
+		CostPer1MOut:        0.20,
+		ContextWindow:       128_000,
+		SupportsAttachments: true,
 	},
 
 	Llama3_3_70BVersatile: {
-		ID:                 Llama3_3_70BVersatile,
-		Name:               "Llama3_3_70BVersatile",
-		Provider:           ProviderGROQ,
-		APIModel:           "llama-3.3-70b-versatile",
-		CostPer1MIn:        0.59,
-		CostPer1MInCached:  0,
-		CostPer1MOutCached: 0,
-		CostPer1MOut:       0.79,
-		ContextWindow:      128_000,
+		ID:                  Llama3_3_70BVersatile,
+		Name:                "Llama3_3_70BVersatile",
+		Provider:            ProviderGROQ,
+		APIModel:            "llama-3.3-70b-versatile",
+		CostPer1MIn:         0.59,
+		CostPer1MInCached:   0,
+		CostPer1MOutCached:  0,
+		CostPer1MOut:        0.79,
+		ContextWindow:       128_000,
+		SupportsAttachments: false,
 	},
 
 	DeepseekR1DistillLlama70b: {
-		ID:                 DeepseekR1DistillLlama70b,
-		Name:               "DeepseekR1DistillLlama70b",
-		Provider:           ProviderGROQ,
-		APIModel:           "deepseek-r1-distill-llama-70b",
-		CostPer1MIn:        0.75,
-		CostPer1MInCached:  0,
-		CostPer1MOutCached: 0,
-		CostPer1MOut:       0.99,
-		ContextWindow:      128_000,
-		CanReason:          true,
+		ID:                  DeepseekR1DistillLlama70b,
+		Name:                "DeepseekR1DistillLlama70b",
+		Provider:            ProviderGROQ,
+		APIModel:            "deepseek-r1-distill-llama-70b",
+		CostPer1MIn:         0.75,
+		CostPer1MInCached:   0,
+		CostPer1MOutCached:  0,
+		CostPer1MOut:        0.99,
+		ContextWindow:       128_000,
+		CanReason:           true,
+		SupportsAttachments: false,
 	},
 }

internal/llm/models/models.go 🔗

@@ -8,17 +8,18 @@ type (
 )
 
 type Model struct {
-	ID                 ModelID       `json:"id"`
-	Name               string        `json:"name"`
-	Provider           ModelProvider `json:"provider"`
-	APIModel           string        `json:"api_model"`
-	CostPer1MIn        float64       `json:"cost_per_1m_in"`
-	CostPer1MOut       float64       `json:"cost_per_1m_out"`
-	CostPer1MInCached  float64       `json:"cost_per_1m_in_cached"`
-	CostPer1MOutCached float64       `json:"cost_per_1m_out_cached"`
-	ContextWindow      int64         `json:"context_window"`
-	DefaultMaxTokens   int64         `json:"default_max_tokens"`
-	CanReason          bool          `json:"can_reason"`
+	ID                  ModelID       `json:"id"`
+	Name                string        `json:"name"`
+	Provider            ModelProvider `json:"provider"`
+	APIModel            string        `json:"api_model"`
+	CostPer1MIn         float64       `json:"cost_per_1m_in"`
+	CostPer1MOut        float64       `json:"cost_per_1m_out"`
+	CostPer1MInCached   float64       `json:"cost_per_1m_in_cached"`
+	CostPer1MOutCached  float64       `json:"cost_per_1m_out_cached"`
+	ContextWindow       int64         `json:"context_window"`
+	DefaultMaxTokens    int64         `json:"default_max_tokens"`
+	CanReason           bool          `json:"can_reason"`
+	SupportsAttachments bool          `json:"supports_attachments"`
 }
 
 // Model IDs
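
The new flag lets callers gate image handling per model instead of hard-coding provider checks; agent.Run consults it before forwarding attachments. A small sketch using the exported model maps, as elsewhere in this change:

	m := models.OpenAIModels[models.GPT4o]
	if m.SupportsAttachments {
		// safe to include message.BinaryContent parts for this model
	}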

internal/llm/models/openai.go 🔗

@@ -19,151 +19,163 @@ const (
 
 var OpenAIModels = map[ModelID]Model{
 	GPT41: {
-		ID:                 GPT41,
-		Name:               "GPT 4.1",
-		Provider:           ProviderOpenAI,
-		APIModel:           "gpt-4.1",
-		CostPer1MIn:        2.00,
-		CostPer1MInCached:  0.50,
-		CostPer1MOutCached: 0.0,
-		CostPer1MOut:       8.00,
-		ContextWindow:      1_047_576,
-		DefaultMaxTokens:   20000,
+		ID:                  GPT41,
+		Name:                "GPT 4.1",
+		Provider:            ProviderOpenAI,
+		APIModel:            "gpt-4.1",
+		CostPer1MIn:         2.00,
+		CostPer1MInCached:   0.50,
+		CostPer1MOutCached:  0.0,
+		CostPer1MOut:        8.00,
+		ContextWindow:       1_047_576,
+		DefaultMaxTokens:    20000,
+		SupportsAttachments: true,
 	},
 	GPT41Mini: {
-		ID:                 GPT41Mini,
-		Name:               "GPT 4.1 mini",
-		Provider:           ProviderOpenAI,
-		APIModel:           "gpt-4.1",
-		CostPer1MIn:        0.40,
-		CostPer1MInCached:  0.10,
-		CostPer1MOutCached: 0.0,
-		CostPer1MOut:       1.60,
-		ContextWindow:      200_000,
-		DefaultMaxTokens:   20000,
+		ID:                  GPT41Mini,
+		Name:                "GPT 4.1 mini",
+		Provider:            ProviderOpenAI,
+		APIModel:            "gpt-4.1",
+		CostPer1MIn:         0.40,
+		CostPer1MInCached:   0.10,
+		CostPer1MOutCached:  0.0,
+		CostPer1MOut:        1.60,
+		ContextWindow:       200_000,
+		DefaultMaxTokens:    20000,
+		SupportsAttachments: true,
 	},
 	GPT41Nano: {
-		ID:                 GPT41Nano,
-		Name:               "GPT 4.1 nano",
-		Provider:           ProviderOpenAI,
-		APIModel:           "gpt-4.1-nano",
-		CostPer1MIn:        0.10,
-		CostPer1MInCached:  0.025,
-		CostPer1MOutCached: 0.0,
-		CostPer1MOut:       0.40,
-		ContextWindow:      1_047_576,
-		DefaultMaxTokens:   20000,
+		ID:                  GPT41Nano,
+		Name:                "GPT 4.1 nano",
+		Provider:            ProviderOpenAI,
+		APIModel:            "gpt-4.1-nano",
+		CostPer1MIn:         0.10,
+		CostPer1MInCached:   0.025,
+		CostPer1MOutCached:  0.0,
+		CostPer1MOut:        0.40,
+		ContextWindow:       1_047_576,
+		DefaultMaxTokens:    20000,
+		SupportsAttachments: true,
 	},
 	GPT45Preview: {
-		ID:                 GPT45Preview,
-		Name:               "GPT 4.5 preview",
-		Provider:           ProviderOpenAI,
-		APIModel:           "gpt-4.5-preview",
-		CostPer1MIn:        75.00,
-		CostPer1MInCached:  37.50,
-		CostPer1MOutCached: 0.0,
-		CostPer1MOut:       150.00,
-		ContextWindow:      128_000,
-		DefaultMaxTokens:   15000,
+		ID:                  GPT45Preview,
+		Name:                "GPT 4.5 preview",
+		Provider:            ProviderOpenAI,
+		APIModel:            "gpt-4.5-preview",
+		CostPer1MIn:         75.00,
+		CostPer1MInCached:   37.50,
+		CostPer1MOutCached:  0.0,
+		CostPer1MOut:        150.00,
+		ContextWindow:       128_000,
+		DefaultMaxTokens:    15000,
+		SupportsAttachments: true,
 	},
 	GPT4o: {
-		ID:                 GPT4o,
-		Name:               "GPT 4o",
-		Provider:           ProviderOpenAI,
-		APIModel:           "gpt-4o",
-		CostPer1MIn:        2.50,
-		CostPer1MInCached:  1.25,
-		CostPer1MOutCached: 0.0,
-		CostPer1MOut:       10.00,
-		ContextWindow:      128_000,
-		DefaultMaxTokens:   4096,
+		ID:                  GPT4o,
+		Name:                "GPT 4o",
+		Provider:            ProviderOpenAI,
+		APIModel:            "gpt-4o",
+		CostPer1MIn:         2.50,
+		CostPer1MInCached:   1.25,
+		CostPer1MOutCached:  0.0,
+		CostPer1MOut:        10.00,
+		ContextWindow:       128_000,
+		DefaultMaxTokens:    4096,
+		SupportsAttachments: true,
 	},
 	GPT4oMini: {
-		ID:                 GPT4oMini,
-		Name:               "GPT 4o mini",
-		Provider:           ProviderOpenAI,
-		APIModel:           "gpt-4o-mini",
-		CostPer1MIn:        0.15,
-		CostPer1MInCached:  0.075,
-		CostPer1MOutCached: 0.0,
-		CostPer1MOut:       0.60,
-		ContextWindow:      128_000,
+		ID:                  GPT4oMini,
+		Name:                "GPT 4o mini",
+		Provider:            ProviderOpenAI,
+		APIModel:            "gpt-4o-mini",
+		CostPer1MIn:         0.15,
+		CostPer1MInCached:   0.075,
+		CostPer1MOutCached:  0.0,
+		CostPer1MOut:        0.60,
+		ContextWindow:       128_000,
+		SupportsAttachments: true,
 	},
 	O1: {
-		ID:                 O1,
-		Name:               "O1",
-		Provider:           ProviderOpenAI,
-		APIModel:           "o1",
-		CostPer1MIn:        15.00,
-		CostPer1MInCached:  7.50,
-		CostPer1MOutCached: 0.0,
-		CostPer1MOut:       60.00,
-		ContextWindow:      200_000,
-		DefaultMaxTokens:   50000,
-		CanReason:          true,
+		ID:                  O1,
+		Name:                "O1",
+		Provider:            ProviderOpenAI,
+		APIModel:            "o1",
+		CostPer1MIn:         15.00,
+		CostPer1MInCached:   7.50,
+		CostPer1MOutCached:  0.0,
+		CostPer1MOut:        60.00,
+		ContextWindow:       200_000,
+		DefaultMaxTokens:    50000,
+		CanReason:           true,
+		SupportsAttachments: true,
 	},
 	O1Pro: {
-		ID:                 O1Pro,
-		Name:               "o1 pro",
-		Provider:           ProviderOpenAI,
-		APIModel:           "o1-pro",
-		CostPer1MIn:        150.00,
-		CostPer1MInCached:  0.0,
-		CostPer1MOutCached: 0.0,
-		CostPer1MOut:       600.00,
-		ContextWindow:      200_000,
-		DefaultMaxTokens:   50000,
-		CanReason:          true,
+		ID:                  O1Pro,
+		Name:                "o1 pro",
+		Provider:            ProviderOpenAI,
+		APIModel:            "o1-pro",
+		CostPer1MIn:         150.00,
+		CostPer1MInCached:   0.0,
+		CostPer1MOutCached:  0.0,
+		CostPer1MOut:        600.00,
+		ContextWindow:       200_000,
+		DefaultMaxTokens:    50000,
+		CanReason:           true,
+		SupportsAttachments: true,
 	},
 	O1Mini: {
-		ID:                 O1Mini,
-		Name:               "o1 mini",
-		Provider:           ProviderOpenAI,
-		APIModel:           "o1-mini",
-		CostPer1MIn:        1.10,
-		CostPer1MInCached:  0.55,
-		CostPer1MOutCached: 0.0,
-		CostPer1MOut:       4.40,
-		ContextWindow:      128_000,
-		DefaultMaxTokens:   50000,
-		CanReason:          true,
+		ID:                  O1Mini,
+		Name:                "o1 mini",
+		Provider:            ProviderOpenAI,
+		APIModel:            "o1-mini",
+		CostPer1MIn:         1.10,
+		CostPer1MInCached:   0.55,
+		CostPer1MOutCached:  0.0,
+		CostPer1MOut:        4.40,
+		ContextWindow:       128_000,
+		DefaultMaxTokens:    50000,
+		CanReason:           true,
+		SupportsAttachments: true,
 	},
 	O3: {
-		ID:                 O3,
-		Name:               "o3",
-		Provider:           ProviderOpenAI,
-		APIModel:           "o3",
-		CostPer1MIn:        10.00,
-		CostPer1MInCached:  2.50,
-		CostPer1MOutCached: 0.0,
-		CostPer1MOut:       40.00,
-		ContextWindow:      200_000,
-		CanReason:          true,
+		ID:                  O3,
+		Name:                "o3",
+		Provider:            ProviderOpenAI,
+		APIModel:            "o3",
+		CostPer1MIn:         10.00,
+		CostPer1MInCached:   2.50,
+		CostPer1MOutCached:  0.0,
+		CostPer1MOut:        40.00,
+		ContextWindow:       200_000,
+		CanReason:           true,
+		SupportsAttachments: true,
 	},
 	O3Mini: {
-		ID:                 O3Mini,
-		Name:               "o3 mini",
-		Provider:           ProviderOpenAI,
-		APIModel:           "o3-mini",
-		CostPer1MIn:        1.10,
-		CostPer1MInCached:  0.55,
-		CostPer1MOutCached: 0.0,
-		CostPer1MOut:       4.40,
-		ContextWindow:      200_000,
-		DefaultMaxTokens:   50000,
-		CanReason:          true,
+		ID:                  O3Mini,
+		Name:                "o3 mini",
+		Provider:            ProviderOpenAI,
+		APIModel:            "o3-mini",
+		CostPer1MIn:         1.10,
+		CostPer1MInCached:   0.55,
+		CostPer1MOutCached:  0.0,
+		CostPer1MOut:        4.40,
+		ContextWindow:       200_000,
+		DefaultMaxTokens:    50000,
+		CanReason:           true,
+		SupportsAttachments: false,
 	},
 	O4Mini: {
-		ID:                 O4Mini,
-		Name:               "o4 mini",
-		Provider:           ProviderOpenAI,
-		APIModel:           "o4-mini",
-		CostPer1MIn:        1.10,
-		CostPer1MInCached:  0.275,
-		CostPer1MOutCached: 0.0,
-		CostPer1MOut:       4.40,
-		ContextWindow:      128_000,
-		DefaultMaxTokens:   50000,
-		CanReason:          true,
+		ID:                  O4Mini,
+		Name:                "o4 mini",
+		Provider:            ProviderOpenAI,
+		APIModel:            "o4-mini",
+		CostPer1MIn:         1.10,
+		CostPer1MInCached:   0.275,
+		CostPer1MOutCached:  0.0,
+		CostPer1MOut:        4.40,
+		ContextWindow:       128_000,
+		DefaultMaxTokens:    50000,
+		CanReason:           true,
+		SupportsAttachments: true,
 	},
 }

internal/llm/prompt/title.go 🔗

@@ -8,5 +8,6 @@ func TitlePrompt(_ models.ModelProvider) string {
 - the title should be a summary of the user's message
 - it should be one line long
 - do not use quotes or colons
-- the entire text you return will be used as the title`
+- the entire text you return will be used as the title
+- never return anything that is more than one sentence (one line) long`
 }

internal/llm/provider/anthropic.go 🔗

@@ -13,6 +13,7 @@ import (
 	"github.com/anthropics/anthropic-sdk-go/bedrock"
 	"github.com/anthropics/anthropic-sdk-go/option"
 	"github.com/opencode-ai/opencode/internal/config"
+	"github.com/opencode-ai/opencode/internal/llm/models"
 	"github.com/opencode-ai/opencode/internal/llm/tools"
 	"github.com/opencode-ai/opencode/internal/logging"
 	"github.com/opencode-ai/opencode/internal/message"
@@ -70,7 +71,14 @@ func (a *anthropicClient) convertMessages(messages []message.Message) (anthropic
 					Type: "ephemeral",
 				}
 			}
-			anthropicMessages = append(anthropicMessages, anthropic.NewUserMessage(content))
+			var contentBlocks []anthropic.ContentBlockParamUnion
+			contentBlocks = append(contentBlocks, content)
+			for _, binaryContent := range msg.BinaryContent() {
+				base64Image := binaryContent.String(models.ProviderAnthropic)
+				imageBlock := anthropic.NewImageBlockBase64(binaryContent.MIMEType, base64Image)
+				contentBlocks = append(contentBlocks, imageBlock)
+			}
+			anthropicMessages = append(anthropicMessages, anthropic.NewUserMessage(contentBlocks...))
 
 		case message.Assistant:
 			blocks := []anthropic.ContentBlockParamUnion{}
@@ -196,9 +204,10 @@ func (a *anthropicClient) send(ctx context.Context, messages []message.Message,
 	preparedMessages := a.preparedMessages(a.convertMessages(messages), a.convertTools(tools))
 	cfg := config.Get()
 	if cfg.Debug {
-		// jsonData, _ := json.Marshal(preparedMessages)
-		// logging.Debug("Prepared messages", "messages", string(jsonData))
+		jsonData, _ := json.Marshal(preparedMessages)
+		logging.Debug("Prepared messages", "messages", string(jsonData))
 	}
+
 	attempts := 0
 	for {
 		attempts++
@@ -208,6 +217,7 @@ func (a *anthropicClient) send(ctx context.Context, messages []message.Message,
 		)
 		// If there is an error we are going to see if we can retry the call
 		if err != nil {
+			logging.Error("Error in Anthropic API call", "error", err)
 			retry, after, retryErr := a.shouldRetry(attempts, err)
 			if retryErr != nil {
 				return nil, retryErr

internal/llm/provider/bedrock.go 🔗

@@ -55,7 +55,7 @@ func newBedrockClient(opts providerClientOptions) BedrockClient {
 	if strings.Contains(string(opts.model.APIModel), "anthropic") {
 		// Create Anthropic client with Bedrock configuration
 		anthropicOpts := opts
-		anthropicOpts.anthropicOptions = append(anthropicOpts.anthropicOptions, 
+		anthropicOpts.anthropicOptions = append(anthropicOpts.anthropicOptions,
 			WithAnthropicBedrock(true),
 			WithAnthropicDisableCache(),
 		)
@@ -84,7 +84,7 @@ func (b *bedrockClient) send(ctx context.Context, messages []message.Message, to
 
 func (b *bedrockClient) stream(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent {
 	eventChan := make(chan ProviderEvent)
-	
+
 	if b.childProvider == nil {
 		go func() {
 			eventChan <- ProviderEvent{
@@ -95,6 +95,7 @@ func (b *bedrockClient) stream(ctx context.Context, messages []message.Message,
 		}()
 		return eventChan
 	}
-	
+
 	return b.childProvider.stream(ctx, messages, tools)
-}
+}
+

internal/llm/provider/gemini.go 🔗

@@ -57,11 +57,16 @@ func (g *geminiClient) convertMessages(messages []message.Message) []*genai.Cont
 	for _, msg := range messages {
 		switch msg.Role {
 		case message.User:
+			var parts []genai.Part
+			parts = append(parts, genai.Text(msg.Content().String()))
+			for _, binaryContent := range msg.BinaryContent() {
+				imageFormat := strings.Split(binaryContent.MIMEType, "/")
+				parts = append(parts, genai.ImageData(imageFormat[1], binaryContent.Data))
+			}
 			history = append(history, &genai.Content{
-				Parts: []genai.Part{genai.Text(msg.Content().String())},
+				Parts: parts,
 				Role:  "user",
 			})
-
 		case message.Assistant:
 			content := &genai.Content{
 				Role:  "model",
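
genai.ImageData takes a bare format string rather than a full MIME type, hence the split above: "image/png" yields "png". A one-line sketch of the conversion (pngBytes is illustrative):

	format := strings.Split("image/png", "/")[1] // "png"
	part := genai.ImageData(format, pngBytes)    // genai.Blob, usable as a user-message Part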

internal/llm/provider/openai.go 🔗

@@ -12,6 +12,7 @@ import (
 	"github.com/openai/openai-go/option"
 	"github.com/openai/openai-go/shared"
 	"github.com/opencode-ai/opencode/internal/config"
+	"github.com/opencode-ai/opencode/internal/llm/models"
 	"github.com/opencode-ai/opencode/internal/llm/tools"
 	"github.com/opencode-ai/opencode/internal/logging"
 	"github.com/opencode-ai/opencode/internal/message"
@@ -71,7 +72,17 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
 	for _, msg := range messages {
 		switch msg.Role {
 		case message.User:
-			openaiMessages = append(openaiMessages, openai.UserMessage(msg.Content().String()))
+			var content []openai.ChatCompletionContentPartUnionParam
+			textBlock := openai.ChatCompletionContentPartTextParam{Text: msg.Content().String()}
+			content = append(content, openai.ChatCompletionContentPartUnionParam{OfText: &textBlock})
+			for _, binaryContent := range msg.BinaryContent() {
+				imageURL := openai.ChatCompletionContentPartImageImageURLParam{URL: binaryContent.String(models.ProviderOpenAI)}
+				imageBlock := openai.ChatCompletionContentPartImageParam{ImageURL: imageURL}
+
+				content = append(content, openai.ChatCompletionContentPartUnionParam{OfImageURL: &imageBlock})
+			}
+
+			openaiMessages = append(openaiMessages, openai.UserMessage(content))
 
 		case message.Assistant:
 			assistantMsg := openai.ChatCompletionAssistantMessageParam{

internal/message/attachment.go 🔗

@@ -0,0 +1,8 @@
+package message
+
+type Attachment struct {
+	FilePath string
+	FileName string
+	MimeType string
+	Content  []byte
+}
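
Attachment is a plain value type, so populating one from disk takes only a few lines. A hedged sketch of what a picker might do (attachmentFromFile is illustrative, not the PR's actual helper; assumes imports net/http, os, and path/filepath):

	func attachmentFromFile(path string) (message.Attachment, error) {
		data, err := os.ReadFile(path)
		if err != nil {
			return message.Attachment{}, err
		}
		return message.Attachment{
			FilePath: path,
			FileName: filepath.Base(path),
			MimeType: http.DetectContentType(data), // sniffs image/png, image/jpeg, etc.
			Content:  data,
		}, nil
	}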

internal/message/content.go 🔗

@@ -66,13 +66,17 @@ func (iuc ImageURLContent) String() string {
 func (ImageURLContent) isPart() {}
 
 type BinaryContent struct {
+	Path     string
 	MIMEType string
 	Data     []byte
 }
 
-func (bc BinaryContent) String() string {
+func (bc BinaryContent) String(provider models.ModelProvider) string {
 	base64Encoded := base64.StdEncoding.EncodeToString(bc.Data)
-	return "data:" + bc.MIMEType + ";base64," + base64Encoded
+	if provider == models.ProviderOpenAI {
+		return "data:" + bc.MIMEType + ";base64," + base64Encoded
+	}
+	return base64Encoded
 }
 
 func (BinaryContent) isPart() {}
@@ -110,7 +114,6 @@ type Message struct {
 	SessionID string
 	Parts     []ContentPart
 	Model     models.ModelID
-
 	CreatedAt int64
 	UpdatedAt int64
 }
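
The provider argument exists because OpenAI's image_url content part expects a full data URL while Anthropic's base64 image block wants only the payload. For the same bytes (output truncated for illustration):

	bc := message.BinaryContent{MIMEType: "image/png", Data: pngBytes}
	bc.String(models.ProviderOpenAI)    // "data:image/png;base64,iVBORw0..."
	bc.String(models.ProviderAnthropic) // "iVBORw0..."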

internal/message/message.go 🔗

@@ -64,7 +64,6 @@ func (s *service) Create(ctx context.Context, sessionID string, params CreateMes
 	if err != nil {
 		return Message{}, err
 	}
-
 	dbMessage, err := s.q.CreateMessage(ctx, db.CreateMessageParams{
 		ID:        uuid.New().String(),
 		SessionID: sessionID,

internal/tui/components/chat/chat.go 🔗

@@ -7,6 +7,7 @@ import (
 	"github.com/charmbracelet/lipgloss"
 	"github.com/charmbracelet/x/ansi"
 	"github.com/opencode-ai/opencode/internal/config"
+	"github.com/opencode-ai/opencode/internal/message"
 	"github.com/opencode-ai/opencode/internal/session"
 	"github.com/opencode-ai/opencode/internal/tui/styles"
 	"github.com/opencode-ai/opencode/internal/tui/theme"
@@ -14,7 +15,8 @@ import (
 )
 
 type SendMsg struct {
-	Text string
+	Text        string
+	Attachments []message.Attachment
 }
 
 type SessionSelectedMsg = session.Session

internal/tui/components/chat/editor.go 🔗

@@ -1,14 +1,19 @@
 package chat
 
 import (
+	"fmt"
 	"os"
 	"os/exec"
+	"slices"
+	"unicode"
 
 	"github.com/charmbracelet/bubbles/key"
 	"github.com/charmbracelet/bubbles/textarea"
 	tea "github.com/charmbracelet/bubbletea"
 	"github.com/charmbracelet/lipgloss"
 	"github.com/opencode-ai/opencode/internal/app"
+	"github.com/opencode-ai/opencode/internal/logging"
+	"github.com/opencode-ai/opencode/internal/message"
 	"github.com/opencode-ai/opencode/internal/session"
 	"github.com/opencode-ai/opencode/internal/tui/components/dialog"
 	"github.com/opencode-ai/opencode/internal/tui/layout"
@@ -18,9 +23,13 @@ import (
 )
 
 type editorCmp struct {
-	app      *app.App
-	session  session.Session
-	textarea textarea.Model
+	width       int
+	height      int
+	app         *app.App
+	session     session.Session
+	textarea    textarea.Model
+	attachments []message.Attachment
+	deleteMode  bool
 }
 
 type EditorKeyMaps struct {
@@ -33,6 +42,11 @@ type bluredEditorKeyMaps struct {
 	Focus      key.Binding
 	OpenEditor key.Binding
 }
+type DeleteAttachmentKeyMaps struct {
+	AttachmentDeleteMode key.Binding
+	Escape               key.Binding
+	DeleteAllAttachments key.Binding
+}
 
 var editorMaps = EditorKeyMaps{
 	Send: key.NewBinding(
@@ -45,7 +59,26 @@ var editorMaps = EditorKeyMaps{
 	),
 }
 
-func openEditor() tea.Cmd {
+var DeleteKeyMaps = DeleteAttachmentKeyMaps{
+	AttachmentDeleteMode: key.NewBinding(
+		key.WithKeys("ctrl+r"),
+		key.WithHelp("ctrl+r+{i}", "delete attachment at index i"),
+	),
+	Escape: key.NewBinding(
+		key.WithKeys("esc"),
+		key.WithHelp("esc", "cancel delete mode"),
+	),
+	DeleteAllAttachments: key.NewBinding(
+		key.WithKeys("r"),
+		key.WithHelp("ctrl+r+r", "delete all attachments"),
+	),
+}
+
+const (
+	maxAttachments = 5
+)
+
+func (m *editorCmp) openEditor() tea.Cmd {
 	editor := os.Getenv("EDITOR")
 	if editor == "" {
 		editor = "nvim"
@@ -72,8 +105,11 @@ func openEditor() tea.Cmd {
 			return util.ReportWarn("Message is empty")
 		}
 		os.Remove(tmpfile.Name())
+		attachments := m.attachments
+		m.attachments = nil
 		return SendMsg{
-			Text: string(content),
+			Text:        string(content),
+			Attachments: attachments,
 		}
 	})
 }
@@ -89,12 +125,16 @@ func (m *editorCmp) send() tea.Cmd {
 
 	value := m.textarea.Value()
 	m.textarea.Reset()
+	attachments := m.attachments
+
+	m.attachments = nil
 	if value == "" {
 		return nil
 	}
 	return tea.Batch(
 		util.CmdHandler(SendMsg{
-			Text: value,
+			Text:        value,
+			Attachments: attachments,
 		}),
 	)
 }
@@ -110,7 +150,34 @@ func (m *editorCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 			m.session = msg
 		}
 		return m, nil
+	case dialog.AttachmentAddedMsg:
+		if len(m.attachments) >= maxAttachments {
+			logging.ErrorPersist(fmt.Sprintf("cannot add more than %d images", maxAttachments))
+			return m, cmd
+		}
+		m.attachments = append(m.attachments, msg.Attachment)
 	case tea.KeyMsg:
+		if key.Matches(msg, DeleteKeyMaps.AttachmentDeleteMode) {
+			m.deleteMode = true
+			return m, nil
+		}
+		if key.Matches(msg, DeleteKeyMaps.DeleteAllAttachments) && m.deleteMode {
+			m.deleteMode = false
+			m.attachments = nil
+			return m, nil
+		}
+		if m.deleteMode && len(msg.Runes) > 0 && unicode.IsDigit(msg.Runes[0]) {
+			num := int(msg.Runes[0] - '0')
+			m.deleteMode = false
+			if num < len(m.attachments) {
+				m.attachments = slices.Delete(m.attachments, num, num+1)
+				return m, nil
+			}
+		}
 		if key.Matches(msg, messageKeys.PageUp) || key.Matches(msg, messageKeys.PageDown) ||
 			key.Matches(msg, messageKeys.HalfPageUp) || key.Matches(msg, messageKeys.HalfPageDown) {
 			return m, nil
@@ -119,7 +186,11 @@ func (m *editorCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 			if m.app.CoderAgent.IsSessionBusy(m.session.ID) {
 				return m, util.ReportWarn("Agent is working, please wait...")
 			}
-			return m, openEditor()
+			return m, m.openEditor()
+		}
+		if key.Matches(msg, DeleteKeyMaps.Escape) {
+			m.deleteMode = false
+			return m, nil
 		}
 		// Handle Enter key
 		if m.textarea.Focused() && key.Matches(msg, editorMaps.Send) {
@@ -133,6 +204,7 @@ func (m *editorCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 				return m, m.send()
 			}
 		}
+
 	}
 	m.textarea, cmd = m.textarea.Update(msg)
 	return m, cmd
@@ -147,12 +219,23 @@ func (m *editorCmp) View() string {
 		Bold(true).
 		Foreground(t.Primary())
 
-	return lipgloss.JoinHorizontal(lipgloss.Top, style.Render(">"), m.textarea.View())
+	if len(m.attachments) == 0 {
+		return lipgloss.JoinHorizontal(lipgloss.Top, style.Render(">"), m.textarea.View())
+	}
+	m.textarea.SetHeight(m.height - 1)
+	return lipgloss.JoinVertical(lipgloss.Top,
+		m.attachmentsContent(),
+		lipgloss.JoinHorizontal(lipgloss.Top, style.Render(">"),
+			m.textarea.View()),
+	)
 }
 
 func (m *editorCmp) SetSize(width, height int) tea.Cmd {
+	m.width = width
+	m.height = height
 	m.textarea.SetWidth(width - 3) // account for the prompt and padding right
 	m.textarea.SetHeight(height)
 	return nil
 }
 
@@ -160,9 +243,33 @@ func (m *editorCmp) GetSize() (int, int) {
 	return m.textarea.Width(), m.textarea.Height()
 }
 
+func (m *editorCmp) attachmentsContent() string {
+	var styledAttachments []string
+	t := theme.CurrentTheme()
+	attachmentStyles := styles.BaseStyle().
+		MarginLeft(1).
+		Background(t.TextMuted()).
+		Foreground(t.Text())
+	for i, attachment := range m.attachments {
+		var filename string
+		if len(attachment.FileName) > 10 {
+			filename = fmt.Sprintf(" %s %s...", styles.DocumentIcon, attachment.FileName[0:7])
+		} else {
+			filename = fmt.Sprintf(" %s %s", styles.DocumentIcon, attachment.FileName)
+		}
+		if m.deleteMode {
+			filename = fmt.Sprintf("%d%s", i, filename)
+		}
+		styledAttachments = append(styledAttachments, attachmentStyles.Render(filename))
+	}
+	content := lipgloss.JoinHorizontal(lipgloss.Left, styledAttachments...)
+	return content
+}
+
 func (m *editorCmp) BindingKeys() []key.Binding {
 	bindings := []key.Binding{}
 	bindings = append(bindings, layout.KeyMapToSlice(editorMaps)...)
+	bindings = append(bindings, layout.KeyMapToSlice(DeleteKeyMaps)...)
 	return bindings
 }
 
@@ -198,10 +305,8 @@ func CreateTextArea(existing *textarea.Model) textarea.Model {
 
 func NewEditorCmp(app *app.App) tea.Model {
 	ta := CreateTextArea(nil)
-
 	return &editorCmp{
 		app:      app,
 		textarea: ta,
 	}
 }
-

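Note the hand-off pattern shared by send() and openEditor(): the editor snapshots its attachment slice and resets it before emitting SendMsg, so attachments are delivered exactly once. Reduced to its core (names from this diff, simplified for illustration):

    // Sketch: hand attachments off exactly once per send.
    attachments := m.attachments // snapshot the pending attachments
    m.attachments = nil          // the next message starts with a clean slate
    return util.CmdHandler(SendMsg{Text: value, Attachments: attachments})
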
internal/tui/components/chat/list.go 🔗

@@ -35,6 +35,7 @@ type messagesCmp struct {
 	cachedContent map[string]cacheItem
 	spinner       spinner.Model
 	rendering     bool
+	attachments   viewport.Model
 }
 type renderFinishedMsg struct{}
 
@@ -229,12 +230,15 @@ func (m *messagesCmp) renderView() {
 
 	messages := make([]string, 0)
 	for _, v := range m.uiMessages {
-		messages = append(messages, v.content,
+		messages = append(messages, lipgloss.JoinVertical(lipgloss.Left, v.content),
 			baseStyle.
 				Width(m.width).
-				Render(""),
+				Render(
+					"",
+				),
 		)
 	}
+
 	m.viewport.SetContent(
 		baseStyle.
 			Width(m.width).
@@ -413,6 +417,8 @@ func (m *messagesCmp) SetSize(width, height int) tea.Cmd {
 	m.height = height
 	m.viewport.Width = width
 	m.viewport.Height = height - 2
+	m.attachments.Width = width + 40
+	m.attachments.Height = 3
 	m.rerender()
 	return nil
 }
@@ -431,7 +437,9 @@ func (m *messagesCmp) SetSession(session session.Session) tea.Cmd {
 		return util.ReportError(err)
 	}
 	m.messages = messages
-	m.currentMsgID = m.messages[len(m.messages)-1].ID
+	if len(m.messages) > 0 {
+		m.currentMsgID = m.messages[len(m.messages)-1].ID
+	}
 	delete(m.cachedContent, m.currentMsgID)
 	m.rendering = true
 	return func() tea.Msg {
@@ -453,6 +461,7 @@ func NewMessagesCmp(app *app.App) tea.Model {
 	s := spinner.New()
 	s.Spinner = spinner.Pulse
 	vp := viewport.New(0, 0)
+	attachments := viewport.New(0, 0)
 	vp.KeyMap.PageUp = messageKeys.PageUp
 	vp.KeyMap.PageDown = messageKeys.PageDown
 	vp.KeyMap.HalfPageUp = messageKeys.HalfPageUp
@@ -462,5 +471,6 @@ func NewMessagesCmp(app *app.App) tea.Model {
 		cachedContent: make(map[string]cacheItem),
 		viewport:      vp,
 		spinner:       s,
+		attachments:   attachments,
 	}
 }

internal/tui/components/chat/message.go 🔗

@@ -80,7 +80,29 @@ func renderMessage(msg string, isUser bool, isFocused bool, width int, info ...s
 }
 
 func renderUserMessage(msg message.Message, isFocused bool, width int, position int) uiMessage {
-	content := renderMessage(msg.Content().String(), true, isFocused, width)
+	var styledAttachments []string
+	t := theme.CurrentTheme()
+	attachmentStyles := styles.BaseStyle().
+		MarginLeft(1).
+		Background(t.TextMuted()).
+		Foreground(t.Text())
+	for _, attachment := range msg.BinaryContent() {
+		file := filepath.Base(attachment.Path)
+		var filename string
+		if len(file) > 10 {
+			filename = fmt.Sprintf(" %s %s...", styles.DocumentIcon, file[0:7])
+		} else {
+			filename = fmt.Sprintf(" %s %s", styles.DocumentIcon, file)
+		}
+		styledAttachments = append(styledAttachments, attachmentStyles.Render(filename))
+	}
+	content := ""
+	if len(styledAttachments) > 0 {
+		attachmentContent := styles.BaseStyle().Width(width).Render(lipgloss.JoinHorizontal(lipgloss.Left, styledAttachments...))
+		content = renderMessage(msg.Content().String(), true, isFocused, width, attachmentContent)
+	} else {
+		content = renderMessage(msg.Content().String(), true, isFocused, width)
+	}
 	userMsg := uiMessage{
 		ID:          msg.ID,
 		messageType: userMessageType,

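The badge rendering above duplicates the truncation rule in editor.go's attachmentsContent (icon, 10-character cutoff, 7 characters plus ellipsis). If the two ever need to stay in sync, a small shared helper would do; a possible sketch (the helper name is hypothetical, not part of this diff):

    // prettyAttachmentName renders an attachment badge label, truncating long names.
    func prettyAttachmentName(name string) string {
        if len(name) > 10 {
            return fmt.Sprintf(" %s %s...", styles.DocumentIcon, name[0:7])
        }
        return fmt.Sprintf(" %s %s", styles.DocumentIcon, name)
    }
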
internal/tui/components/dialog/filepicker.go 🔗

@@ -0,0 +1,477 @@
+package dialog
+
+import (
+	"fmt"
+	"net/http"
+	"os"
+	"path/filepath"
+	"sort"
+	"strings"
+	"time"
+
+	"github.com/charmbracelet/bubbles/key"
+	"github.com/charmbracelet/bubbles/textinput"
+	"github.com/charmbracelet/bubbles/viewport"
+	tea "github.com/charmbracelet/bubbletea"
+	"github.com/charmbracelet/lipgloss"
+	"github.com/opencode-ai/opencode/internal/app"
+	"github.com/opencode-ai/opencode/internal/config"
+	"github.com/opencode-ai/opencode/internal/logging"
+	"github.com/opencode-ai/opencode/internal/message"
+	"github.com/opencode-ai/opencode/internal/tui/image"
+	"github.com/opencode-ai/opencode/internal/tui/styles"
+	"github.com/opencode-ai/opencode/internal/tui/theme"
+	"github.com/opencode-ai/opencode/internal/tui/util"
+)
+
+const (
+	maxAttachmentSize = int64(5 * 1024 * 1024) // 5MB
+	downArrow         = "down"
+	upArrow           = "up"
+)
+
+type FilePickerKeyMap struct {
+	Enter          key.Binding
+	Down           key.Binding
+	Up             key.Binding
+	Forward        key.Binding
+	Backward       key.Binding
+	OpenFilePicker key.Binding
+	Esc            key.Binding
+	InsertCWD      key.Binding
+}
+
+var filePickerKeyMap = FilePickerKeyMap{
+	Enter: key.NewBinding(
+		key.WithKeys("enter"),
+		key.WithHelp("enter", "select file/enter directory"),
+	),
+	Down: key.NewBinding(
+		key.WithKeys("j", downArrow),
+		key.WithHelp("↓/j", "down"),
+	),
+	Up: key.NewBinding(
+		key.WithKeys("k", upArrow),
+		key.WithHelp("↑/k", "up"),
+	),
+	Forward: key.NewBinding(
+		key.WithKeys("l"),
+		key.WithHelp("l", "enter directory"),
+	),
+	Backward: key.NewBinding(
+		key.WithKeys("h", "backspace"),
+		key.WithHelp("h/backspace", "go back"),
+	),
+	OpenFilePicker: key.NewBinding(
+		key.WithKeys("ctrl+f"),
+		key.WithHelp("ctrl+f", "open file picker"),
+	),
+	Esc: key.NewBinding(
+		key.WithKeys("esc"),
+		key.WithHelp("esc", "close/exit"),
+	),
+	InsertCWD: key.NewBinding(
+		key.WithKeys("i"),
+		key.WithHelp("i", "manual path input"),
+	),
+}
+
+type filepickerCmp struct {
+	basePath       string
+	width          int
+	height         int
+	cursor         int
+	err            error
+	cursorChain    stack
+	viewport       viewport.Model
+	dirs           []os.DirEntry
+	cwdDetails     *DirNode
+	selectedFile   string
+	cwd            textinput.Model
+	ShowFilePicker bool
+	app            *app.App
+}
+
+type DirNode struct {
+	parent    *DirNode
+	child     *DirNode
+	directory string
+}
+type stack []int
+
+func (s stack) Push(v int) stack {
+	return append(s, v)
+}
+
+func (s stack) Pop() (stack, int) {
+	l := len(s)
+	return s[:l-1], s[l-1]
+}
+
+type AttachmentAddedMsg struct {
+	Attachment message.Attachment
+}
+
+func (f *filepickerCmp) Init() tea.Cmd {
+	return nil
+}
+
+func (f *filepickerCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
+	var cmd tea.Cmd
+	switch msg := msg.(type) {
+	case tea.WindowSizeMsg:
+		f.width = 60
+		f.height = 20
+		f.viewport.Width = 80
+		f.viewport.Height = 22
+		f.cursor = 0
+		f.getCurrentFileBelowCursor()
+	case tea.KeyMsg:
+		switch {
+		case key.Matches(msg, filePickerKeyMap.InsertCWD):
+			f.cwd.Focus()
+			return f, cmd
+		case key.Matches(msg, filePickerKeyMap.Esc):
+			if f.cwd.Focused() {
+				f.cwd.Blur()
+			} else {
+				f.cursorChain = make(stack, 0)
+				f.cursor = 0
+			}
+		case key.Matches(msg, filePickerKeyMap.Down):
+			if !f.cwd.Focused() || msg.String() == downArrow {
+				if f.cursor < len(f.dirs)-1 {
+					f.cursor++
+					f.getCurrentFileBelowCursor()
+				}
+			}
+		case key.Matches(msg, filePickerKeyMap.Up):
+			if !f.cwd.Focused() || msg.String() == upArrow {
+				if f.cursor > 0 {
+					f.cursor--
+					f.getCurrentFileBelowCursor()
+				}
+			}
+		case key.Matches(msg, filePickerKeyMap.Enter):
+			var path string
+			var isPathDir bool
+			if f.cwd.Focused() {
+				path = f.cwd.Value()
+				fileInfo, err := os.Stat(path)
+				if err != nil {
+					logging.ErrorPersist("Invalid path")
+					return f, cmd
+				}
+				isPathDir = fileInfo.IsDir()
+			} else {
+				path = filepath.Join(f.cwdDetails.directory, "/", f.dirs[f.cursor].Name())
+				isPathDir = f.dirs[f.cursor].IsDir()
+			}
+			if isPathDir {
+				path := filepath.Join(f.cwdDetails.directory, "/", f.dirs[f.cursor].Name())
+				newWorkingDir := DirNode{parent: f.cwdDetails, directory: path}
+				f.cwdDetails.child = &newWorkingDir
+				f.cwdDetails = f.cwdDetails.child
+				f.cursorChain = f.cursorChain.Push(f.cursor)
+				f.dirs = readDir(f.cwdDetails.directory, false)
+				f.cursor = 0
+				f.cwd.SetValue(f.cwdDetails.directory)
+				f.getCurrentFileBelowCursor()
+			} else {
+				f.selectedFile = path
+				return f.addAttachmentToMessage()
+			}
+		case key.Matches(msg, filePickerKeyMap.Forward):
+			if !f.cwd.Focused() {
+				if f.dirs[f.cursor].IsDir() {
+					path := filepath.Join(f.cwdDetails.directory, "/", f.dirs[f.cursor].Name())
+					newWorkingDir := DirNode{parent: f.cwdDetails, directory: path}
+					f.cwdDetails.child = &newWorkingDir
+					f.cwdDetails = f.cwdDetails.child
+					f.cursorChain = f.cursorChain.Push(f.cursor)
+					f.dirs = readDir(f.cwdDetails.directory, false)
+					f.cursor = 0
+					f.cwd.SetValue(f.cwdDetails.directory)
+					f.getCurrentFileBelowCursor()
+				}
+			}
+		case key.Matches(msg, filePickerKeyMap.Backward):
+			if !f.cwd.Focused() {
+				if len(f.cursorChain) != 0 && f.cwdDetails.parent != nil {
+					f.cursorChain, f.cursor = f.cursorChain.Pop()
+					f.cwdDetails = f.cwdDetails.parent
+					f.cwdDetails.child = nil
+					f.dirs = readDir(f.cwdDetails.directory, false)
+					f.cwd.SetValue(f.cwdDetails.directory)
+					f.getCurrentFileBelowCursor()
+				}
+			}
+		case key.Matches(msg, filePickerKeyMap.OpenFilePicker):
+			f.dirs = readDir(f.cwdDetails.directory, false)
+			f.cursor = 0
+			f.getCurrentFileBelowCursor()
+		}
+	}
+	if f.cwd.Focused() {
+		f.cwd, cmd = f.cwd.Update(msg)
+	}
+	return f, cmd
+}
+
+func (f *filepickerCmp) addAttachmentToMessage() (tea.Model, tea.Cmd) {
+	modelInfo := GetSelectedModel(config.Get())
+	if !modelInfo.SupportsAttachments {
+		logging.ErrorPersist(fmt.Sprintf("Model %s doesn't support attachments", modelInfo.Name))
+		return f, nil
+	}
+
+	// f.selectedFile always holds the full path here, whether it came from
+	// the cursor selection or from manual path input.
+	if !isExtSupported(f.selectedFile) {
+		logging.ErrorPersist("Unsupported file type")
+		return f, nil
+	}
+	isFileLarge, err := image.ValidateFileSize(f.selectedFile, maxAttachmentSize)
+	if err != nil {
+		logging.ErrorPersist("unable to read the image")
+		return f, nil
+	}
+	if isFileLarge {
+		logging.ErrorPersist("file too large, max 5MB")
+		return f, nil
+	}
+
+	content, err := os.ReadFile(f.selectedFile)
+	if err != nil {
+		logging.ErrorPersist("Unable to read selected file")
+		return f, nil
+	}
+
+	mimeBufferSize := min(512, len(content))
+	mimeType := http.DetectContentType(content[:mimeBufferSize])
+	attachment := message.Attachment{
+		FilePath: f.selectedFile,
+		FileName: filepath.Base(f.selectedFile),
+		MimeType: mimeType,
+		Content:  content,
+	}
+	f.selectedFile = ""
+	return f, util.CmdHandler(AttachmentAddedMsg{attachment})
+}
+
+func (f *filepickerCmp) View() string {
+	t := theme.CurrentTheme()
+	const maxVisibleDirs = 20
+	const maxWidth = 80
+
+	adjustedWidth := maxWidth
+	for _, file := range f.dirs {
+		if len(file.Name()) > adjustedWidth-4 { // Account for padding
+			adjustedWidth = len(file.Name()) + 4
+		}
+	}
+	adjustedWidth = max(30, min(adjustedWidth, f.width-15)) + 1
+
+	files := make([]string, 0, maxVisibleDirs)
+	startIdx := 0
+
+	if len(f.dirs) > maxVisibleDirs {
+		halfVisible := maxVisibleDirs / 2
+		if f.cursor >= halfVisible && f.cursor < len(f.dirs)-halfVisible {
+			startIdx = f.cursor - halfVisible
+		} else if f.cursor >= len(f.dirs)-halfVisible {
+			startIdx = len(f.dirs) - maxVisibleDirs
+		}
+	}
+
+	endIdx := min(startIdx+maxVisibleDirs, len(f.dirs))
+
+	for i := startIdx; i < endIdx; i++ {
+		file := f.dirs[i]
+		itemStyle := styles.BaseStyle().Width(adjustedWidth)
+
+		if i == f.cursor {
+			itemStyle = itemStyle.
+				Background(t.Primary()).
+				Foreground(t.Background()).
+				Bold(true)
+		}
+		filename := file.Name()
+
+		if len(filename) > adjustedWidth-4 {
+			filename = filename[:adjustedWidth-7] + "..."
+		}
+		if file.IsDir() {
+			filename += "/"
+		}
+
+		files = append(files, itemStyle.Padding(0, 1).Render(filename))
+	}
+
+	// Pad so the list always shows exactly maxVisibleDirs rows
+	for len(files) < maxVisibleDirs {
+		files = append(files, styles.BaseStyle().Width(adjustedWidth).Render(""))
+	}
+
+	currentPath := styles.BaseStyle().
+		Height(1).
+		Width(adjustedWidth).
+		Render(f.cwd.View())
+
+	viewportstyle := lipgloss.NewStyle().
+		Width(f.viewport.Width).
+		Background(t.Background()).
+		Border(lipgloss.RoundedBorder()).
+		BorderForeground(t.TextMuted()).
+		BorderBackground(t.Background()).
+		Padding(2).
+		Render(f.viewport.View())
+	var insertExitText string
+	if f.IsCWDFocused() {
+		insertExitText = "Press esc to exit typing path"
+	} else {
+		insertExitText = "Press i to start typing path"
+	}
+
+	content := lipgloss.JoinVertical(
+		lipgloss.Left,
+		currentPath,
+		styles.BaseStyle().Width(adjustedWidth).Render(""),
+		styles.BaseStyle().Width(adjustedWidth).Render(lipgloss.JoinVertical(lipgloss.Left, files...)),
+		styles.BaseStyle().Width(adjustedWidth).Render(""),
+		styles.BaseStyle().Foreground(t.TextMuted()).Width(adjustedWidth).Render(insertExitText),
+	)
+
+	contentStyle := styles.BaseStyle().Padding(1, 2).
+		Border(lipgloss.RoundedBorder()).
+		BorderBackground(t.Background()).
+		BorderForeground(t.TextMuted()).
+		Width(lipgloss.Width(content) + 4)
+
+	return lipgloss.JoinHorizontal(lipgloss.Center, contentStyle.Render(content), viewportstyle)
+}
+
+type FilepickerCmp interface {
+	tea.Model
+	ToggleFilepicker(showFilepicker bool)
+	IsCWDFocused() bool
+}
+
+func (f *filepickerCmp) ToggleFilepicker(showFilepicker bool) {
+	f.ShowFilePicker = showFilepicker
+}
+
+func (f *filepickerCmp) IsCWDFocused() bool {
+	return f.cwd.Focused()
+}
+
+func NewFilepickerCmp(app *app.App) FilepickerCmp {
+	homepath, err := os.UserHomeDir()
+	if err != nil {
+		logging.Error("error resolving user home directory")
+		return nil
+	}
+	baseDir := DirNode{parent: nil, directory: homepath}
+	dirs := readDir(homepath, false)
+	viewport := viewport.New(0, 0)
+	currentDirectory := textinput.New()
+	currentDirectory.CharLimit = 200
+	currentDirectory.Width = 44
+	currentDirectory.Cursor.Blink = true
+	currentDirectory.SetValue(baseDir.directory)
+	return &filepickerCmp{cwdDetails: &baseDir, dirs: dirs, cursorChain: make(stack, 0), viewport: viewport, cwd: currentDirectory, app: app}
+}
+
+func (f *filepickerCmp) getCurrentFileBelowCursor() {
+	if len(f.dirs) == 0 || f.cursor < 0 || f.cursor >= len(f.dirs) {
+		logging.Error(fmt.Sprintf("Invalid cursor position. Dirs length: %d, Cursor: %d", len(f.dirs), f.cursor))
+		f.viewport.SetContent("Preview unavailable")
+		return
+	}
+
+	dir := f.dirs[f.cursor]
+	filename := dir.Name()
+	if !dir.IsDir() && isExtSupported(filename) {
+		fullPath := filepath.Join(f.cwdDetails.directory, dir.Name())
+
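+		// Note: mutating the viewport from a goroutine sidesteps Bubble Tea's
+		// update loop, so the preview only appears on the next repaint; a
+		// tea.Cmd returning a message would be the idiomatic alternative.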
+		go func() {
+			imageString, err := image.ImagePreview(f.viewport.Width-4, fullPath)
+			if err != nil {
+				logging.Error(err.Error())
+				f.viewport.SetContent("Preview unavailable")
+				return
+			}
+
+			f.viewport.SetContent(imageString)
+		}()
+	} else {
+		f.viewport.SetContent("Preview unavailable")
+	}
+}
+
+func readDir(path string, showHidden bool) []os.DirEntry {
+	logging.Info(fmt.Sprintf("Reading directory: %s", path))
+
+	entriesChan := make(chan []os.DirEntry, 1)
+	errChan := make(chan error, 1)
+
+	go func() {
+		dirEntries, err := os.ReadDir(path)
+		if err != nil {
+			logging.ErrorPersist(err.Error())
+			errChan <- err
+			return
+		}
+		entriesChan <- dirEntries
+	}()
+
+	select {
+	case dirEntries := <-entriesChan:
+		sort.Slice(dirEntries, func(i, j int) bool {
+			if dirEntries[i].IsDir() == dirEntries[j].IsDir() {
+				return dirEntries[i].Name() < dirEntries[j].Name()
+			}
+			return dirEntries[i].IsDir()
+		})
+
+		if showHidden {
+			return dirEntries
+		}
+
+		var sanitizedDirEntries []os.DirEntry
+		for _, dirEntry := range dirEntries {
+			isHidden, _ := IsHidden(dirEntry.Name())
+			if !isHidden {
+				if dirEntry.IsDir() || isExtSupported(dirEntry.Name()) {
+					sanitizedDirEntries = append(sanitizedDirEntries, dirEntry)
+				}
+			}
+		}
+
+		return sanitizedDirEntries
+
+	case err := <-errChan:
+		logging.ErrorPersist(fmt.Sprintf("Error reading directory %s", path), err)
+		return []os.DirEntry{}
+
+	case <-time.After(5 * time.Second):
+		logging.ErrorPersist(fmt.Sprintf("Timeout reading directory %s", path), nil)
+		return []os.DirEntry{}
+	}
+}
+
+func IsHidden(file string) (bool, error) {
+	return strings.HasPrefix(file, "."), nil
+}
+
+func isExtSupported(path string) bool {
+	ext := strings.ToLower(filepath.Ext(path))
+	return (ext == ".jpg" || ext == ".jpeg" || ext == ".webp" || ext == ".png")
+}

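A parent component only needs to handle AttachmentAddedMsg to consume the picker's output; validation (extension, size, model support) already happened inside the dialog. A minimal consumer sketch (myModel and its pending field are hypothetical):

    func (m *myModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
        switch msg := msg.(type) {
        case dialog.AttachmentAddedMsg:
            // The picker has already validated type, size, and model support.
            m.pending = append(m.pending, msg.Attachment)
        }
        return m, nil
    }
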
internal/tui/components/dialog/models.go 🔗

@@ -270,20 +270,23 @@ func (m *modelDialogCmp) BindingKeys() []key.Binding {
 
 func (m *modelDialogCmp) setupModels() {
 	cfg := config.Get()
-
+	modelInfo := GetSelectedModel(cfg)
 	m.availableProviders = getEnabledProviders(cfg)
 	m.hScrollPossible = len(m.availableProviders) > 1
 
-	agentCfg := cfg.Agents[config.AgentCoder]
-	selectedModelId := agentCfg.Model
-	modelInfo := models.SupportedModels[selectedModelId]
-
 	m.provider = modelInfo.Provider
 	m.hScrollOffset = findProviderIndex(m.availableProviders, m.provider)
 
 	m.setupModelsForProvider(m.provider)
 }
 
+func GetSelectedModel(cfg *config.Config) models.Model {
+	agentCfg := cfg.Agents[config.AgentCoder]
+	selectedModelId := agentCfg.Model
+	return models.SupportedModels[selectedModelId]
+}
+
 func getEnabledProviders(cfg *config.Config) []models.ModelProvider {
 	var providers []models.ModelProvider
 	for providerId, provider := range cfg.Providers {

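Hoisting GetSelectedModel out of setupModels gives other components a single way to ask about the active model; the filepicker uses it to gate attachments. A sketch of the same check elsewhere (illustrative, not the picker's exact call):

    // Sketch: refuse to open the picker when the model can't accept images.
    if !dialog.GetSelectedModel(config.Get()).SupportsAttachments {
        return util.ReportWarn("current model does not support attachments")
    }
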
internal/tui/components/dialog/permission.go 🔗

@@ -2,8 +2,6 @@ package dialog
 
 import (
 	"fmt"
-	"strings"
-
 	"github.com/charmbracelet/bubbles/key"
 	"github.com/charmbracelet/bubbles/viewport"
 	tea "github.com/charmbracelet/bubbletea"
@@ -15,6 +13,7 @@ import (
 	"github.com/opencode-ai/opencode/internal/tui/styles"
 	"github.com/opencode-ai/opencode/internal/tui/theme"
 	"github.com/opencode-ai/opencode/internal/tui/util"
+	"strings"
 )
 
 type PermissionAction string

internal/tui/image/images.go 🔗

@@ -0,0 +1,72 @@
+package image
+
+import (
+	"fmt"
+	"image"
+	"os"
+	"strings"
+
+	"github.com/charmbracelet/lipgloss"
+	"github.com/disintegration/imaging"
+	"github.com/lucasb-eyer/go-colorful"
+)
+
+func ValidateFileSize(filePath string, sizeLimit int64) (bool, error) {
+	fileInfo, err := os.Stat(filePath)
+	if err != nil {
+		return false, fmt.Errorf("error getting file info: %w", err)
+	}
+
+	if fileInfo.Size() > sizeLimit {
+		return true, nil
+	}
+
+	return false, nil
+}
+
+func ToString(width int, img image.Image) string {
+	img = imaging.Resize(img, width, 0, imaging.Lanczos)
+	b := img.Bounds()
+	imageWidth := b.Max.X
+	h := b.Max.Y
+	str := strings.Builder{}
+
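+	// Each terminal cell encodes two vertically stacked pixels: the upper
+	// half-block rune takes the top pixel as its foreground color and the
+	// bottom pixel as its background, which is why y advances by two.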
+	for heightCounter := 0; heightCounter < h; heightCounter += 2 {
+		for x := range imageWidth {
+			c1, _ := colorful.MakeColor(img.At(x, heightCounter))
+			color1 := lipgloss.Color(c1.Hex())
+
+			var color2 lipgloss.Color
+			if heightCounter+1 < h {
+				c2, _ := colorful.MakeColor(img.At(x, heightCounter+1))
+				color2 = lipgloss.Color(c2.Hex())
+			} else {
+				color2 = color1
+			}
+
+			str.WriteString(lipgloss.NewStyle().Foreground(color1).
+				Background(color2).Render("▀"))
+		}
+
+		str.WriteString("\n")
+	}
+
+	return str.String()
+}
+
+func ImagePreview(width int, filename string) (string, error) {
+	imageContent, err := os.Open(filename)
+	if err != nil {
+		return "", err
+	}
+	defer imageContent.Close()
+
+	img, _, err := image.Decode(imageContent)
+	if err != nil {
+		return "", err
+	}
+
+	imageString := ToString(width, img)
+
+	return imageString, nil
+}

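ImagePreview composes the helpers above: open, decode, resize, then render with half blocks. It can be exercised on its own from inside this module (the package is internal); the file path below is hypothetical:

    // Sketch: print a 40-cell-wide preview of an image to stdout.
    preview, err := image.ImagePreview(40, "testdata/sample.png")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Print(preview)

PNG/JPEG decoder registration should come in transitively through the imaging dependency, so blank imports of image/png and image/jpeg ought not be needed, though adding them is harmless.
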
internal/tui/page/chat.go 🔗

@@ -6,6 +6,7 @@ import (
 	"github.com/charmbracelet/bubbles/key"
 	tea "github.com/charmbracelet/bubbletea"
 	"github.com/opencode-ai/opencode/internal/app"
+	"github.com/opencode-ai/opencode/internal/message"
 	"github.com/opencode-ai/opencode/internal/session"
 	"github.com/opencode-ai/opencode/internal/tui/components/chat"
 	"github.com/opencode-ai/opencode/internal/tui/layout"
@@ -52,7 +53,7 @@ func (p *chatPage) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 		cmd := p.layout.SetSize(msg.Width, msg.Height)
 		cmds = append(cmds, cmd)
 	case chat.SendMsg:
-		cmd := p.sendMessage(msg.Text)
+		cmd := p.sendMessage(msg.Text, msg.Attachments)
 		if cmd != nil {
 			return p, cmd
 		}
@@ -99,7 +100,7 @@ func (p *chatPage) clearSidebar() tea.Cmd {
 	return p.layout.ClearRightPanel()
 }
 
-func (p *chatPage) sendMessage(text string) tea.Cmd {
+func (p *chatPage) sendMessage(text string, attachments []message.Attachment) tea.Cmd {
 	var cmds []tea.Cmd
 	if p.session.ID == "" {
 		session, err := p.app.Sessions.Create(context.Background(), "New Session")
@@ -115,7 +116,10 @@ func (p *chatPage) sendMessage(text string) tea.Cmd {
 		cmds = append(cmds, util.CmdHandler(chat.SessionSelectedMsg(session)))
 	}
 
-	p.app.CoderAgent.Run(context.Background(), p.session.ID, text)
+	_, err := p.app.CoderAgent.Run(context.Background(), p.session.ID, text, attachments...)
+	if err != nil {
+		return util.ReportError(err)
+	}
 	return tea.Batch(cmds...)
 }
 
@@ -134,6 +138,7 @@ func (p *chatPage) View() string {
 func (p *chatPage) BindingKeys() []key.Binding {
 	bindings := layout.KeyMapToSlice(keyMap)
 	bindings = append(bindings, p.messages.BindingKeys()...)
+	bindings = append(bindings, p.editor.BindingKeys()...)
 	return bindings
 }
 

internal/tui/styles/icons.go 🔗

@@ -3,11 +3,12 @@ package styles
 const (
 	OpenCodeIcon string = "⌬"
 
-	CheckIcon   string = "✓"
-	ErrorIcon   string = "✖"
-	WarningIcon string = "⚠"
-	InfoIcon    string = ""
-	HintIcon    string = "i"
-	SpinnerIcon string = "..."
-	LoadingIcon string = "⟳"
-)
+	CheckIcon    string = "✓"
+	ErrorIcon    string = "✖"
+	WarningIcon  string = "⚠"
+	InfoIcon     string = ""
+	HintIcon     string = "i"
+	SpinnerIcon  string = "..."
+	LoadingIcon  string = "⟳"
+	DocumentIcon string = "🖼"
+)

internal/tui/styles/styles.go 🔗

@@ -5,6 +5,10 @@ import (
 	"github.com/opencode-ai/opencode/internal/tui/theme"
 )
 
+var (
+	ImageBackground = "#212121"
+)
+
 // Style generation functions that use the current theme
 
 // BaseStyle returns the base style with background and foreground colors
@@ -149,4 +153,3 @@ func BorderFocusedColor() lipgloss.AdaptiveColor {
 func BorderDimColor() lipgloss.AdaptiveColor {
 	return theme.CurrentTheme().BorderDim()
 }
-

internal/tui/tui.go 🔗

@@ -26,10 +26,15 @@ type keyMap struct {
 	Help          key.Binding
 	SwitchSession key.Binding
 	Commands      key.Binding
+	Filepicker    key.Binding
 	Models        key.Binding
 	SwitchTheme   key.Binding
 }
 
+const (
+	quitKey = "q"
+)
+
 var keys = keyMap{
 	Logs: key.NewBinding(
 		key.WithKeys("ctrl+l"),
@@ -54,7 +59,10 @@ var keys = keyMap{
 		key.WithKeys("ctrl+k"),
 		key.WithHelp("ctrl+k", "commands"),
 	),
-
+	Filepicker: key.NewBinding(
+		key.WithKeys("ctrl+f"),
+		key.WithHelp("ctrl+f", "select files to upload"),
+	),
 	Models: key.NewBinding(
 		key.WithKeys("ctrl+o"),
 		key.WithHelp("ctrl+o", "model selection"),
@@ -77,7 +85,7 @@ var returnKey = key.NewBinding(
 )
 
 var logsKeyReturnKey = key.NewBinding(
-	key.WithKeys("esc", "backspace", "q"),
+	key.WithKeys("esc", "backspace", quitKey),
 	key.WithHelp("esc/q", "go back"),
 )
 
@@ -112,6 +120,9 @@ type appModel struct {
 	showInitDialog bool
 	initDialog     dialog.InitDialogCmp
 
+	showFilepicker bool
+	filepicker     dialog.FilepickerCmp
+
 	showThemeDialog bool
 	themeDialog     dialog.ThemeDialog
 }
@@ -135,6 +146,7 @@ func (a appModel) Init() tea.Cmd {
 	cmds = append(cmds, cmd)
 	cmd = a.initDialog.Init()
 	cmds = append(cmds, cmd)
+	cmd = a.filepicker.Init()
+	cmds = append(cmds, cmd)
 	cmd = a.themeDialog.Init()
 	cmds = append(cmds, cmd)
 
@@ -182,6 +194,10 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 		a.commandDialog = command.(dialog.CommandDialog)
 		cmds = append(cmds, commandCmd)
 
+		filepicker, filepickerCmd := a.filepicker.Update(msg)
+		a.filepicker = filepicker.(dialog.FilepickerCmp)
+		cmds = append(cmds, filepickerCmd)
+
 		a.initDialog.SetSize(msg.Width, msg.Height)
 
 		return a, tea.Batch(cmds...)
@@ -333,6 +349,7 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 
 	case tea.KeyMsg:
 		switch {
+
 		case key.Matches(msg, keys.Quit):
 			a.showQuit = !a.showQuit
 			if a.showHelp {
@@ -344,6 +361,10 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 			if a.showCommandDialog {
 				a.showCommandDialog = false
 			}
+			if a.showFilepicker {
+				a.showFilepicker = false
+				a.filepicker.ToggleFilepicker(a.showFilepicker)
+			}
 			if a.showModelDialog {
 				a.showModelDialog = false
 			}
@@ -364,7 +385,7 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 			}
 			return a, nil
 		case key.Matches(msg, keys.Commands):
-			if a.currentPage == page.ChatPage && !a.showQuit && !a.showPermissions && !a.showSessionDialog && !a.showThemeDialog {
+			if a.currentPage == page.ChatPage && !a.showQuit && !a.showPermissions && !a.showSessionDialog && !a.showThemeDialog && !a.showFilepicker {
 				// Show commands dialog
 				if len(a.commands) == 0 {
 					return a, util.ReportWarn("No commands available")
@@ -392,26 +413,36 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 				return a, a.themeDialog.Init()
 			}
 			return a, nil
-		case key.Matches(msg, logsKeyReturnKey):
-			if a.currentPage == page.LogsPage {
-				return a, a.moveToPage(page.ChatPage)
-			}
-		case key.Matches(msg, returnKey):
-			if a.showQuit {
-				a.showQuit = !a.showQuit
-				return a, nil
-			}
-			if a.showHelp {
-				a.showHelp = !a.showHelp
-				return a, nil
-			}
-			if a.showInitDialog {
-				a.showInitDialog = false
-				// Mark the project as initialized without running the command
-				if err := config.MarkProjectInitialized(); err != nil {
-					return a, util.ReportError(err)
+		case key.Matches(msg, returnKey) || key.Matches(msg, logsKeyReturnKey):
+			if msg.String() == quitKey {
+				if a.currentPage == page.LogsPage {
+					return a, a.moveToPage(page.ChatPage)
+				}
+			} else if !a.filepicker.IsCWDFocused() {
+				if a.showQuit {
+					a.showQuit = !a.showQuit
+					return a, nil
+				}
+				if a.showHelp {
+					a.showHelp = !a.showHelp
+					return a, nil
+				}
+				if a.showInitDialog {
+					a.showInitDialog = false
+					// Mark the project as initialized without running the command
+					if err := config.MarkProjectInitialized(); err != nil {
+						return a, util.ReportError(err)
+					}
+					return a, nil
+				}
+				if a.showFilepicker {
+					a.showFilepicker = false
+					a.filepicker.ToggleFilepicker(a.showFilepicker)
+					return a, nil
+				}
+				if a.currentPage == page.LogsPage {
+					return a, a.moveToPage(page.ChatPage)
 				}
-				return a, nil
 			}
 		case key.Matches(msg, keys.Logs):
 			return a, a.moveToPage(page.LogsPage)
@@ -429,10 +460,28 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 				a.showHelp = !a.showHelp
 				return a, nil
 			}
+		case key.Matches(msg, keys.Filepicker):
+			a.showFilepicker = !a.showFilepicker
+			a.filepicker.ToggleFilepicker(a.showFilepicker)
+			return a, nil
 		}
+	default:
+		// When the filepicker is visible it is updated in the block below;
+		// guard here so non-key messages aren't delivered to it twice.
+		if !a.showFilepicker {
+			f, filepickerCmd := a.filepicker.Update(msg)
+			a.filepicker = f.(dialog.FilepickerCmp)
+			cmds = append(cmds, filepickerCmd)
+		}
 
 	}
 
+	if a.showFilepicker {
+		f, filepickerCmd := a.filepicker.Update(msg)
+		a.filepicker = f.(dialog.FilepickerCmp)
+		cmds = append(cmds, filepickerCmd)
+		// Only block key messages; all other messages are passed down
+		if _, ok := msg.(tea.KeyMsg); ok {
+			return a, tea.Batch(cmds...)
+		}
+	}
+
 	if a.showQuit {
 		q, quitCmd := a.quit.Update(msg)
 		a.quit = q.(dialog.QuitDialog)
@@ -519,6 +568,7 @@ func (a *appModel) moveToPage(pageID page.PageID) tea.Cmd {
 		// For now we don't move to any page if the agent is busy
 		return util.ReportWarn("Agent is busy, please wait...")
 	}
+
 	var cmds []tea.Cmd
 	if _, ok := a.loadedPages[pageID]; !ok {
 		cmd := a.pages[pageID].Init()
@@ -559,6 +609,22 @@ func (a appModel) View() string {
 		)
 	}
 
+	if a.showFilepicker {
+		overlay := a.filepicker.View()
+		row := lipgloss.Height(appView) / 2
+		row -= lipgloss.Height(overlay) / 2
+		col := lipgloss.Width(appView) / 2
+		col -= lipgloss.Width(overlay) / 2
+		appView = layout.PlaceOverlay(
+			col,
+			row,
+			overlay,
+			appView,
+			true,
+		)
+
+	}
+
 	if !a.app.CoderAgent.IsBusy() {
 		a.status.SetHelpWidgetMsg("ctrl+? help")
 	} else {
@@ -704,6 +770,7 @@ func New(app *app.App) tea.Model {
 			page.ChatPage: page.NewChatPage(app),
 			page.LogsPage: page.NewLogsPage(),
 		},
+		filepicker: dialog.NewFilepickerCmp(app),
 	}
 
 	model.RegisterCommand(dialog.Command{