.github/CODEOWNERS 🔗
@@ -1 +1 @@
-*.go @kujtimiihoxha
+*.go @charmbracelet/everyone
Raphael Amorim created
.github/CODEOWNERS | 2
.github/cla-signatures.json | 456
.github/dependabot.yml | 15
.github/labeler.yml | 29
.github/workflows/build.yml | 21
.github/workflows/cla.yml | 6
.github/workflows/dependabot-sync.yml | 17
.github/workflows/labeler.yml | 6
.github/workflows/lint-sync.yml | 6
.github/workflows/lint.yml | 1
.github/workflows/nightly.yml | 2
.github/workflows/release.yml | 1
.github/workflows/schema-update.yml | 6
.goreleaser.yml | 44
CRUSH.md | 6
LICENSE.md | 0
README.md | 354
Taskfile.yaml | 52
crush.json | 4
go.mod | 118
go.sum | 227
internal/app/app.go | 70
internal/app/lsp.go | 96
internal/app/lsp_events.go | 99
internal/cmd/logs.go | 9
internal/cmd/root.go | 53
internal/cmd/update_providers.go | 60
internal/config/config.go | 150
internal/config/init.go | 29
internal/config/load.go | 177
internal/config/load_test.go | 134
internal/config/lsp_defaults_test.go | 35
internal/config/merge_test.go | 2
internal/config/provider.go | 154
internal/config/provider_empty_test.go | 6
internal/config/provider_test.go | 6
internal/csync/maps.go | 12
internal/csync/maps_test.go | 10
internal/csync/slices.go | 6
internal/csync/slices_test.go | 88
internal/csync/versionedmap.go | 51
internal/csync/versionedmap_test.go | 89
internal/db/connect.go | 40
internal/event/all.go | 59
internal/event/event.go | 107
internal/event/identifier.go | 49
internal/event/logger.go | 28
internal/fsext/fileutil.go | 127
internal/fsext/fileutil_test.go | 273
internal/fsext/ignore_test.go | 110
internal/fsext/lookup.go | 141
internal/fsext/lookup_test.go | 483
internal/fsext/ls.go | 262
internal/fsext/ls_test.go | 66
internal/fsext/owner_others.go | 24
internal/fsext/owner_windows.go | 15
internal/home/home.go | 43
internal/home/home_test.go | 26
internal/llm/agent/agent.go | 287
internal/llm/agent/errors.go | 15
internal/llm/agent/event.go | 53
internal/llm/agent/mcp-tools.go | 301
internal/llm/prompt/coder.go | 6
internal/llm/prompt/init.md | 1
internal/llm/prompt/openai.md | 96
internal/llm/prompt/prompt.go | 14
internal/llm/prompt/prompt_test.go | 62
internal/llm/provider/anthropic.go | 40
internal/llm/provider/gemini.go | 71
internal/llm/provider/openai.go | 252
internal/llm/provider/openai_test.go | 80
internal/llm/provider/provider.go | 7
internal/llm/tools/bash.go | 287
internal/llm/tools/bash.md | 161
internal/llm/tools/diagnostics.go | 326
internal/llm/tools/diagnostics.md | 21
internal/llm/tools/download.go | 39
internal/llm/tools/download.md | 34
internal/llm/tools/edit.go | 82
internal/llm/tools/edit.md | 60
internal/llm/tools/fetch.go | 39
internal/llm/tools/fetch.md | 34
internal/llm/tools/glob.go | 49
internal/llm/tools/glob.md | 46
internal/llm/tools/grep.go | 119
internal/llm/tools/grep.md | 54
internal/llm/tools/grep_test.go | 61
internal/llm/tools/ls.go | 44
internal/llm/tools/ls.md | 40
internal/llm/tools/multiedit.go | 67
internal/llm/tools/multiedit.md | 48
internal/llm/tools/rg.go | 4
internal/llm/tools/safe.go | 15
internal/llm/tools/sourcegraph.go | 104
internal/llm/tools/sourcegraph.md | 102
internal/llm/tools/view.go | 49
internal/llm/tools/view.md | 42
internal/llm/tools/write.go | 50
internal/llm/tools/write.md | 38
internal/log/http.go | 4
internal/log/log.go | 3
internal/lsp/client.go | 814
internal/lsp/client_test.go | 57
internal/lsp/handlers.go | 63
internal/lsp/language.go | 2
internal/lsp/methods.go | 554
internal/lsp/protocol.go | 48
internal/lsp/protocol/LICENSE | 27
internal/lsp/protocol/interface.go | 117
internal/lsp/protocol/pattern_interfaces.go | 73
internal/lsp/protocol/tables.go | 30
internal/lsp/protocol/tsdocument-changes.go | 81
internal/lsp/protocol/tsjson.go | 3072
internal/lsp/protocol/tsprotocol.go | 6952
internal/lsp/protocol/uri.go | 229
internal/lsp/rootmarkers_test.go | 37
internal/lsp/transport.go | 275
internal/lsp/util/edit.go | 2
internal/lsp/watcher/ulimit_bsd.go | 25
internal/lsp/watcher/ulimit_darwin.go | 24
internal/lsp/watcher/ulimit_fallback.go | 8
internal/lsp/watcher/ulimit_linux.go | 25
internal/lsp/watcher/ulimit_windows.go | 38
internal/lsp/watcher/watcher.go | 975
internal/permission/permission.go | 10
internal/permission/permission_test.go | 12
internal/session/session.go | 3
internal/shell/command_block_test.go | 286
internal/shell/coreutils.go | 59
internal/shell/shell.go | 64
internal/shell/shell_test.go | 4
internal/tui/components/anim/example/main.go | 90
internal/tui/components/chat/chat.go | 254
internal/tui/components/chat/editor/editor.go | 74
internal/tui/components/chat/editor/keys.go | 2
internal/tui/components/chat/header/header.go | 104
internal/tui/components/chat/messages/messages.go | 26
internal/tui/components/chat/messages/tool.go | 15
internal/tui/components/chat/queue.go | 28
internal/tui/components/chat/sidebar/sidebar.go | 508
internal/tui/components/chat/splash/keys.go | 2
internal/tui/components/chat/splash/splash.go | 104
internal/tui/components/completions/keys.go | 2
internal/tui/components/core/core.go | 39
internal/tui/components/core/status_test.go | 3
internal/tui/components/core/testdata/TestStatus/AllFieldsWithExtraContent.golden | 2
internal/tui/components/core/testdata/TestStatus/Default.golden | 2
internal/tui/components/core/testdata/TestStatus/EmptyDescription.golden | 2
internal/tui/components/core/testdata/TestStatus/LongDescription.golden | 2
internal/tui/components/core/testdata/TestStatus/NarrowWidth.golden | 2
internal/tui/components/core/testdata/TestStatus/VeryNarrowWidth.golden | 2
internal/tui/components/core/testdata/TestStatus/WithColors.golden | 2
internal/tui/components/core/testdata/TestStatus/WithCustomIcon.golden | 2
internal/tui/components/core/testdata/TestStatus/WithExtraContent.golden | 2
internal/tui/components/core/testdata/TestStatusTruncation/Width20.golden | 2
internal/tui/components/core/testdata/TestStatusTruncation/Width30.golden | 2
internal/tui/components/core/testdata/TestStatusTruncation/Width40.golden | 2
internal/tui/components/core/testdata/TestStatusTruncation/Width50.golden | 2
internal/tui/components/core/testdata/TestStatusTruncation/Width60.golden | 2
internal/tui/components/dialogs/commands/commands.go | 79
internal/tui/components/dialogs/commands/keys.go | 2
internal/tui/components/dialogs/commands/loader.go | 5
internal/tui/components/dialogs/compact/compact.go | 15
internal/tui/components/dialogs/compact/keys.go | 2
internal/tui/components/dialogs/filepicker/filepicker.go | 50
internal/tui/components/dialogs/filepicker/keys.go | 2
internal/tui/components/dialogs/keys.go | 2
internal/tui/components/dialogs/models/apikey.go | 4
internal/tui/components/dialogs/models/keys.go | 6
internal/tui/components/dialogs/models/list.go | 20
internal/tui/components/dialogs/models/models.go | 36
internal/tui/components/dialogs/permissions/keys.go | 2
internal/tui/components/dialogs/permissions/permissions.go | 127
internal/tui/components/dialogs/quit/keys.go | 2
internal/tui/components/dialogs/quit/quit.go | 6
internal/tui/components/dialogs/reasoning/reasoning.go | 268
internal/tui/components/dialogs/sessions/keys.go | 6
internal/tui/components/dialogs/sessions/sessions.go | 2
internal/tui/components/files/files.go | 146
internal/tui/components/lsp/lsp.go | 162
internal/tui/components/mcp/mcp.go | 126
internal/tui/exp/diffview/diffview.go | 6
internal/tui/exp/diffview/split.go | 2
internal/tui/exp/list/filterable.go | 10
internal/tui/exp/list/filterable_group.go | 88
internal/tui/exp/list/items.go | 4
internal/tui/exp/list/list.go | 479
internal/tui/page/chat/chat.go | 147
internal/tui/page/chat/keys.go | 2
internal/tui/styles/charmtone.go | 24
internal/tui/styles/icons.go | 23
internal/tui/styles/theme.go | 82
internal/tui/tui.go | 176
main.go | 2
schema.json | 106
scripts/run-labeler.sh | 8
196 files changed, 8,959 insertions(+), 16,384 deletions(-)
@@ -1 +1 @@
-*.go @kujtimiihoxha
+*.go @charmbracelet/everyone
@@ -223,6 +223,462 @@
"created_at": "2025-08-04T14:01:30Z",
"repoId": 987670088,
"pullRequestNo": 546
+ },
+ {
+ "name": "zloeber",
+ "id": 4702624,
+ "comment_id": 3152513500,
+ "created_at": "2025-08-04T21:55:42Z",
+ "repoId": 987670088,
+ "pullRequestNo": 564
+ },
+ {
+ "name": "nelsenm2",
+ "id": 197524521,
+ "comment_id": 3152872109,
+ "created_at": "2025-08-05T00:24:50Z",
+ "repoId": 987670088,
+ "pullRequestNo": 569
+ },
+ {
+ "name": "mohseenrm",
+ "id": 10768371,
+ "comment_id": 3153159347,
+ "created_at": "2025-08-05T03:39:12Z",
+ "repoId": 987670088,
+ "pullRequestNo": 574
+ },
+ {
+ "name": "0xWelt",
+ "id": 49543594,
+ "comment_id": 3157331134,
+ "created_at": "2025-08-06T04:07:52Z",
+ "repoId": 987670088,
+ "pullRequestNo": 584
+ },
+ {
+ "name": "kslamph",
+ "id": 15257433,
+ "comment_id": 3157402768,
+ "created_at": "2025-08-06T04:56:35Z",
+ "repoId": 987670088,
+ "pullRequestNo": 585
+ },
+ {
+ "name": "kslamph",
+ "id": 15257433,
+ "comment_id": 3157429309,
+ "created_at": "2025-08-06T05:14:23Z",
+ "repoId": 987670088,
+ "pullRequestNo": 585
+ },
+ {
+ "name": "Sunsvea",
+ "id": 14066471,
+ "comment_id": 3159014183,
+ "created_at": "2025-08-06T10:04:39Z",
+ "repoId": 987670088,
+ "pullRequestNo": 593
+ },
+ {
+ "name": "taciturnaxolotl",
+ "id": 92754843,
+ "comment_id": 3160702345,
+ "created_at": "2025-08-06T15:48:47Z",
+ "repoId": 987670088,
+ "pullRequestNo": 606
+ },
+ {
+ "name": "bashbunni",
+ "id": 15822994,
+ "comment_id": 3160746002,
+ "created_at": "2025-08-06T16:02:06Z",
+ "repoId": 987670088,
+ "pullRequestNo": 187
+ },
+ {
+ "name": "edafonseca",
+ "id": 3027921,
+ "comment_id": 3161714270,
+ "created_at": "2025-08-06T21:41:34Z",
+ "repoId": 987670088,
+ "pullRequestNo": 618
+ },
+ {
+ "name": "smores56",
+ "id": 22140449,
+ "comment_id": 3161853491,
+ "created_at": "2025-08-06T22:51:42Z",
+ "repoId": 987670088,
+ "pullRequestNo": 620
+ },
+ {
+ "name": "danielsz",
+ "id": 859131,
+ "comment_id": 3163327861,
+ "created_at": "2025-08-07T09:39:26Z",
+ "repoId": 987670088,
+ "pullRequestNo": 628
+ },
+ {
+ "name": "pavelzw",
+ "id": 29506042,
+ "comment_id": 3164728921,
+ "created_at": "2025-08-07T15:33:23Z",
+ "repoId": 987670088,
+ "pullRequestNo": 631
+ },
+ {
+ "name": "CyrusZei",
+ "id": 5053903,
+ "comment_id": 3165875211,
+ "created_at": "2025-08-07T21:45:30Z",
+ "repoId": 987670088,
+ "pullRequestNo": 638
+ },
+ {
+ "name": "maxjustus",
+ "id": 24899,
+ "comment_id": 3166599830,
+ "created_at": "2025-08-08T05:12:37Z",
+ "repoId": 987670088,
+ "pullRequestNo": 645
+ },
+ {
+ "name": "akaytatsu",
+ "id": 2520440,
+ "comment_id": 3167400968,
+ "created_at": "2025-08-08T10:32:51Z",
+ "repoId": 987670088,
+ "pullRequestNo": 647
+ },
+ {
+ "name": "theguy000",
+ "id": 52881001,
+ "comment_id": 3167442675,
+ "created_at": "2025-08-08T10:46:15Z",
+ "repoId": 987670088,
+ "pullRequestNo": 648
+ },
+ {
+ "name": "pwnintended",
+ "id": 92651319,
+ "comment_id": 3169309904,
+ "created_at": "2025-08-08T21:08:45Z",
+ "repoId": 987670088,
+ "pullRequestNo": 668
+ },
+ {
+ "name": "tazjin",
+ "id": 1552853,
+ "comment_id": 3169412729,
+ "created_at": "2025-08-08T22:05:40Z",
+ "repoId": 987670088,
+ "pullRequestNo": 672
+ },
+ {
+ "name": "liznear",
+ "id": 160093988,
+ "comment_id": 3170486027,
+ "created_at": "2025-08-09T08:15:27Z",
+ "repoId": 987670088,
+ "pullRequestNo": 681
+ },
+ {
+ "name": "jamestrew",
+ "id": 66286082,
+ "comment_id": 3171975481,
+ "created_at": "2025-08-09T17:55:31Z",
+ "repoId": 987670088,
+ "pullRequestNo": 689
+ },
+ {
+ "name": "wwwjfy",
+ "id": 126527,
+ "comment_id": 3172676799,
+ "created_at": "2025-08-10T14:35:04Z",
+ "repoId": 987670088,
+ "pullRequestNo": 702
+ },
+ {
+ "name": "orospakr",
+ "id": 16714,
+ "comment_id": 3181859171,
+ "created_at": "2025-08-13T01:01:30Z",
+ "repoId": 987670088,
+ "pullRequestNo": 746
+ },
+ {
+ "name": "samiulsami",
+ "id": 33352407,
+ "comment_id": 3189231059,
+ "created_at": "2025-08-14T17:12:20Z",
+ "repoId": 987670088,
+ "pullRequestNo": 779
+ },
+ {
+ "name": "neomantra",
+ "id": 26842,
+ "comment_id": 3189674073,
+ "created_at": "2025-08-14T19:42:53Z",
+ "repoId": 987670088,
+ "pullRequestNo": 752
+ },
+ {
+ "name": "uri",
+ "id": 676443,
+ "comment_id": 3190313833,
+ "created_at": "2025-08-15T00:58:11Z",
+ "repoId": 987670088,
+ "pullRequestNo": 782
+ },
+ {
+ "name": "lpmitchell",
+ "id": 5081038,
+ "comment_id": 3191789654,
+ "created_at": "2025-08-15T15:23:59Z",
+ "repoId": 987670088,
+ "pullRequestNo": 790
+ },
+ {
+ "name": "marcosktsz",
+ "id": 58952492,
+ "comment_id": 3193569650,
+ "created_at": "2025-08-16T10:29:10Z",
+ "repoId": 987670088,
+ "pullRequestNo": 798
+ },
+ {
+ "name": "sainadh-d",
+ "id": 15155045,
+ "comment_id": 3193680786,
+ "created_at": "2025-08-16T13:49:03Z",
+ "repoId": 987670088,
+ "pullRequestNo": 803
+ },
+ {
+ "name": "ericcoleta",
+ "id": 42539971,
+ "comment_id": 3201263288,
+ "created_at": "2025-08-19T15:38:48Z",
+ "repoId": 987670088,
+ "pullRequestNo": 835
+ },
+ {
+ "name": "linw1995",
+ "id": 13523027,
+ "comment_id": 3209540243,
+ "created_at": "2025-08-21T08:29:20Z",
+ "repoId": 987670088,
+ "pullRequestNo": 850
+ },
+ {
+ "name": "khareyash05",
+ "id": 60147732,
+ "comment_id": 3213528951,
+ "created_at": "2025-08-22T08:26:23Z",
+ "repoId": 987670088,
+ "pullRequestNo": 860
+ },
+ {
+ "name": "mpj",
+ "id": 17815,
+ "comment_id": 3214796891,
+ "created_at": "2025-08-22T15:35:12Z",
+ "repoId": 987670088,
+ "pullRequestNo": 865
+ },
+ {
+ "name": "xPrimeTime",
+ "id": 101987372,
+ "comment_id": 3217970106,
+ "created_at": "2025-08-24T09:28:11Z",
+ "repoId": 987670088,
+ "pullRequestNo": 873
+ },
+ {
+ "name": "mercmobily",
+ "id": 2128734,
+ "comment_id": 3219992206,
+ "created_at": "2025-08-25T12:00:54Z",
+ "repoId": 987670088,
+ "pullRequestNo": 882
+ },
+ {
+ "name": "xhos",
+ "id": 60789741,
+ "comment_id": 3221119175,
+ "created_at": "2025-08-25T17:26:06Z",
+ "repoId": 987670088,
+ "pullRequestNo": 892
+ },
+ {
+ "name": "henrebotha",
+ "id": 5593874,
+ "comment_id": 3223793535,
+ "created_at": "2025-08-26T11:34:17Z",
+ "repoId": 987670088,
+ "pullRequestNo": 900
+ },
+ {
+ "name": "negz",
+ "id": 1049349,
+ "comment_id": 3232462357,
+ "created_at": "2025-08-28T08:23:46Z",
+ "repoId": 987670088,
+ "pullRequestNo": 914
+ },
+ {
+ "name": "undo76",
+ "id": 1415667,
+ "comment_id": 3235052544,
+ "created_at": "2025-08-28T21:53:00Z",
+ "repoId": 987670088,
+ "pullRequestNo": 921
+ },
+ {
+ "name": "andersonjoseph",
+ "id": 22438127,
+ "comment_id": 3237655829,
+ "created_at": "2025-08-29T16:54:00Z",
+ "repoId": 987670088,
+ "pullRequestNo": 926
+ },
+ {
+ "name": "tisDDM",
+ "id": 77615100,
+ "comment_id": 3240239275,
+ "created_at": "2025-08-31T15:58:52Z",
+ "repoId": 987670088,
+ "pullRequestNo": 944
+ },
+ {
+ "name": "shaitanu",
+ "id": 117723026,
+ "comment_id": 3259367914,
+ "created_at": "2025-09-05T18:29:58Z",
+ "repoId": 987670088,
+ "pullRequestNo": 968
+ },
+ {
+ "name": "vadiminshakov",
+ "id": 26391516,
+ "comment_id": 3267808993,
+ "created_at": "2025-09-08T20:13:33Z",
+ "repoId": 987670088,
+ "pullRequestNo": 998
+ },
+ {
+ "name": "adriens",
+ "id": 5235127,
+ "comment_id": 3270041072,
+ "created_at": "2025-09-09T10:30:49Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1000
+ },
+ {
+ "name": "SubodhSenpai",
+ "id": 116248387,
+ "comment_id": 3275351636,
+ "created_at": "2025-09-10T14:55:25Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1008
+ },
+ {
+ "name": "tauraamui",
+ "id": 3159648,
+ "comment_id": 3279503814,
+ "created_at": "2025-09-11T09:31:52Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1016
+ },
+ {
+ "name": "kim0",
+ "id": 59667,
+ "comment_id": 3282025022,
+ "created_at": "2025-09-11T17:37:57Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1017
+ },
+ {
+ "name": "Amolith",
+ "id": 29460675,
+ "comment_id": 3285628360,
+ "created_at": "2025-09-12T15:00:12Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1025
+ },
+ {
+ "name": "WhiskeyJack96",
+ "id": 10688621,
+ "comment_id": 3290164209,
+ "created_at": "2025-09-15T01:16:08Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1037
+ },
+ {
+ "name": "Grin1024",
+ "id": 34613592,
+ "comment_id": 3290570050,
+ "created_at": "2025-09-15T05:42:29Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1042
+ },
+ {
+ "name": "dvcrn",
+ "id": 688326,
+ "comment_id": 3296702457,
+ "created_at": "2025-09-16T08:48:17Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1056
+ },
+ {
+ "name": "khushveer007",
+ "id": 122660325,
+ "comment_id": 3301369568,
+ "created_at": "2025-09-17T05:32:53Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1065
+ },
+ {
+ "name": "msteinert",
+ "id": 202852,
+ "comment_id": 3312218015,
+ "created_at": "2025-09-19T13:31:42Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1084
+ },
+ {
+ "name": "zoete",
+ "id": 33318916,
+ "comment_id": 3314945939,
+ "created_at": "2025-09-20T12:37:42Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1095
+ },
+ {
+ "name": "Kaneki-x",
+ "id": 6857108,
+ "comment_id": 3338743039,
+ "created_at": "2025-09-26T13:30:16Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1135
+ },
+ {
+ "name": "maxious",
+ "id": 81432,
+ "comment_id": 3341700737,
+ "created_at": "2025-09-27T13:09:22Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1141
+ },
+ {
+ "name": "Wangch29",
+ "id": 115294077,
+ "comment_id": 3344526018,
+ "created_at": "2025-09-29T01:19:40Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1148
}
]
}
@@ -13,6 +13,17 @@ updates:
commit-message:
prefix: "chore"
include: "scope"
+ groups:
+ all:
+ patterns:
+ - "*"
+ ignore:
+ - dependency-name: github.com/charmbracelet/bubbletea/v2
+ versions:
+ - v2.0.0-beta1
+ - dependency-name: github.com/charmbracelet/lipgloss/v2
+ versions:
+ - v2.0.0-beta1
- package-ecosystem: "github-actions"
directory: "/"
@@ -26,3 +37,7 @@ updates:
commit-message:
prefix: "chore"
include: "scope"
+ groups:
+ all:
+ patterns:
+ - "*"
@@ -1,7 +1,12 @@
+# NOTE(@andreynering): We should have a single entry per label, because if we
+# have multiple, the action would only apply the label if ALL of them match.
+
"area: ci":
- "/^ci: /i"
"area: diff":
- "/diff/i"
+"area: docs":
+ - "/(docs|documentation|readme)/i"
"area: images":
- "/image/i"
"area: lsp":
@@ -10,30 +15,40 @@
- "/mcp/i"
"area: mouse":
- "/(mouse|scroll)/i"
+"area: onboarding":
+ - "/(onboarding)/i"
"area: permissions":
- "/permission/i"
+"area: releasing":
+ - "/(release|releasing|aur|brew|nix|npm|scoop|termux|winget)/i"
"area: session":
- "/session/i"
+"area: shell":
+ - "/shell/i"
"area: themes":
- "/theme/i"
"area: tools":
- "/tool/i"
+"os: android":
+ - "/(android|termux)/i"
"os: linux":
- - "/linux/i"
+ - "/(linux|fedora|debian|ubuntu)/i"
"os: macos":
- - "/(macos|osx)/i"
+ - "/(macos|osx|darwin)/i"
"os: windows":
- - "/windows/i"
+ - "/(windows|win10|win11|win32|wsl|scoop|winget)/i"
+"panic / crash":
+ - "/(panic|crash|segfault)/i"
"provider: anthropic claude":
- "/(anthropic|claude)/i"
"provider: aws bedrock":
- "/(aws|bedrock)/i"
+"provider: azure":
+ - "/azure/i"
"provider: google gemini":
- "/gemini/i"
"provider: google vertex":
- "/vertex/i"
-"provider: grok":
- - "/grok/i"
"provider: kimi":
- "/kimi/i"
"provider: ollama":
@@ -44,3 +59,7 @@
- "/openrouter/i"
"provider: qwen":
- "/qwen/i"
+"provider: xai grok":
+ - "/(xai|x\\.ai|grok)/i"
+"security":
+ - "/(security|vulnerability|exploit)/i"
@@ -9,24 +9,3 @@ jobs:
go-version-file: ./go.mod
secrets:
gh_pat: "${{ secrets.PERSONAL_ACCESS_TOKEN }}"
-
- dependabot:
- needs: [build]
- runs-on: ubuntu-latest
- permissions:
- pull-requests: write
- contents: write
- if: ${{ github.actor == 'dependabot[bot]' && github.event_name == 'pull_request'}}
- steps:
- - id: metadata
- uses: dependabot/fetch-metadata@v2
- with:
- github-token: "${{ secrets.GITHUB_TOKEN }}"
- - run: |
- echo "Approving..."
- gh pr review --approve "$PR_URL"
- echo "Merging..."
- gh pr merge --squash --auto "$PR_URL"
- env:
- PR_URL: ${{github.event.pull_request.html_url}}
- GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
@@ -22,15 +22,15 @@ jobs:
github.event.comment.body == 'recheck' ||
github.event.comment.body == 'I have read the Contributor License Agreement (CLA) and hereby sign the CLA.' ||
github.event_name == 'pull_request_target'
- uses: contributor-assistant/github-action@v2.6.1
+ uses: contributor-assistant/github-action@ca4a40a7d1004f18d9960b404b97e5f30a505a08 # v2.6.1
env:
- GITHUB_TOKEN: ${{ secrets.CRUSH_CLA_BOT }}
+ GITHUB_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
with:
path-to-signatures: ".github/cla-signatures.json"
path-to-document: "https://github.com/charmbracelet/crush/blob/main/CLA.md"
branch: "main"
allowlist: charmcli,charmcrush,dependabot[bot]
custom-pr-sign-comment: "I have read the Contributor License Agreement (CLA) and hereby sign the CLA."
- custom-notsigned-precomment: "Thank you for your submission. We really appreciate it! Like many open-source projects, we ask that you sign our [Contributor License Agreement](https://github.com/charmbracelet/crush/blob/main/CLA.md) before we can accept your contribution. You can sign the CLA by just posting a Pull Request comment same as the below format."
+ custom-notsigned-prcomment: "Thank you for your submission. We really appreciate it! Like many open-source projects, we ask that you sign our [Contributor License Agreement](https://github.com/charmbracelet/crush/blob/main/CLA.md) before we can accept your contribution. You can sign the CLA by just posting a Pull Request comment same as the below format."
lock-pullrequest-aftermerge: false
signed-commit-message: "chore(legal): @$contributorName has signed the CLA"
@@ -1,17 +0,0 @@
-name: dependabot-sync
-on:
- schedule:
- - cron: "0 0 * * 0" # every Sunday at midnight
- workflow_dispatch: # allows manual triggering
-
-permissions:
- contents: write
- pull-requests: write
-
-jobs:
- dependabot-sync:
- uses: charmbracelet/meta/.github/workflows/dependabot-sync.yml@main
- with:
- repo_name: ${{ github.event.repository.name }}
- secrets:
- gh_token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
@@ -1,4 +1,4 @@
-name: Issue Labeler
+name: labeler
on:
issues:
@@ -8,7 +8,7 @@ on:
workflow_dispatch:
inputs:
issue-number:
- description: Issue Number
+ description: "Issue/PR #"
required: true
type: string
@@ -20,7 +20,7 @@ jobs:
triage:
runs-on: ubuntu-latest
steps:
- - uses: github/issue-labeler@v3.4
+ - uses: github/issue-labeler@c1b0f9f52a63158c4adc09425e858e87b32e9685 # v3.4
with:
configuration-path: .github/labeler.yml
enable-versioned-regex: 0
@@ -1,8 +1,8 @@
name: lint-sync
on:
- schedule:
- # every Sunday at midnight
- - cron: "0 0 * * 0"
+ # schedule:
+ # # every Sunday at midnight
+ # - cron: "0 0 * * 0"
workflow_dispatch: # allows manual triggering
permissions:
@@ -8,4 +8,5 @@ jobs:
uses: charmbracelet/meta/.github/workflows/lint.yml@main
with:
golangci_path: .golangci.yml
+ golangci_version: v2.4
timeout: 10m
@@ -11,7 +11,7 @@ jobs:
outputs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 1
- id: check
@@ -15,6 +15,7 @@ jobs:
goreleaser:
uses: charmbracelet/meta/.github/workflows/goreleaser.yml@main
with:
+ go_version: "1.25"
macos_sign_entitlements: "./.github/entitlements.plist"
secrets:
docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -10,10 +10,10 @@ jobs:
update-schema:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
- token: ${{ secrets.CRUSH_CLA_BOT }}
- - uses: actions/setup-go@v5
+ token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
+ - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
with:
go-version-file: go.mod
- run: go run . schema > ./schema.json
@@ -13,6 +13,7 @@ metadata:
license: "FSL-1.1-MIT"
homepage: "https://charm.sh/crush"
description: "A powerful terminal-based AI assistant for developers, providing intelligent coding assistance directly in your terminal."
+ full_description: "A powerful terminal-based AI assistant for developers, providing intelligent coding assistance directly in your terminal."
maintainers:
- "kujtimiihoxha <kujtim@charm.sh>"
- "caarlos0 <carlos@charm.sh>" # for aur
@@ -43,6 +44,7 @@ before:
builds:
- env:
- CGO_ENABLED=0
+ - GOEXPERIMENT=greenteagc
goos:
- linux
- darwin
@@ -50,6 +52,7 @@ builds:
- freebsd
- openbsd
- netbsd
+ - android
goarch:
- amd64
- arm64
@@ -57,8 +60,19 @@ builds:
- arm
goarm:
- "7"
+ ignore:
+ - goos: android
+ goarch: amd64
+ - goos: android
+ goarch: arm
+ - goos: android
+ goarch: "386"
+ - goos: windows
+ goarch: arm
ldflags:
- -s -w -X github.com/charmbracelet/crush/internal/version.Version={{.Version}}
+ flags:
+ - -trimpath
archives:
- name_template: >-
@@ -85,6 +99,9 @@ checksum:
aur_sources:
- private_key: "{{ .Env.AUR_KEY }}"
git_url: "ssh://aur@aur.archlinux.org/crush.git"
+ commit_author:
+ name: "Charm"
+ email: "charmcli@users.noreply.github.com"
makedepends:
- go
- git
@@ -125,6 +142,9 @@ aur_sources:
aurs:
- private_key: "{{ .Env.AUR_KEY }}"
git_url: "ssh://aur@aur.archlinux.org/crush-bin.git"
+ commit_author:
+ name: "Charm"
+ email: "charmcli@users.noreply.github.com"
provides:
- crush
conflicts:
@@ -159,6 +179,9 @@ brews:
owner: charmbracelet
name: homebrew-tap
token: "{{ .Env.HOMEBREW_TAP_GITHUB_TOKEN }}"
+ commit_author:
+ name: "Charm"
+ email: "charmcli@users.noreply.github.com"
goarm: 7
extra_install: |-
bash_completion.install "completions/{{ .ProjectName }}.bash" => "{{ .ProjectName }}"
@@ -171,6 +194,9 @@ scoops:
owner: charmbracelet
name: scoop-bucket
token: "{{ .Env.HOMEBREW_TAP_GITHUB_TOKEN }}"
+ commit_author:
+ name: "Charm"
+ email: "charmcli@users.noreply.github.com"
npms:
- name: "@charmland/crush"
@@ -184,16 +210,17 @@ nfpms:
- deb
- rpm
- archlinux
+ - termux.deb
file_name_template: "{{ .ConventionalFileName }}"
contents:
- src: ./completions/crush.bash
- dst: /etc/bash_completion.d/crush
+ dst: '{{ if eq .Format "termux.deb" }}/data/data/com.termux/files/usr{{ end }}/etc/bash_completion.d/crush'
- src: ./completions/crush.fish
- dst: /usr/share/fish/vendor_completions.d/crush.fish
+ dst: '{{ if eq .Format "termux.deb" }}/data/data/com.termux/files{{ end }}/usr/share/fish/vendor_completions.d/crush.fish'
- src: ./completions/crush.zsh
- dst: /usr/share/zsh/site-functions/_crush
+ dst: '{{ if eq .Format "termux.deb" }}/data/data/com.termux/files{{ end }}/usr/share/zsh/site-functions/_crush'
- src: ./manpages/crush.1.gz
- dst: /usr/share/man/man1/crush.1.gz
+ dst: '{{ if eq .Format "termux.deb" }}/data/data/com.termux/files{{ end }}/usr/share/man/man1/crush.1.gz'
rpm:
signature:
key_file: '{{ if ne (index .Env "GPG_KEY_PATH") "" }}{{ .Env.GPG_KEY_PATH }}{{ else }}{{ end }}'
@@ -226,6 +253,9 @@ nix:
owner: "charmbracelet"
name: nur
token: "{{ .Env.HOMEBREW_TAP_GITHUB_TOKEN }}"
+ commit_author:
+ name: "Charm"
+ email: "charmcli@users.noreply.github.com"
license: fsl11Mit
extra_install: |-
installManPage ./manpages/crush.1.gz
@@ -234,6 +264,12 @@ nix:
winget:
- publisher: charmbracelet
copyright: Charmbracelet, Inc
+ publisher_url: https://charm.land
+ release_notes_url: "https://github.com/charmbracelet/crush/releases/tag/{{.Tag}}"
+ license_url: https://github.com/charmbracelet/crush/blob/main/LICENSE.md
+ commit_author:
+ name: "Charm"
+ email: "charmcli@users.noreply.github.com"
repository:
owner: "charmbracelet"
name: winget-pkgs
@@ -4,6 +4,8 @@
- **Build**: `go build .` or `go run .`
- **Test**: `task test` or `go test ./...` (run single test: `go test ./internal/llm/prompt -run TestGetContextFromPaths`)
+- **Update Golden Files**: `go test ./... -update` (regenerates .golden files when test output changes)
+ - Update specific package: `go test ./internal/tui/components/core -update` (in this case, we're updating "core")
- **Lint**: `task lint-fix`
- **Format**: `task fmt` (gofumpt -w .)
- **Dev**: `task dev` (runs with profiling enabled)
@@ -57,3 +59,7 @@ func TestYourFunction(t *testing.T) {
- If `goimports` is not available, use `gofmt`.
- You can also use `task fmt` to run `gofumpt -w .` on the entire project,
as long as `gofumpt` is on the `PATH`.
+
+## Committing
+
+- ALWAYS use semantic commits (`fix:`, `feat:`, `chore:`, `refactor:`, `docs:`, `sec:`, etc).
@@ -3,10 +3,11 @@
<p align="center">
<a href="https://stuff.charm.sh/crush/charm-crush.png"><img width="450" alt="Charm Crush Logo" src="https://github.com/user-attachments/assets/adc1a6f4-b284-4603-836c-59038caa2e8b" /></a><br />
<a href="https://github.com/charmbracelet/crush/releases"><img src="https://img.shields.io/github/release/charmbracelet/crush" alt="Latest Release"></a>
- <a href="https://github.com/charmbracelet/crush/actions"><img src="https://github.com/charmbracelet/crush/workflows/build/badge.svg" alt="Build Status"></a>
+ <a href="https://github.com/charmbracelet/crush/actions"><img src="https://github.com/charmbracelet/crush/actions/workflows/build.yml/badge.svg" alt="Build Status"></a>
</p>
<p align="center">Your new coding bestie, now available in your favourite terminal.<br />Your tools, your code, and your workflows, wired into your LLM of choice.</p>
+<p align="center">你的新编程伙伴,现在就在你最爱的终端中。<br />你的工具、代码和工作流,都与你选择的 LLM 模型紧密相连。</p>
<p align="center"><img width="800" alt="Crush Demo" src="https://github.com/user-attachments/assets/58280caf-851b-470a-b6f7-d5c4ea8a1968" /></p>
@@ -37,9 +38,20 @@ yay -S crush-bin
nix run github:numtide/nix-ai-tools#crush
```
+Windows users:
+
+```bash
+# Winget
+winget install charmbracelet.crush
+
+# Scoop
+scoop bucket add charm https://github.com/charmbracelet/scoop-bucket.git
+scoop install crush
+```
+
<details>
<summary><strong>Nix (NUR)</strong></summary>
-
+
Crush is available via [NUR](https://github.com/nix-community/NUR) in `nur.repos.charmbracelet.crush`.
You can also try out Crush via `nix-shell`:
@@ -53,6 +65,61 @@ nix-channel --update
nix-shell -p '(import <nur> { pkgs = import <nixpkgs> {}; }).repos.charmbracelet.crush'
```
+### NixOS & Home Manager Module Usage via NUR
+
+Crush provides NixOS and Home Manager modules via NUR.
+You can use these modules directly in your flake by importing them from NUR. Since it auto-detects whether it's a Home Manager or NixOS context, you can use the import the exact same way :)
+
+```nix
+{
+ inputs = {
+ nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
+ nur.url = "github:nix-community/NUR";
+ };
+
+ outputs = { self, nixpkgs, nur, ... }: {
+ nixosConfigurations.your-hostname = nixpkgs.lib.nixosSystem {
+ system = "x86_64-linux";
+ modules = [
+ nur.modules.nixos.default
+ nur.repos.charmbracelet.modules.crush
+ {
+ programs.crush = {
+ enable = true;
+ settings = {
+ providers = {
+ openai = {
+ id = "openai";
+ name = "OpenAI";
+ base_url = "https://api.openai.com/v1";
+ type = "openai";
+ api_key = "sk-fake123456789abcdef...";
+ models = [
+ {
+ id = "gpt-4";
+ name = "GPT-4";
+ }
+ ];
+ };
+ };
+ lsp = {
+ go = { command = "gopls"; enabled = true; };
+ nix = { command = "nil"; enabled = true; };
+ };
+ options = {
+ context_paths = [ "/etc/nixos/configuration.nix" ];
+ tui = { compact_mode = true; };
+ debug = false;
+ };
+ };
+ };
+ }
+ ];
+ };
+ };
+}
+```
+
</details>
<details>
@@ -108,27 +175,31 @@ Crush. You'll be prompted to enter your API key.
That said, you can also set environment variables for preferred providers.
-| Environment Variable | Provider |
-| -------------------------- | -------------------------------------------------- |
-| `ANTHROPIC_API_KEY` | Anthropic |
-| `OPENAI_API_KEY` | OpenAI |
-| `OPENROUTER_API_KEY` | OpenRouter |
-| `GEMINI_API_KEY` | Google Gemini |
-| `VERTEXAI_PROJECT` | Google Cloud VertexAI (Gemini) |
-| `VERTEXAI_LOCATION` | Google Cloud VertexAI (Gemini) |
-| `GROQ_API_KEY` | Groq |
-| `AWS_ACCESS_KEY_ID` | AWS Bedrock (Claude) |
-| `AWS_SECRET_ACCESS_KEY` | AWS Bedrock (Claude) |
-| `AWS_REGION` | AWS Bedrock (Claude) |
-| `AZURE_OPENAI_ENDPOINT` | Azure OpenAI models |
-| `AZURE_OPENAI_API_KEY` | Azure OpenAI models (optional when using Entra ID) |
-| `AZURE_OPENAI_API_VERSION` | Azure OpenAI models |
+| Environment Variable | Provider |
+| --------------------------- | -------------------------------------------------- |
+| `ANTHROPIC_API_KEY` | Anthropic |
+| `OPENAI_API_KEY` | OpenAI |
+| `OPENROUTER_API_KEY` | OpenRouter |
+| `GEMINI_API_KEY` | Google Gemini |
+| `CEREBRAS_API_KEY` | Cerebras |
+| `HF_TOKEN` | Huggingface Inference |
+| `VERTEXAI_PROJECT` | Google Cloud VertexAI (Gemini) |
+| `VERTEXAI_LOCATION` | Google Cloud VertexAI (Gemini) |
+| `GROQ_API_KEY` | Groq |
+| `AWS_ACCESS_KEY_ID` | AWS Bedrock (Claude) |
+| `AWS_SECRET_ACCESS_KEY` | AWS Bedrock (Claude) |
+| `AWS_REGION` | AWS Bedrock (Claude) |
+| `AWS_PROFILE`               | Custom AWS Profile                                  |
+| `AZURE_OPENAI_API_ENDPOINT` | Azure OpenAI models |
+| `AZURE_OPENAI_API_KEY` | Azure OpenAI models (optional when using Entra ID) |
+| `AZURE_OPENAI_API_VERSION` | Azure OpenAI models |
### By the Way
Is there a provider you’d like to see in Crush? Is there an existing model that needs an update?
-Crush’s default model listing is managed in [Catwalk](https://github.com/charmbracelet/catwalk), an community-supported, open source repository of Crush-compatible models, and you’re welcome to contribute.
+Crush’s default model listing is managed in [Catwalk](https://github.com/charmbracelet/catwalk), a community-supported, open source repository of Crush-compatible models, and you’re welcome to contribute.
<a href="https://github.com/charmbracelet/catwalk"><img width="174" height="174" alt="Catwalk Badge" src="https://github.com/user-attachments/assets/95b49515-fe82-4409-b10d-5beb0873787d" /></a>
@@ -138,19 +209,29 @@ Crush runs great with no configuration. That said, if you do need or want to
customize Crush, configuration can be added either local to the project itself,
or globally, with the following priority:
-1. `./.crush.json`
-2. `./crush.json`
-3. `$HOME/.config/crush/crush.json`
+1. `.crush.json`
+2. `crush.json`
+3. `$HOME/.config/crush/crush.json` (Windows: `%USERPROFILE%\AppData\Local\crush\crush.json`)
Configuration itself is stored as a JSON object:
```json
{
- "this-setting": { }
- "that-setting": { }
+ "this-setting": { "this": "that" },
+ "that-setting": ["ceci", "cela"]
}
```
+As an additional note, Crush also stores ephemeral data, such as application state, in one additional location:
+
+```bash
+# Unix
+$HOME/.local/share/crush/crush.json
+
+# Windows
+%LOCALAPPDATA%\crush\crush.json
+```
+
### LSPs
Crush can use LSPs for additional context to help inform its decisions, just
@@ -161,7 +242,10 @@ like you would. LSPs can be added manually like so:
"$schema": "https://charm.land/crush.json",
"lsp": {
"go": {
- "command": "gopls"
+ "command": "gopls",
+ "env": {
+ "GOTOOLCHAIN": "go1.24.5"
+ }
},
"typescript": {
"command": "typescript-language-server",
@@ -189,6 +273,8 @@ using `$(echo $VAR)` syntax.
"type": "stdio",
"command": "node",
"args": ["/path/to/mcp-server.js"],
+ "timeout": 120,
+ "disabled": false,
"env": {
"NODE_ENV": "production"
}
@@ -196,6 +282,8 @@ using `$(echo $VAR)` syntax.
"github": {
"type": "http",
"url": "https://example.com/mcp/",
+ "timeout": 120,
+ "disabled": false,
"headers": {
"Authorization": "$(echo Bearer $EXAMPLE_MCP_TOKEN)"
}
@@ -203,6 +291,8 @@ using `$(echo $VAR)` syntax.
"streaming-service": {
"type": "sse",
"url": "https://example.com/mcp/sse",
+ "timeout": 120,
+ "disabled": false,
"headers": {
"API-Key": "$(echo $API_KEY)"
}
@@ -221,10 +311,10 @@ control but don't want Crush to consider when providing context.
The `.crushignore` file uses the same syntax as `.gitignore` and can be placed
in the root of your project or in subdirectories.
-### Whitelisting Tools
+### Allowing Tools
By default, Crush will ask you for permission before running tool calls. If
-you'd like, you can whitelist tools to be executed without prompting you for
+you'd like, you can allow tools to be executed without prompting you for
permissions. Use this with care.
```json
@@ -245,6 +335,74 @@ permissions. Use this with care.
You can also skip all permission prompts entirely by running Crush with the
`--yolo` flag. Be very, very careful with this feature.
+### Attribution Settings
+
+By default, Crush adds attribution information to Git commits and pull requests
+it creates. You can customize this behavior with the `attribution` option:
+
+```json
+{
+ "$schema": "https://charm.land/crush.json",
+ "options": {
+ "attribution": {
+ "co_authored_by": true,
+ "generated_with": true
+ }
+ }
+}
+```
+
+- `co_authored_by`: When true (default), adds `Co-Authored-By: Crush <crush@charm.land>` to commit messages
+- `generated_with`: When true (default), adds `💘 Generated with Crush` line to commit messages and PR descriptions
+
+### Local Models
+
+Local models can also be configured via OpenAI-compatible API. Here are two common examples:
+
+#### Ollama
+
+```json
+{
+ "providers": {
+ "ollama": {
+ "name": "Ollama",
+ "base_url": "http://localhost:11434/v1/",
+ "type": "openai",
+ "models": [
+ {
+ "name": "Qwen 3 30B",
+ "id": "qwen3:30b",
+ "context_window": 256000,
+ "default_max_tokens": 20000
+ }
+ ]
+ }
+ }
+}
+```
+
+#### LM Studio
+
+```json
+{
+ "providers": {
+ "lmstudio": {
+ "name": "LM Studio",
+ "base_url": "http://localhost:1234/v1/",
+ "type": "openai",
+ "models": [
+ {
+ "name": "Qwen 3 30B",
+ "id": "qwen/qwen3-30b-a3b-2507",
+ "context_window": 256000,
+ "default_max_tokens": 20000
+ }
+ ]
+ }
+ }
+}
+```
+
### Custom Providers
Crush supports custom provider configurations for both OpenAI-compatible and
@@ -314,6 +472,48 @@ Custom Anthropic-compatible providers follow this format:
}
```
+### Amazon Bedrock
+
+Crush currently supports running Anthropic models through Bedrock, with caching disabled.
+
+- A Bedrock provider will appear once you have AWS configured, i.e. `aws configure`
+- Crush also expects the `AWS_REGION` or `AWS_DEFAULT_REGION` to be set
+- To use a specific AWS profile set `AWS_PROFILE` in your environment, i.e. `AWS_PROFILE=myprofile crush`
+
+### Vertex AI Platform
+
+Vertex AI will appear in the list of available providers when `VERTEXAI_PROJECT` and `VERTEXAI_LOCATION` are set. You will also need to be authenticated:
+
+```bash
+gcloud auth application-default login
+```
+
+To add specific models to the configuration, configure as such:
+
+```json
+{
+ "$schema": "https://charm.land/crush.json",
+ "providers": {
+ "vertexai": {
+ "models": [
+ {
+ "id": "claude-sonnet-4@20250514",
+ "name": "VertexAI Sonnet 4",
+ "cost_per_1m_in": 3,
+ "cost_per_1m_out": 15,
+ "cost_per_1m_in_cached": 3.75,
+ "cost_per_1m_out_cached": 0.3,
+ "context_window": 200000,
+ "default_max_tokens": 50000,
+ "can_reason": true,
+ "supports_attachments": true
+ }
+ ]
+ }
+ }
+}
+```
+
## Logging
Sometimes you need to look at logs. Luckily, Crush logs all sorts of
@@ -345,6 +545,105 @@ config:
}
```
+## Provider Auto-Updates
+
+By default, Crush automatically checks for the latest and greatest list of
+providers and models from [Catwalk](https://github.com/charmbracelet/catwalk),
+the open source Crush provider database. This means that when new providers and
+models are available, or when model metadata changes, Crush automatically
+updates your local configuration.
+
+### Disabling automatic provider updates
+
+For those with restricted internet access, or those who prefer to work in
+air-gapped environments, this might not be what you want, and this feature can
+be disabled.
+
+To disable automatic provider updates, set `disable_provider_auto_update` in
+your `crush.json` config:
+
+```json
+{
+ "$schema": "https://charm.land/crush.json",
+ "options": {
+ "disable_provider_auto_update": true
+ }
+}
+```
+
+Or set the `CRUSH_DISABLE_PROVIDER_AUTO_UPDATE` environment variable:
+
+```bash
+export CRUSH_DISABLE_PROVIDER_AUTO_UPDATE=1
+```
+
+### Manually updating providers
+
+Manually updating providers is possible with the `crush update-providers`
+command:
+
+```bash
+# Update providers remotely from Catwalk.
+crush update-providers
+
+# Update providers from a custom Catwalk base URL.
+crush update-providers https://example.com/
+
+# Update providers from a local file.
+crush update-providers /path/to/local-providers.json
+
+# Reset providers to the embedded version, embedded in crush at build time.
+crush update-providers embedded
+
+# For more info:
+crush update-providers --help
+```
+
+## Metrics
+
+Crush records pseudonymous usage metrics (tied to a device-specific hash),
+which maintainers rely on to inform development and support priorities. The
+metrics include solely usage metadata; prompts and responses are NEVER
+collected.
+
+Details on exactly what’s collected are in the source code ([here](https://github.com/charmbracelet/crush/tree/main/internal/event)
+and [here](https://github.com/charmbracelet/crush/blob/main/internal/llm/agent/event.go)).
+
+You can opt out of metrics collection at any time by setting the following
+environment variable:
+
+```bash
+export CRUSH_DISABLE_METRICS=1
+```
+
+Or by setting the following in your config:
+
+```json
+{
+ "options": {
+ "disable_metrics": true
+ }
+}
+```
+
+Crush also respects the [`DO_NOT_TRACK`](https://consoledonottrack.com)
+convention which can be enabled via `export DO_NOT_TRACK=1`.
+
+## A Note on Claude Max and GitHub Copilot
+
+Crush only supports model providers through official, compliant APIs. We do not
+support or endorse any methods that rely on personal Claude Max and GitHub
+Copilot accounts or OAuth workarounds, which violate Anthropic and
+Microsoft’s Terms of Service.
+
+We’re committed to building sustainable, trusted integrations with model
+providers. If you’re a provider interested in working with us,
+[reach out](mailto:vt100@charm.sh).
+
+## Contributing
+
+See the [contributing guide](https://github.com/charmbracelet/crush?tab=contributing-ov-file#contributing).
+
## Whatcha think?
We’d love to hear your thoughts on this project. Need help? We gotchu. You can find us on:
@@ -353,12 +652,13 @@ We’d love to hear your thoughts on this project. Need help? We gotchu. You can
- [Discord][discord]
- [Slack](https://charm.land/slack)
- [The Fediverse](https://mastodon.social/@charmcli)
+- [Bluesky](https://bsky.app/profile/charm.land)
[discord]: https://charm.land/discord
## License
-[FSL-1.1-MIT](https://github.com/charmbracelet/crush/raw/main/LICENSE)
+[FSL-1.1-MIT](https://github.com/charmbracelet/crush/raw/main/LICENSE.md)
---
@@ -2,16 +2,43 @@
version: "3"
+env:
+ CGO_ENABLED: 0
+ GOEXPERIMENT: greenteagc
+
tasks:
+ lint:install:
+ desc: Install golangci-lint
+ cmds:
+ - go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@latest
+ env:
+ GOTOOLCHAIN: go1.25.0
+
lint:
desc: Run base linters
cmds:
- golangci-lint run --path-mode=abs --config=".golangci.yml" --timeout=5m
+ env:
+ GOEXPERIMENT: null
lint-fix:
desc: Run base linters and fix issues
cmds:
- golangci-lint run --path-mode=abs --config=".golangci.yml" --timeout=5m --fix
+ env:
+ GOEXPERIMENT: null
+
+ build:
+ desc: Run build
+ cmds:
+ - go build .
+ generates:
+ - crush
+
+ run:
+ desc: Run build
+ cmds:
+ - go run .
test:
desc: Run tests
@@ -30,6 +57,11 @@ tasks:
cmds:
- go run .
+ install:
+ desc: Install the application
+ cmds:
+ - go install -v .
+
profile:cpu:
desc: 10s CPU profile
cmds:
@@ -50,3 +82,23 @@ tasks:
cmds:
- go run main.go schema > schema.json
- echo "Generated schema.json"
+ generates:
+ - schema.json
+
+ release:
+ desc: Create and push a new tag following semver
+ vars:
+ NEXT:
+ sh: go run github.com/caarlos0/svu/v3@latest next --always
+ prompt: "This will release {{.NEXT}}. Continue?"
+ preconditions:
+ - sh: '[ $(git symbolic-ref --short HEAD) = "main" ]'
+ msg: Not on main branch
+ - sh: "[ $(git status --porcelain=2 | wc -l) = 0 ]"
+ msg: "Git is dirty"
+ cmds:
+ - git commit --allow-empty -m "{{.NEXT}}"
+ - git tag -d nightly
+ - git tag --sign {{.NEXT}} {{.CLI_ARGS}}
+ - echo "Pushing {{.NEXT}}..."
+ - git push origin --tags
@@ -1,8 +1,6 @@
{
"$schema": "https://charm.land/crush.json",
"lsp": {
- "Go": {
- "command": "gopls"
- }
+ "gopls": {}
}
}
@@ -1,73 +1,59 @@
module github.com/charmbracelet/crush
-go 1.24.3
+go 1.25.0
require (
github.com/JohannesKaufmann/html-to-markdown v1.6.0
github.com/MakeNowJust/heredoc v1.0.0
- github.com/PuerkitoBio/goquery v1.9.2
+ github.com/PuerkitoBio/goquery v1.10.3
github.com/alecthomas/chroma/v2 v2.20.0
- github.com/anthropics/anthropic-sdk-go v1.6.2
+ github.com/anthropics/anthropic-sdk-go v1.12.0
github.com/atotto/clipboard v0.1.4
github.com/aymanbagabas/go-udiff v0.3.1
- github.com/bmatcuk/doublestar/v4 v4.9.0
- github.com/charlievieth/fastwalk v1.0.11
- github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250716191546-1e2ffbbcf5c5
- github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250730165737-56ff7146d52d
- github.com/charmbracelet/catwalk v0.3.5
- github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674
- github.com/charmbracelet/glamour/v2 v2.0.0-20250516160903-6f1e2c8f9ebe
- github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0
+ github.com/bmatcuk/doublestar/v4 v4.9.1
+ github.com/charlievieth/fastwalk v1.0.14
+ github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2
+ github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e
+ github.com/charmbracelet/catwalk v0.6.1
+ github.com/charmbracelet/fang v0.4.2
+ github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018
+ github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250917201909-41ff0bf215ea
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706
- github.com/charmbracelet/x/ansi v0.9.3
+ github.com/charmbracelet/x/ansi v0.10.1
github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a
github.com/disintegration/imageorient v0.0.0-20180920195336-8147d86e83ec
- github.com/fsnotify/fsnotify v1.9.0
github.com/google/uuid v1.6.0
github.com/invopop/jsonschema v0.13.0
github.com/joho/godotenv v1.5.1
- github.com/mark3labs/mcp-go v0.36.0
+ github.com/mark3labs/mcp-go v0.41.0
github.com/muesli/termenv v0.16.0
- github.com/ncruces/go-sqlite3 v0.25.0
+ github.com/ncruces/go-sqlite3 v0.29.0
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646
github.com/nxadm/tail v1.4.11
- github.com/openai/openai-go v1.11.1
- github.com/pressly/goose/v3 v3.24.2
- github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c
+ github.com/openai/openai-go v1.12.0
+ github.com/pressly/goose/v3 v3.25.0
+ github.com/qjebbs/go-jsons v1.0.0-alpha.4
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06
github.com/sahilm/fuzzy v0.1.1
- github.com/spf13/cobra v1.9.1
+ github.com/spf13/cobra v1.10.1
github.com/srwiley/oksvg v0.0.0-20221011165216-be6e8873101c
github.com/srwiley/rasterx v0.0.0-20220730225603-2ab79fcdd4ef
- github.com/stretchr/testify v1.10.0
+ github.com/stretchr/testify v1.11.1
github.com/tidwall/sjson v1.2.5
- github.com/u-root/u-root v0.14.1-0.20250724181933-b01901710169
github.com/zeebo/xxh3 v1.0.2
gopkg.in/natefinch/lumberjack.v2 v2.2.1
- mvdan.cc/sh/v3 v3.12.1-0.20250726150758-e256f53bade8
-)
-
-require (
- cloud.google.com/go/auth/oauth2adapt v0.2.6 // indirect
- github.com/bahlo/generic-list-go v0.2.0 // indirect
- github.com/buger/jsonparser v1.1.1 // indirect
- github.com/mailru/easyjson v0.7.7 // indirect
- github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
- go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0 // indirect
- golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 // indirect
- golang.org/x/oauth2 v0.25.0 // indirect
- golang.org/x/time v0.8.0 // indirect
- google.golang.org/api v0.211.0 // indirect
+ mvdan.cc/sh/v3 v3.12.1-0.20250902163504-3cf4fd5717a5
)
require (
cloud.google.com/go v0.116.0 // indirect
cloud.google.com/go/auth v0.13.0 // indirect
+ cloud.google.com/go/auth/oauth2adapt v0.2.6 // indirect
cloud.google.com/go/compute/metadata v0.6.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
- github.com/andybalholm/cascadia v1.3.2 // indirect
+ github.com/andybalholm/cascadia v1.3.3 // indirect
github.com/aws/aws-sdk-go-v2 v1.30.3 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.3 // indirect
github.com/aws/aws-sdk-go-v2/config v1.27.27 // indirect
@@ -84,20 +70,25 @@ require (
github.com/aws/smithy-go v1.20.3 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/aymerick/douceur v0.2.0 // indirect
- github.com/charmbracelet/colorprofile v0.3.1 // indirect
- github.com/charmbracelet/ultraviolet v0.0.0-20250731212901-76da584cc9a5 // indirect
- github.com/charmbracelet/x/cellbuf v0.0.14-0.20250516160309-24eee56f89fa // indirect
- github.com/charmbracelet/x/exp/slice v0.0.0-20250611152503-f53cdd7e01ef
+ github.com/bahlo/generic-list-go v0.2.0 // indirect
+ github.com/buger/jsonparser v1.1.1 // indirect
+ github.com/charmbracelet/colorprofile v0.3.2 // indirect
+ github.com/charmbracelet/ultraviolet v0.0.0-20250915111650-81d4262876ef
+ github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a // indirect
+ github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d
+ github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4
github.com/charmbracelet/x/term v0.2.1
github.com/charmbracelet/x/termios v0.1.1 // indirect
- github.com/charmbracelet/x/windows v0.2.1 // indirect
+ github.com/charmbracelet/x/windows v0.2.2 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
+ github.com/denisbrodbeck/machineid v1.0.1
github.com/disintegration/gift v1.1.2 // indirect
github.com/dlclark/regexp2 v1.11.5 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
+ github.com/fsnotify/fsnotify v1.9.0 // indirect
github.com/go-logfmt/logfmt v0.6.0 // indirect
- github.com/go-logr/logr v1.4.2 // indirect
+ github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/google/go-cmp v0.7.0 // indirect
github.com/google/s2a-go v0.1.8 // indirect
@@ -105,50 +96,67 @@ require (
github.com/googleapis/gax-go/v2 v2.14.1 // indirect
github.com/gorilla/css v1.0.1 // indirect
github.com/gorilla/websocket v1.5.3 // indirect
+ github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
+ github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/cpuid/v2 v2.0.9 // indirect
- github.com/lucasb-eyer/go-colorful v1.2.0
+ github.com/klauspost/pgzip v1.2.6 // indirect
+ github.com/lucasb-eyer/go-colorful v1.3.0
+ github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mfridman/interpolate v0.0.2 // indirect
github.com/microcosm-cc/bluemonday v1.0.27 // indirect
+ github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/muesli/cancelreader v0.2.2 // indirect
github.com/muesli/mango v0.1.0 // indirect
github.com/muesli/mango-cobra v1.2.0 // indirect
github.com/muesli/mango-pflag v0.1.0 // indirect
github.com/muesli/roff v0.1.0 // indirect
github.com/ncruces/julianday v1.0.0 // indirect
+ github.com/pierrec/lz4/v4 v4.1.22 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
+ github.com/posthog/posthog-go v1.6.10
github.com/rivo/uniseg v0.4.7
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect
+ github.com/sourcegraph/jsonrpc2 v0.2.1 // indirect
github.com/spf13/cast v1.7.1 // indirect
- github.com/spf13/pflag v1.0.7 // indirect
+ github.com/spf13/pflag v1.0.9 // indirect
github.com/tetratelabs/wazero v1.9.0 // indirect
github.com/tidwall/gjson v1.18.0 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
+ github.com/u-root/u-root v0.14.1-0.20250807200646-5e7721023dc7 // indirect
+ github.com/u-root/uio v0.0.0-20240224005618-d2acac8f3701 // indirect
+ github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
github.com/yosida95/uritemplate/v3 v3.0.2 // indirect
github.com/yuin/goldmark v1.7.8 // indirect
github.com/yuin/goldmark-emoji v1.0.5 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
+ go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 // indirect
- go.opentelemetry.io/otel v1.35.0 // indirect
- go.opentelemetry.io/otel/metric v1.35.0 // indirect
- go.opentelemetry.io/otel/trace v1.35.0 // indirect
+ go.opentelemetry.io/otel v1.37.0 // indirect
+ go.opentelemetry.io/otel/metric v1.37.0 // indirect
+ go.opentelemetry.io/otel/trace v1.37.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
- golang.org/x/crypto v0.38.0 // indirect
+ golang.org/x/crypto v0.41.0 // indirect
+ golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect
golang.org/x/image v0.26.0 // indirect
- golang.org/x/net v0.40.0 // indirect
- golang.org/x/sync v0.16.0 // indirect
- golang.org/x/sys v0.34.0
- golang.org/x/term v0.32.0 // indirect
- golang.org/x/text v0.27.0
- google.golang.org/genai v1.3.0
+ golang.org/x/net v0.43.0 // indirect
+ golang.org/x/oauth2 v0.30.0 // indirect
+ golang.org/x/sync v0.17.0 // indirect
+ golang.org/x/sys v0.36.0 // indirect
+ golang.org/x/term v0.34.0 // indirect
+ golang.org/x/text v0.29.0
+ golang.org/x/time v0.8.0 // indirect
+ google.golang.org/api v0.211.0 // indirect
+ google.golang.org/genai v1.26.0
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 // indirect
google.golang.org/grpc v1.71.0 // indirect
- google.golang.org/protobuf v1.36.6 // indirect
+ google.golang.org/protobuf v1.36.8 // indirect
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
+ mvdan.cc/sh/moreinterp v0.0.0-20250902163504-3cf4fd5717a5
)
@@ -18,18 +18,20 @@ github.com/JohannesKaufmann/html-to-markdown v1.6.0 h1:04VXMiE50YYfCfLboJCLcgqF5
github.com/JohannesKaufmann/html-to-markdown v1.6.0/go.mod h1:NUI78lGg/a7vpEJTz/0uOcYMaibytE4BUOQS8k78yPQ=
github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ4pzQ=
github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE=
-github.com/PuerkitoBio/goquery v1.9.2 h1:4/wZksC3KgkQw7SQgkKotmKljk0M6V8TUvA8Wb4yPeE=
github.com/PuerkitoBio/goquery v1.9.2/go.mod h1:GHPCaP0ODyyxqcNoFGYlAprUFH81NuRPd0GX3Zu2Mvk=
+github.com/PuerkitoBio/goquery v1.10.3 h1:pFYcNSqHxBD06Fpj/KsbStFRsgRATgnf3LeXiUkhzPo=
+github.com/PuerkitoBio/goquery v1.10.3/go.mod h1:tMUX0zDMHXYlAQk6p35XxQMqMweEKB7iK7iLNd4RH4Y=
github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0=
github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
github.com/alecthomas/chroma/v2 v2.20.0 h1:sfIHpxPyR07/Oylvmcai3X/exDlE8+FA820NTz+9sGw=
github.com/alecthomas/chroma/v2 v2.20.0/go.mod h1:e7tViK0xh/Nf4BYHl00ycY6rV7b8iXBksI9E359yNmA=
github.com/alecthomas/repr v0.5.1 h1:E3G4t2QbHTSNpPKBgMTln5KLkZHLOcU7r37J4pXBuIg=
github.com/alecthomas/repr v0.5.1/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
-github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
-github.com/anthropics/anthropic-sdk-go v1.6.2 h1:oORA212y0/zAxe7OPvdgIbflnn/x5PGk5uwjF60GqXM=
-github.com/anthropics/anthropic-sdk-go v1.6.2/go.mod h1:3qSNQ5NrAmjC8A2ykuruSQttfqfdEYNZY5o8c0XSHB8=
+github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
+github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
+github.com/anthropics/anthropic-sdk-go v1.12.0 h1:xPqlGnq7rWrTiHazIvCiumA0u7mGQnwDQtvA1M82h9U=
+github.com/anthropics/anthropic-sdk-go v1.12.0/go.mod h1:WTz31rIUHUHqai2UslPpw5CwXrQP3geYBioRV4WOLvE=
github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
github.com/aws/aws-sdk-go-v2 v1.30.3 h1:jUeBtG0Ih+ZIFH0F4UkmL9w3cSpaMv9tYYDbzILP8dY=
@@ -68,52 +70,56 @@ github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuP
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk=
github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg=
-github.com/bmatcuk/doublestar/v4 v4.9.0 h1:DBvuZxjdKkRP/dr4GVV4w2fnmrk5Hxc90T51LZjv0JA=
-github.com/bmatcuk/doublestar/v4 v4.9.0/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
+github.com/bmatcuk/doublestar/v4 v4.9.1 h1:X8jg9rRZmJd4yRy7ZeNDRnM+T3ZfHv15JiBJ/avrEXE=
+github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs=
github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0=
-github.com/charlievieth/fastwalk v1.0.11 h1:5sLT/q9+d9xMdpKExawLppqvXFZCVKf6JHnr2u/ufj8=
-github.com/charlievieth/fastwalk v1.0.11/go.mod h1:yGy1zbxog41ZVMcKA/i8ojXLFsuayX5VvwhQVoj9PBI=
-github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250716191546-1e2ffbbcf5c5 h1:GTcMIfDQJKyNKS+xVt7GkNIwz+tBuQtIuiP50WpzNgs=
-github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250716191546-1e2ffbbcf5c5/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw=
-github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250730165737-56ff7146d52d h1:YMXLZHSo8DjytVY/b5dK8LDuyQsVUmBK3ydQMpu2Ui4=
-github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250730165737-56ff7146d52d/go.mod h1:XIQ1qQfRph6Z5o2EikCydjumo0oDInQySRHuPATzbZc=
-github.com/charmbracelet/catwalk v0.3.5 h1:ChMvA5ooTNZhDKFagmGNQgIZvZp8XjpdaJ+cDmhgCgA=
-github.com/charmbracelet/catwalk v0.3.5/go.mod h1:gUUCqqZ8bk4D7ZzGTu3I77k7cC2x4exRuJBN1H2u2pc=
-github.com/charmbracelet/colorprofile v0.3.1 h1:k8dTHMd7fgw4bnFd7jXTLZrSU/CQrKnL3m+AxCzDz40=
-github.com/charmbracelet/colorprofile v0.3.1/go.mod h1:/GkGusxNs8VB/RSOh3fu0TJmQ4ICMMPApIIVn0KszZ0=
-github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674 h1:+Cz+VfxD5DO+JT1LlswXWhre0HYLj6l2HW8HVGfMuC0=
-github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674/go.mod h1:9gCUAHmVx5BwSafeyNr3GI0GgvlB1WYjL21SkPp1jyU=
-github.com/charmbracelet/glamour/v2 v2.0.0-20250516160903-6f1e2c8f9ebe h1:i6ce4CcAlPpTj2ER69m1DBeLZ3RRcHnKExuwhKa3GfY=
-github.com/charmbracelet/glamour/v2 v2.0.0-20250516160903-6f1e2c8f9ebe/go.mod h1:p3Q+aN4eQKeM5jhrmXPMgPrlKbmc59rWSnMsSA3udhk=
-github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0 h1:sWRGoSw/JsO2S4t2+fmmEkRbkOxphI0AxZkQPQVKWbs=
-github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0/go.mod h1:XIuqKpZTUXtVyeyiN1k9Tc/U7EzfaDnVc34feFHfBws=
+github.com/charlievieth/fastwalk v1.0.14 h1:3Eh5uaFGwHZd8EGwTjJnSpBkfwfsak9h6ICgnWlhAyg=
+github.com/charlievieth/fastwalk v1.0.14/go.mod h1:diVcUreiU1aQ4/Wu3NbxxH4/KYdKpLDojrQ1Bb2KgNY=
+github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2 h1:973OHYuq2Jx9deyuPwe/6lsuQrDCatOsjP8uCd02URE=
+github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw=
+github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e h1:4BBnKWFwJ5FLyhw/ijFxKE04i9rubr8WIPR1kjO57iA=
+github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e/go.mod h1:F7AfLKYQqpM3NNBVs7ctW417tavhvoh9SBjsgtwpzbY=
+github.com/charmbracelet/catwalk v0.6.1 h1:2rRqUlwo+fdyIty8jEvUufRTgqBl0aea21LV6YQPqb0=
+github.com/charmbracelet/catwalk v0.6.1/go.mod h1:ReU4SdrLfe63jkEjWMdX2wlZMV3k9r11oQAmzN0m+KY=
+github.com/charmbracelet/colorprofile v0.3.2 h1:9J27WdztfJQVAQKX2WOlSSRB+5gaKqqITmrvb1uTIiI=
+github.com/charmbracelet/colorprofile v0.3.2/go.mod h1:mTD5XzNeWHj8oqHb+S1bssQb7vIHbepiebQ2kPKVKbI=
+github.com/charmbracelet/fang v0.4.2 h1:nWr7Tb82/TTNNGMGG35aTZ1X68loAOQmpb0qxkKXjas=
+github.com/charmbracelet/fang v0.4.2/go.mod h1:wHJKQYO5ReYsxx+yZl+skDtrlKO/4LLEQ6EXsdHhRhg=
+github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018 h1:PU4Zvpagsk5sgaDxn5W4sxHuLp9QRMBZB3bFSk40A4w=
+github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018/go.mod h1:Z/GLmp9fzaqX4ze3nXG7StgWez5uBM5XtlLHK8V/qSk=
+github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250917201909-41ff0bf215ea h1:g1HfUgSMvye8mgecMD1mPscpt+pzJoDEiSA+p2QXzdQ=
+github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250917201909-41ff0bf215ea/go.mod h1:ngHerf1JLJXBrDXdphn5gFrBPriCL437uwukd5c93pM=
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706 h1:WkwO6Ks3mSIGnGuSdKl9qDSyfbYK50z2wc2gGMggegE=
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706/go.mod h1:mjJGp00cxcfvD5xdCa+bso251Jt4owrQvuimJtVmEmM=
-github.com/charmbracelet/ultraviolet v0.0.0-20250731212901-76da584cc9a5 h1:FrEzjuUbVbGd8UtZBfK8mf/IA4ExT2i3/fi+SEOv2eM=
-github.com/charmbracelet/ultraviolet v0.0.0-20250731212901-76da584cc9a5/go.mod h1:XrrgNFfXLrFAyd9DUmrqVc3yQFVv8Uk+okj4PsNNzpc=
-github.com/charmbracelet/x/ansi v0.9.3 h1:BXt5DHS/MKF+LjuK4huWrC6NCvHtexww7dMayh6GXd0=
-github.com/charmbracelet/x/ansi v0.9.3/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
-github.com/charmbracelet/x/cellbuf v0.0.14-0.20250516160309-24eee56f89fa h1:lphz0Z3rsiOtMYiz8axkT24i9yFiueDhJbzyNUADmME=
-github.com/charmbracelet/x/cellbuf v0.0.14-0.20250516160309-24eee56f89fa/go.mod h1:xBlh2Yi3DL3zy/2n15kITpg0YZardf/aa/hgUaIM6Rk=
+github.com/charmbracelet/ultraviolet v0.0.0-20250915111650-81d4262876ef h1:VrWaUi2LXYLjfjCHowdSOEc6dQ9Ro14KY7Bw4IWd19M=
+github.com/charmbracelet/ultraviolet v0.0.0-20250915111650-81d4262876ef/go.mod h1:AThRsQH1t+dfyOKIwXRoJBniYFQUkUpQq4paheHMc2o=
+github.com/charmbracelet/x/ansi v0.10.1 h1:rL3Koar5XvX0pHGfovN03f5cxLbCF2YvLeyz7D2jVDQ=
+github.com/charmbracelet/x/ansi v0.10.1/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
+github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a h1:zYSNtEJM9jwHbJts2k+Hroj+xQwsW1yxc4Wopdv7KaI=
+github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a/go.mod h1:rc2bsPC6MWae3LdOxNO1mOb443NlMrrDL0xEya48NNc=
github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3 h1:1xwHZg6eMZ9Wv5TE1UGub6ARubyOd1Lo5kPUI/6VL50=
github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3/go.mod h1:T9jr8CzFpjhFVHjNjKwbAD7KwBNyFnj2pntAO7F2zw0=
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a h1:FsHEJ52OC4VuTzU8t+n5frMjLvpYWEznSr/u8tnkCYw=
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
-github.com/charmbracelet/x/exp/slice v0.0.0-20250611152503-f53cdd7e01ef h1:v7qwsZ2OxzlwvpKwz8dtZXp7fIJlcDEUOyFBNE4fz4Q=
-github.com/charmbracelet/x/exp/slice v0.0.0-20250611152503-f53cdd7e01ef/go.mod h1:vI5nDVMWi6veaYH+0Fmvpbe/+cv/iJfMntdh+N0+Tms=
+github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d h1:H2oh4WlSsXy8qwLd7I3eAvPd/X3S40aM9l+h47WF1eA=
+github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d/go.mod h1:vI5nDVMWi6veaYH+0Fmvpbe/+cv/iJfMntdh+N0+Tms=
+github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4 h1:ZhDGU688EHQXslD9KphRpXwK0pKP03egUoZAATUDlV0=
+github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4/go.mod h1:cmdl5zlP5mR8TF2Y68UKc7hdGUDiSJ2+4hk0h04Hsx4=
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
github.com/charmbracelet/x/termios v0.1.1 h1:o3Q2bT8eqzGnGPOYheoYS8eEleT5ZVNYNy8JawjaNZY=
github.com/charmbracelet/x/termios v0.1.1/go.mod h1:rB7fnv1TgOPOyyKRJ9o+AsTU/vK5WHJ2ivHeut/Pcwo=
-github.com/charmbracelet/x/windows v0.2.1 h1:3x7vnbpQrjpuq/4L+I4gNsG5htYoCiA5oe9hLjAij5I=
-github.com/charmbracelet/x/windows v0.2.1/go.mod h1:ptZp16h40gDYqs5TSawSVW+yiLB13j4kSMA0lSCHL0M=
+github.com/charmbracelet/x/windows v0.2.2 h1:IofanmuvaxnKHuV04sC0eBy/smG6kIKrWG2/jYn2GuM=
+github.com/charmbracelet/x/windows v0.2.2/go.mod h1:/8XtdKZzedat74NQFn0NGlGL4soHB0YQZrETF96h75k=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/creack/pty v1.1.24 h1:bJrF4RRfyJnbTJqzRLHzcGaZK1NeM5kTC9jGgovnR1s=
github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfvcwE=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/denisbrodbeck/machineid v1.0.1 h1:geKr9qtkB876mXguW2X6TU4ZynleN6ezuMSRhl4D7AQ=
+github.com/denisbrodbeck/machineid v1.0.1/go.mod h1:dJUwb7PTidGDeYyUBmXZ2GphQBbjJCrnectwCyxcUSI=
github.com/disintegration/gift v1.1.2 h1:9ZyHJr+kPamiH10FX3Pynt1AxFUob812bU9Wt4GMzhs=
github.com/disintegration/gift v1.1.2/go.mod h1:Jh2i7f7Q2BM7Ezno3PhfezbR1xpUg9dUg3/RlKGr4HI=
github.com/disintegration/imageorient v0.0.0-20180920195336-8147d86e83ec h1:YrB6aVr9touOt75I9O1SiancmR2GMg45U9UYf0gtgWg=
@@ -132,16 +138,17 @@ github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8
github.com/go-logfmt/logfmt v0.6.0 h1:wGYYu3uicYdqXVgoYbvnkrPVXkuLM1p1ifugDMEdRi4=
github.com/go-logfmt/logfmt v0.6.0/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
-github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
-github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
+github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
+github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-quicktest/qt v1.101.0 h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7eI=
github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow=
-github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
-github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
+github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
+github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/s2a-go v0.1.8 h1:zZDs9gcbt9ZPLV0ndSyQk6Kacx2g/X+SKYovpnz3SMM=
@@ -154,8 +161,11 @@ github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrk
github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA=
github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
+github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
+github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
+github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
@@ -165,8 +175,12 @@ github.com/invopop/jsonschema v0.13.0/go.mod h1:ffZ5Km5SWWRAIN6wbDXItl95euhFz2uO
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
+github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
+github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
github.com/klauspost/cpuid/v2 v2.0.9 h1:lgaqFMSdTdQYdZ04uHyN2d/eKdOMyi2YLSvlQIBFYa4=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
+github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU=
+github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
@@ -176,12 +190,12 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
-github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
-github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
+github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQwVHXptag=
+github.com/lucasb-eyer/go-colorful v1.3.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
-github.com/mark3labs/mcp-go v0.36.0 h1:rIZaijrRYPeSbJG8/qNDe0hWlGrCJ7FWHNMz2SQpTis=
-github.com/mark3labs/mcp-go v0.36.0/go.mod h1:T7tUa2jO6MavG+3P25Oy/jR7iCeJPHImCZHRymCn39g=
+github.com/mark3labs/mcp-go v0.41.0 h1:IFfJaovCet65F3av00bE1HzSnmHpMRWM1kz96R98I70=
+github.com/mark3labs/mcp-go v0.41.0/go.mod h1:T7tUa2jO6MavG+3P25Oy/jR7iCeJPHImCZHRymCn39g=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
@@ -190,6 +204,8 @@ github.com/mfridman/interpolate v0.0.2 h1:pnuTK7MQIxxFz1Gr+rjSIx9u7qVjf5VOoM/u6B
github.com/mfridman/interpolate v0.0.2/go.mod h1:p+7uk6oE07mpE/Ik1b8EckO0O4ZXiGAfshKBWLUM9Xg=
github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk=
github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA=
+github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
+github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
github.com/muesli/mango v0.1.0 h1:DZQK45d2gGbql1arsYA4vfg4d7I9Hfx5rX/GCmzsAvI=
@@ -202,8 +218,8 @@ github.com/muesli/roff v0.1.0 h1:YD0lalCotmYuF5HhZliKWlIx7IEhiXeSfq7hNjFqGF8=
github.com/muesli/roff v0.1.0/go.mod h1:pjAHQM9hdUUwm/krAfrLGgJkXJ+YuhtsfZ42kieB2Ig=
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
-github.com/ncruces/go-sqlite3 v0.25.0 h1:trugKUs98Zwy9KwRr/EUxZHL92LYt7UqcKqAfpGpK+I=
-github.com/ncruces/go-sqlite3 v0.25.0/go.mod h1:n6Z7036yFilJx04yV0mi5JWaF66rUmXn1It9Ux8dx68=
+github.com/ncruces/go-sqlite3 v0.29.0 h1:1tsLiagCoqZEfcHDeKsNSv5jvrY/Iu393pAnw2wLNJU=
+github.com/ncruces/go-sqlite3 v0.29.0/go.mod h1:r1hSvYKPNJ+OlUA1O3r8o9LAawzPAlqeZiIdxTBBBJ0=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/ncruces/julianday v1.0.0 h1:fH0OKwa7NWvniGQtxdJRxAgkBMolni2BjDHaWTxqt7M=
@@ -212,17 +228,21 @@ github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
github.com/nxadm/tail v1.4.11 h1:8feyoE3OzPrcshW5/MJ4sGESc5cqmGkGCWlco4l0bqY=
github.com/nxadm/tail v1.4.11/go.mod h1:OTaG3NK980DZzxbRq6lEuzgU+mug70nY11sMd4JXXHc=
-github.com/openai/openai-go v1.11.1 h1:fTQ4Sr9eoRiWFAoHzXiZZpVi6KtLeoTMyGrcOCudjNU=
-github.com/openai/openai-go v1.11.1/go.mod h1:g461MYGXEXBVdV5SaR/5tNzNbSfwTBBefwc+LlDCK0Y=
+github.com/openai/openai-go v1.12.0 h1:NBQCnXzqOTv5wsgNC36PrFEiskGfO5wccfCWDo9S1U0=
+github.com/openai/openai-go v1.12.0/go.mod h1:g461MYGXEXBVdV5SaR/5tNzNbSfwTBBefwc+LlDCK0Y=
+github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU=
+github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/pressly/goose/v3 v3.24.2 h1:c/ie0Gm8rnIVKvnDQ/scHErv46jrDv9b4I0WRcFJzYU=
-github.com/pressly/goose/v3 v3.24.2/go.mod h1:kjefwFB0eR4w30Td2Gj2Mznyw94vSP+2jJYkOVNbD1k=
-github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c h1:kmzxiX+OB0knCo1V0dkEkdPelzCdAzCURCfmFArn2/A=
-github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c/go.mod h1:wNJrtinHyC3YSf6giEh4FJN8+yZV7nXBjvmfjhBIcw4=
+github.com/posthog/posthog-go v1.6.10 h1:OA6bkiUg89rI7f5cSXbcrH5+wLinyS6hHplnD92Pu/M=
+github.com/posthog/posthog-go v1.6.10/go.mod h1:LcC1Nu4AgvV22EndTtrMXTy+7RGVC0MhChSw7Qk5XkY=
+github.com/pressly/goose/v3 v3.25.0 h1:6WeYhMWGRCzpyd89SpODFnCBCKz41KrVbRT58nVjGng=
+github.com/pressly/goose/v3 v3.25.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY=
+github.com/qjebbs/go-jsons v1.0.0-alpha.4 h1:Qsb4ohRUHQODIUAsJKdKJ/SIDbsO7oGOzsfy+h1yQZs=
+github.com/qjebbs/go-jsons v1.0.0-alpha.4/go.mod h1:wNJrtinHyC3YSf6giEh4FJN8+yZV7nXBjvmfjhBIcw4=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
@@ -243,13 +263,14 @@ github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE=
github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas=
+github.com/sourcegraph/jsonrpc2 v0.2.1 h1:2GtljixMQYUYCmIg7W9aF2dFmniq/mOr2T9tFRh6zSQ=
+github.com/sourcegraph/jsonrpc2 v0.2.1/go.mod h1:ZafdZgk/axhT1cvZAPOhw+95nz2I/Ra5qMlU4gTRwIo=
github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y=
github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
-github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
-github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
-github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
-github.com/spf13/pflag v1.0.7 h1:vN6T9TfwStFPFM5XzjsvmzZkLuaLX+HS+0SeFLRgU6M=
-github.com/spf13/pflag v1.0.7/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
+github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
+github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY=
+github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/srwiley/oksvg v0.0.0-20221011165216-be6e8873101c h1:km8GpoQut05eY3GiYWEedbTT0qnSxrCjsVbb7yKY1KE=
github.com/srwiley/oksvg v0.0.0-20221011165216-be6e8873101c/go.mod h1:cNQ3dwVJtS5Hmnjxy6AgTPd0Inb3pW05ftPSX7NZO7Q=
github.com/srwiley/rasterx v0.0.0-20220730225603-2ab79fcdd4ef h1:Ch6Q+AZUxDBCVqdkI8FSpFyZDtCVBc2VmejdNrm5rRQ=
@@ -258,8 +279,8 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
-github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
-github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
+github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/tetratelabs/wazero v1.9.0 h1:IcZ56OuxrtaEz8UYNRHBrUa9bYeX9oVY93KspZZBf/I=
github.com/tetratelabs/wazero v1.9.0/go.mod h1:TSbcXCfFP0L2FGkRPxHphadXPjo1T6W+CseNNY7EkjM=
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
@@ -272,8 +293,10 @@ github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
-github.com/u-root/u-root v0.14.1-0.20250724181933-b01901710169 h1:f4cp2yGKkMuGpCwAyNEjzcw8szgVXmemK/wfOu4l5gc=
-github.com/u-root/u-root v0.14.1-0.20250724181933-b01901710169/go.mod h1:/0Qr7qJeDwWxoKku2xKQ4Szc+SwBE3g9VE8jNiamsmc=
+github.com/u-root/u-root v0.14.1-0.20250807200646-5e7721023dc7 h1:ax+jBy7xFhh+Ka0IGLmH5mft+YDuqvzEjSgWuAP0nsM=
+github.com/u-root/u-root v0.14.1-0.20250807200646-5e7721023dc7/go.mod h1:/0Qr7qJeDwWxoKku2xKQ4Szc+SwBE3g9VE8jNiamsmc=
+github.com/u-root/uio v0.0.0-20240224005618-d2acac8f3701 h1:pyC9PaHYZFgEKFdlp3G8RaCKgVpHZnecvArXvPXcFkM=
+github.com/u-root/uio v0.0.0-20240224005618-d2acac8f3701/go.mod h1:P3a5rG4X7tI17Nn3aOIAYr5HbIMukwXG0urG0WuL8OA=
github.com/wk8/go-ordered-map/v2 v2.1.8 h1:5h/BUHu93oj4gIdvHHHGsScSTMijfx5PeYkE/fJgbpc=
github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
@@ -296,49 +319,60 @@ go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.5
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0/go.mod h1:B9yO6b04uB80CzjedvewuqDhxJxi11s7/GtiGa8bAjI=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 h1:TT4fX+nBOA/+LUkobKGW1ydGcn+G3vRw9+g5HwCphpk=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0/go.mod h1:L7UH0GbB0p47T4Rri3uHjbpCFYrVrwc1I25QhNPiGK8=
-go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ=
-go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y=
-go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M=
-go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE=
+go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ=
+go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I=
+go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE=
+go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E=
go.opentelemetry.io/otel/sdk v1.34.0 h1:95zS4k/2GOy069d321O8jWgYsW3MzVV+KuSPKp7Wr1A=
go.opentelemetry.io/otel/sdk v1.34.0/go.mod h1:0e/pNiaMAqaykJGKbi+tSjWfNNHMTxoC9qANsCzbyxU=
go.opentelemetry.io/otel/sdk/metric v1.34.0 h1:5CeK9ujjbFVL5c1PhLuStg1wxA7vQv7ce1EK0Gyvahk=
go.opentelemetry.io/otel/sdk/metric v1.34.0/go.mod h1:jQ/r8Ze28zRKoNRdkjCZxfs6YvBTG1+YIqyFVFYec5w=
-go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs=
-go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc=
+go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4=
+go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0=
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
-golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8=
-golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw=
-golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw=
-golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM=
+golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
+golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
+golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
+golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o=
+golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8=
golang.org/x/image v0.26.0 h1:4XjIFEZWQmCZi6Wv8BoxsDhRU3RVnLX04dToTDAEPlY=
golang.org/x/image v0.26.0/go.mod h1:lcxbMFAovzpnJxzXS3nyL83K27tmqtKzIJpctK8YO5c=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
+golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
+golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
-golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY=
-golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds=
-golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
-golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
+golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
+golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
+golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
+golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
+golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
-golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
+golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
+golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
+golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -349,47 +383,56 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA=
-golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
+golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k=
+golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
+golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
+golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
-golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg=
-golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ=
+golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
+golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4=
+golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
+golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
-golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4=
-golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU=
+golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
+golang.org/x/text v0.29.0 h1:1neNs90w9YzJ9BocxfsQNHKuAT4pkghyXc4nhZ6sJvk=
+golang.org/x/text v0.29.0/go.mod h1:7MhJOA9CD2qZyOKYazxdYMF85OwPdEr9jTtBpO7ydH4=
golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg=
golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
+golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
+golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.211.0 h1:IUpLjq09jxBSV1lACO33CGY3jsRcbctfGzhj+ZSE/Bg=
google.golang.org/api v0.211.0/go.mod h1:XOloB4MXFH4UTlQSGuNUxw0UT74qdENK8d6JNsXKLi0=
-google.golang.org/genai v1.3.0 h1:tXhPJF30skOjnnDY7ZnjK3q7IKy4PuAlEA0fk7uEaEI=
-google.golang.org/genai v1.3.0/go.mod h1:TyfOKRz/QyCaj6f/ZDt505x+YreXnY40l2I6k8TvgqY=
+google.golang.org/genai v1.26.0 h1:r4HGL54kFv/WCRMTAbZg05Ct+vXfhAbTRlXhFyBkEQo=
+google.golang.org/genai v1.26.0/go.mod h1:OClfdf+r5aaD+sCd4aUSkPzJItmg2wD/WON9lQnRPaY=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 h1:e0AIkUUhxyBKh6ssZNrAMeqhA7RKUj42346d1y02i2g=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg=
google.golang.org/grpc v1.71.0/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec=
-google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
-google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
+google.golang.org/protobuf v1.36.8 h1:xHScyCOEuuwZEc6UtSOvPbAT4zRh0xcNRYekJwfqyMc=
+google.golang.org/protobuf v1.36.8/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
@@ -404,13 +447,15 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-modernc.org/libc v1.61.13 h1:3LRd6ZO1ezsFiX1y+bHd1ipyEHIJKvuprv0sLTBwLW8=
-modernc.org/libc v1.61.13/go.mod h1:8F/uJWL/3nNil0Lgt1Dpz+GgkApWh04N3el3hxJcA6E=
+modernc.org/libc v1.66.3 h1:cfCbjTUcdsKyyZZfEUKfoHcP3S0Wkvz3jgSzByEWVCQ=
+modernc.org/libc v1.66.3/go.mod h1:XD9zO8kt59cANKvHPXpx7yS2ELPheAey0vjIuZOhOU8=
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
-modernc.org/memory v1.9.1 h1:V/Z1solwAVmMW1yttq3nDdZPJqV1rM05Ccq6KMSZ34g=
-modernc.org/memory v1.9.1/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
-modernc.org/sqlite v1.36.2 h1:vjcSazuoFve9Wm0IVNHgmJECoOXLZM1KfMXbcX2axHA=
-modernc.org/sqlite v1.36.2/go.mod h1:ADySlx7K4FdY5MaJcEv86hTJ0PjedAloTUuif0YS3ws=
-mvdan.cc/sh/v3 v3.12.1-0.20250726150758-e256f53bade8 h1:yOKqXg3uKDx7VxqnrKRUzB+InP3whTBi7jeggyFkfX0=
-mvdan.cc/sh/v3 v3.12.1-0.20250726150758-e256f53bade8/go.mod h1:Se6Cj17eYSn+sNooLZiEUnNNmNxg0imoYlTu4CyaGyg=
+modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=
+modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
+modernc.org/sqlite v1.38.2 h1:Aclu7+tgjgcQVShZqim41Bbw9Cho0y/7WzYptXqkEek=
+modernc.org/sqlite v1.38.2/go.mod h1:cPTJYSlgg3Sfg046yBShXENNtPrWrDX8bsbAQBzgQ5E=
+mvdan.cc/sh/moreinterp v0.0.0-20250902163504-3cf4fd5717a5 h1:mO2lyKtGwu4mGQ+Qqjx0+fd5UU5BXhX/rslFmxd5aco=
+mvdan.cc/sh/moreinterp v0.0.0-20250902163504-3cf4fd5717a5/go.mod h1:Of9PCedbLDYT8b3EyiYG64rNnx5nOp27OLCVdDrjJyo=
+mvdan.cc/sh/v3 v3.12.1-0.20250902163504-3cf4fd5717a5 h1:e7Z/Lgw/zMijvQBVrfh/vUDZ+9FpuSLrJDVGBuoJtuo=
+mvdan.cc/sh/v3 v3.12.1-0.20250902163504-3cf4fd5717a5/go.mod h1:P21wo2gLLe3426sP+CmANLBaixSEbRtPl35w3YlM6dg=
@@ -6,7 +6,6 @@ import (
"errors"
"fmt"
"log/slog"
- "maps"
"sync"
"time"
@@ -35,12 +34,7 @@ type App struct {
CoderAgent agent.Service
- LSPClients map[string]*lsp.Client
-
- clientsMutex sync.RWMutex
-
- watcherCancelFuncs *csync.Slice[context.CancelFunc]
- lspWatcherWG sync.WaitGroup
+ LSPClients *csync.Map[string, *lsp.Client]
config *config.Config
@@ -51,7 +45,7 @@ type App struct {
// global context and cleanup functions
globalCtx context.Context
- cleanupFuncs []func()
+ cleanupFuncs []func() error
}
// New initializes a new applcation instance.
@@ -71,14 +65,12 @@ func New(ctx context.Context, conn *sql.DB, cfg *config.Config) (*App, error) {
Messages: messages,
History: files,
Permissions: permission.NewPermissionService(cfg.WorkingDir(), skipPermissionsRequests, allowedTools),
- LSPClients: make(map[string]*lsp.Client),
+ LSPClients: csync.NewMap[string, *lsp.Client](),
globalCtx: ctx,
config: cfg,
- watcherCancelFuncs: csync.NewSlice[context.CancelFunc](),
-
events: make(chan tea.Msg, 100),
serviceEventsWG: &sync.WaitGroup{},
tuiWG: &sync.WaitGroup{},
@@ -92,6 +84,9 @@ func New(ctx context.Context, conn *sql.DB, cfg *config.Config) (*App, error) {
// Check for updates in the background.
go app.checkForUpdates(ctx)
+ // cleanup database upon app shutdown
+ app.cleanupFuncs = append(app.cleanupFuncs, conn.Close)
+
// TODO: remove the concept of agent config, most likely.
if cfg.IsConfigured() {
if err := app.InitCoderAgent(); err != nil {
@@ -158,7 +153,7 @@ func (app *App) RunNonInteractive(ctx context.Context, prompt string, quiet bool
}
messageEvents := app.Messages.Subscribe(ctx)
- readBts := 0
+ messageReadBytes := make(map[string]int)
for {
select {
@@ -174,11 +169,14 @@ func (app *App) RunNonInteractive(ctx context.Context, prompt string, quiet bool
}
msgContent := result.Message.Content().String()
+ readBts := messageReadBytes[result.Message.ID]
+
if len(msgContent) < readBts {
slog.Error("Non-interactive: message content is shorter than read bytes", "message_length", len(msgContent), "read_bytes", readBts)
return fmt.Errorf("message content is shorter than read bytes: %d < %d", len(msgContent), readBts)
}
fmt.Println(msgContent[readBts:])
+ messageReadBytes[result.Message.ID] = len(msgContent)
slog.Info("Non-interactive: run completed", "session_id", sess.ID)
return nil
@@ -187,9 +185,18 @@ func (app *App) RunNonInteractive(ctx context.Context, prompt string, quiet bool
msg := event.Payload
if msg.SessionID == sess.ID && msg.Role == message.Assistant && len(msg.Parts) > 0 {
stopSpinner()
- part := msg.Content().String()[readBts:]
+
+ content := msg.Content().String()
+ readBytes := messageReadBytes[msg.ID]
+
+ if len(content) < readBytes {
+ slog.Error("Non-interactive: message content is shorter than read bytes", "message_length", len(content), "read_bytes", readBytes)
+ return fmt.Errorf("message content is shorter than read bytes: %d < %d", len(content), readBytes)
+ }
+
+ part := content[readBytes:]
fmt.Print(part)
- readBts += len(part)
+ messageReadBytes[msg.ID] = len(content)
}
case <-ctx.Done():
@@ -211,9 +218,12 @@ func (app *App) setupEvents() {
setupSubscriber(ctx, app.serviceEventsWG, "permissions", app.Permissions.Subscribe, app.events)
setupSubscriber(ctx, app.serviceEventsWG, "permissions-notifications", app.Permissions.SubscribeNotifications, app.events)
setupSubscriber(ctx, app.serviceEventsWG, "history", app.History.Subscribe, app.events)
- cleanupFunc := func() {
+ setupSubscriber(ctx, app.serviceEventsWG, "mcp", agent.SubscribeMCPEvents, app.events)
+ setupSubscriber(ctx, app.serviceEventsWG, "lsp", SubscribeLSPEvents, app.events)
+ cleanupFunc := func() error {
cancel()
app.serviceEventsWG.Wait()
+ return nil
}
app.cleanupFuncs = append(app.cleanupFuncs, cleanupFunc)
}
@@ -225,9 +235,7 @@ func setupSubscriber[T any](
subscriber func(context.Context) <-chan pubsub.Event[T],
outputCh chan<- tea.Msg,
) {
- wg.Add(1)
- go func() {
- defer wg.Done()
+ wg.Go(func() {
subCh := subscriber(ctx)
for {
select {
@@ -250,7 +258,7 @@ func setupSubscriber[T any](
return
}
}
- }()
+ })
}
func (app *App) InitCoderAgent() error {
@@ -289,10 +297,11 @@ func (app *App) Subscribe(program *tea.Program) {
app.tuiWG.Add(1)
tuiCtx, tuiCancel := context.WithCancel(app.globalCtx)
- app.cleanupFuncs = append(app.cleanupFuncs, func() {
+ app.cleanupFuncs = append(app.cleanupFuncs, func() error {
slog.Debug("Cancelling TUI message handler")
tuiCancel()
app.tuiWG.Wait()
+ return nil
})
defer app.tuiWG.Done()
@@ -317,23 +326,10 @@ func (app *App) Shutdown() {
app.CoderAgent.CancelAll()
}
- for cancel := range app.watcherCancelFuncs.Seq() {
- cancel()
- }
-
- // Wait for all LSP watchers to finish.
- app.lspWatcherWG.Wait()
-
- // Get all LSP clients.
- app.clientsMutex.RLock()
- clients := make(map[string]*lsp.Client, len(app.LSPClients))
- maps.Copy(clients, app.LSPClients)
- app.clientsMutex.RUnlock()
-
// Shutdown all LSP clients.
- for name, client := range clients {
+ for name, client := range app.LSPClients.Seq2() {
shutdownCtx, cancel := context.WithTimeout(app.globalCtx, 5*time.Second)
- if err := client.Shutdown(shutdownCtx); err != nil {
+ if err := client.Close(shutdownCtx); err != nil {
slog.Error("Failed to shutdown LSP client", "name", name, "error", err)
}
cancel()
@@ -342,7 +338,9 @@ func (app *App) Shutdown() {
// Call cleanup functions.
for _, cleanup := range app.cleanupFuncs {
if cleanup != nil {
- cleanup()
+ if err := cleanup(); err != nil {
+ slog.Error("Failed to cleanup app properly on shutdown", "error", err)
+ }
}
}
}
@@ -5,39 +5,57 @@ import (
"log/slog"
"time"
- "github.com/charmbracelet/crush/internal/log"
+ "github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/crush/internal/lsp/watcher"
)
// initLSPClients initializes LSP clients.
func (app *App) initLSPClients(ctx context.Context) {
for name, clientConfig := range app.config.LSP {
- go app.createAndStartLSPClient(ctx, name, clientConfig.Command, clientConfig.Args...)
+ if clientConfig.Disabled {
+ slog.Info("Skipping disabled LSP client", "name", name)
+ continue
+ }
+ go app.createAndStartLSPClient(ctx, name, clientConfig)
}
slog.Info("LSP clients initialization started in background")
}
// createAndStartLSPClient creates a new LSP client, initializes it, and starts its workspace watcher
-func (app *App) createAndStartLSPClient(ctx context.Context, name string, command string, args ...string) {
- slog.Info("Creating LSP client", "name", name, "command", command, "args", args)
+func (app *App) createAndStartLSPClient(ctx context.Context, name string, config config.LSPConfig) {
+ slog.Info("Creating LSP client", "name", name, "command", config.Command, "fileTypes", config.FileTypes, "args", config.Args)
+
+ // Check if any root markers exist in the working directory (config now has defaults)
+ if !lsp.HasRootMarkers(app.config.WorkingDir(), config.RootMarkers) {
+ slog.Info("Skipping LSP client - no root markers found", "name", name, "rootMarkers", config.RootMarkers)
+ updateLSPState(name, lsp.StateDisabled, nil, nil, 0)
+ return
+ }
+
+ // Update state to starting
+ updateLSPState(name, lsp.StateStarting, nil, nil, 0)
// Create LSP client.
- lspClient, err := lsp.NewClient(ctx, command, args...)
+ lspClient, err := lsp.New(ctx, name, config, app.config.Resolver())
if err != nil {
slog.Error("Failed to create LSP client for", name, err)
+ updateLSPState(name, lsp.StateError, err, nil, 0)
return
}
+ // Set diagnostics callback
+ lspClient.SetDiagnosticsCallback(updateLSPDiagnostics)
+
// Increase initialization timeout as some servers take more time to start.
initCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
defer cancel()
// Initialize LSP client.
- _, err = lspClient.InitializeLSPClient(initCtx, app.config.WorkingDir())
+ _, err = lspClient.Initialize(initCtx, app.config.WorkingDir())
if err != nil {
slog.Error("Initialize failed", "name", name, "error", err)
- lspClient.Close()
+ updateLSPState(name, lsp.StateError, err, lspClient, 0)
+ lspClient.Close(ctx)
return
}
@@ -47,72 +65,16 @@ func (app *App) createAndStartLSPClient(ctx context.Context, name string, comman
// Server never reached a ready state, but let's continue anyway, as
// some functionality might still work.
lspClient.SetServerState(lsp.StateError)
+ updateLSPState(name, lsp.StateError, err, lspClient, 0)
} else {
// Server reached a ready state successfully.
slog.Info("LSP server is ready", "name", name)
lspClient.SetServerState(lsp.StateReady)
+ updateLSPState(name, lsp.StateReady, nil, lspClient, 0)
}
slog.Info("LSP client initialized", "name", name)
- // Create a child context that can be canceled when the app is shutting
- // down.
- watchCtx, cancelFunc := context.WithCancel(ctx)
-
- // Create the workspace watcher.
- workspaceWatcher := watcher.NewWorkspaceWatcher(name, lspClient)
-
- // Store the cancel function to be called during cleanup.
- app.watcherCancelFuncs.Append(cancelFunc)
-
// Add to map with mutex protection before starting goroutine
- app.clientsMutex.Lock()
- app.LSPClients[name] = lspClient
- app.clientsMutex.Unlock()
-
- // Run workspace watcher.
- app.lspWatcherWG.Add(1)
- go app.runWorkspaceWatcher(watchCtx, name, workspaceWatcher)
-}
-
-// runWorkspaceWatcher executes the workspace watcher for an LSP client.
-func (app *App) runWorkspaceWatcher(ctx context.Context, name string, workspaceWatcher *watcher.WorkspaceWatcher) {
- defer app.lspWatcherWG.Done()
- defer log.RecoverPanic("LSP-"+name, func() {
- // Try to restart the client.
- app.restartLSPClient(ctx, name)
- })
-
- workspaceWatcher.WatchWorkspace(ctx, app.config.WorkingDir())
- slog.Info("Workspace watcher stopped", "client", name)
-}
-
-// restartLSPClient attempts to restart a crashed or failed LSP client.
-func (app *App) restartLSPClient(ctx context.Context, name string) {
- // Get the original configuration.
- clientConfig, exists := app.config.LSP[name]
- if !exists {
- slog.Error("Cannot restart client, configuration not found", "client", name)
- return
- }
-
- // Clean up the old client if it exists.
- app.clientsMutex.Lock()
- oldClient, exists := app.LSPClients[name]
- if exists {
- // Remove from map before potentially slow shutdown.
- delete(app.LSPClients, name)
- }
- app.clientsMutex.Unlock()
-
- if exists && oldClient != nil {
- // Try to shut down client gracefully, but don't block on errors.
- shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
- _ = oldClient.Shutdown(shutdownCtx)
- cancel()
- }
-
- // Create a new client using the shared function.
- app.createAndStartLSPClient(ctx, name, clientConfig.Command, clientConfig.Args...)
- slog.Info("Successfully restarted LSP client", "client", name)
+ app.LSPClients.Set(name, lspClient)
}
@@ -0,0 +1,99 @@
+package app
+
+import (
+ "context"
+ "maps"
+ "time"
+
+ "github.com/charmbracelet/crush/internal/csync"
+ "github.com/charmbracelet/crush/internal/lsp"
+ "github.com/charmbracelet/crush/internal/pubsub"
+)
+
+// LSPEventType represents the type of LSP event
+type LSPEventType string
+
+const (
+ LSPEventStateChanged LSPEventType = "state_changed"
+ LSPEventDiagnosticsChanged LSPEventType = "diagnostics_changed"
+)
+
+// LSPEvent represents an event in the LSP system
+type LSPEvent struct {
+ Type LSPEventType
+ Name string
+ State lsp.ServerState
+ Error error
+ DiagnosticCount int
+}
+
+// LSPClientInfo holds information about an LSP client's state
+type LSPClientInfo struct {
+ Name string
+ State lsp.ServerState
+ Error error
+ Client *lsp.Client
+ DiagnosticCount int
+ ConnectedAt time.Time
+}
+
+var (
+ lspStates = csync.NewMap[string, LSPClientInfo]()
+ lspBroker = pubsub.NewBroker[LSPEvent]()
+)
+
+// SubscribeLSPEvents returns a channel for LSP events
+func SubscribeLSPEvents(ctx context.Context) <-chan pubsub.Event[LSPEvent] {
+ return lspBroker.Subscribe(ctx)
+}
+
+// GetLSPStates returns the current state of all LSP clients
+func GetLSPStates() map[string]LSPClientInfo {
+ return maps.Collect(lspStates.Seq2())
+}
+
+// GetLSPState returns the state of a specific LSP client
+func GetLSPState(name string) (LSPClientInfo, bool) {
+ return lspStates.Get(name)
+}
+
+// updateLSPState updates the state of an LSP client and publishes an event
+func updateLSPState(name string, state lsp.ServerState, err error, client *lsp.Client, diagnosticCount int) {
+ info := LSPClientInfo{
+ Name: name,
+ State: state,
+ Error: err,
+ Client: client,
+ DiagnosticCount: diagnosticCount,
+ }
+ if state == lsp.StateReady {
+ info.ConnectedAt = time.Now()
+ }
+ lspStates.Set(name, info)
+
+ // Publish state change event
+ lspBroker.Publish(pubsub.UpdatedEvent, LSPEvent{
+ Type: LSPEventStateChanged,
+ Name: name,
+ State: state,
+ Error: err,
+ DiagnosticCount: diagnosticCount,
+ })
+}
+
+// updateLSPDiagnostics updates the diagnostic count for an LSP client and publishes an event
+func updateLSPDiagnostics(name string, diagnosticCount int) {
+ if info, exists := lspStates.Get(name); exists {
+ info.DiagnosticCount = diagnosticCount
+ lspStates.Set(name, info)
+
+ // Publish diagnostics change event
+ lspBroker.Publish(pubsub.UpdatedEvent, LSPEvent{
+ Type: LSPEventDiagnosticsChanged,
+ Name: name,
+ State: info.State,
+ Error: info.Error,
+ DiagnosticCount: diagnosticCount,
+ })
+ }
+}
@@ -28,6 +28,11 @@ var logsCmd = &cobra.Command{
return fmt.Errorf("failed to get current working directory: %v", err)
}
+ dataDir, err := cmd.Flags().GetString("data-dir")
+ if err != nil {
+ return fmt.Errorf("failed to get data directory: %v", err)
+ }
+
follow, err := cmd.Flags().GetBool("follow")
if err != nil {
return fmt.Errorf("failed to get follow flag: %v", err)
@@ -41,11 +46,11 @@ var logsCmd = &cobra.Command{
log.SetLevel(log.DebugLevel)
log.SetOutput(os.Stdout)
- cfg, err := config.Load(cwd, false)
+ cfg, err := config.Load(cwd, dataDir, false)
if err != nil {
return fmt.Errorf("failed to load configuration: %v", err)
}
- logsFile := filepath.Join(cfg.WorkingDir(), cfg.Options.DataDirectory, "logs", "crush.log")
+ logsFile := filepath.Join(cfg.Options.DataDirectory, "logs", "crush.log")
_, err = os.Stat(logsFile)
if os.IsNotExist(err) {
log.Warn("Looks like you are not in a crush project. No logs found.")
@@ -6,11 +6,14 @@ import (
"io"
"log/slog"
"os"
+ "path/filepath"
+ "strconv"
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/crush/internal/app"
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/db"
+ "github.com/charmbracelet/crush/internal/event"
"github.com/charmbracelet/crush/internal/tui"
"github.com/charmbracelet/crush/internal/version"
"github.com/charmbracelet/fang"
@@ -20,12 +23,14 @@ import (
func init() {
rootCmd.PersistentFlags().StringP("cwd", "c", "", "Current working directory")
+ rootCmd.PersistentFlags().StringP("data-dir", "D", "", "Custom crush data directory")
rootCmd.PersistentFlags().BoolP("debug", "d", false, "Debug")
rootCmd.Flags().BoolP("help", "h", false, "Help")
rootCmd.Flags().BoolP("yolo", "y", false, "Automatically accept all permissions (dangerous mode)")
rootCmd.AddCommand(runCmd)
+ rootCmd.AddCommand(updateProvidersCmd)
}
var rootCmd = &cobra.Command{
@@ -44,6 +49,9 @@ crush -d
# Run with debug logging in a specific directory
crush -d -c /path/to/project
+# Run with custom data directory
+crush -D /path/to/custom/.crush
+
# Print version
crush -v
@@ -60,6 +68,8 @@ crush -y
}
defer app.Shutdown()
+ event.AppInitialized()
+
// Set up the TUI.
program := tea.NewProgram(
tui.New(app),
@@ -72,11 +82,15 @@ crush -y
go app.Subscribe(program)
if _, err := program.Run(); err != nil {
+ event.Error(err)
slog.Error("TUI run error", "error", err)
return fmt.Errorf("TUI error: %v", err)
}
return nil
},
+ PostRun: func(cmd *cobra.Command, args []string) {
+ event.AppExited()
+ },
}
func Execute() {
@@ -95,6 +109,7 @@ func Execute() {
func setupApp(cmd *cobra.Command) (*app.App, error) {
debug, _ := cmd.Flags().GetBool("debug")
yolo, _ := cmd.Flags().GetBool("yolo")
+ dataDir, _ := cmd.Flags().GetString("data-dir")
ctx := cmd.Context()
cwd, err := ResolveCwd(cmd)
@@ -102,7 +117,7 @@ func setupApp(cmd *cobra.Command) (*app.App, error) {
return nil, err
}
- cfg, err := config.Init(cwd, debug)
+ cfg, err := config.Init(cwd, dataDir, debug)
if err != nil {
return nil, err
}
@@ -112,6 +127,10 @@ func setupApp(cmd *cobra.Command) (*app.App, error) {
}
cfg.Permissions.SkipRequests = yolo
+ if err := createDotCrushDir(cfg.Options.DataDirectory); err != nil {
+ return nil, err
+ }
+
// Connect to DB; this will also run migrations.
conn, err := db.Connect(ctx, cfg.Options.DataDirectory)
if err != nil {
@@ -124,9 +143,26 @@ func setupApp(cmd *cobra.Command) (*app.App, error) {
return nil, err
}
+ if shouldEnableMetrics() {
+ event.Init()
+ }
+
return appInstance, nil
}
+func shouldEnableMetrics() bool {
+ if v, _ := strconv.ParseBool(os.Getenv("CRUSH_DISABLE_METRICS")); v {
+ return false
+ }
+ if v, _ := strconv.ParseBool(os.Getenv("DO_NOT_TRACK")); v {
+ return false
+ }
+ if config.Get().Options.DisableMetrics {
+ return false
+ }
+ return true
+}
+
func MaybePrependStdin(prompt string) (string, error) {
if term.IsTerminal(os.Stdin.Fd()) {
return prompt, nil
@@ -160,3 +196,18 @@ func ResolveCwd(cmd *cobra.Command) (string, error) {
}
return cwd, nil
}
+
+func createDotCrushDir(dir string) error {
+ if err := os.MkdirAll(dir, 0o700); err != nil {
+ return fmt.Errorf("failed to create data directory: %q %w", dir, err)
+ }
+
+ gitIgnorePath := filepath.Join(dir, ".gitignore")
+ if _, err := os.Stat(gitIgnorePath); os.IsNotExist(err) {
+ if err := os.WriteFile(gitIgnorePath, []byte("*\n"), 0o644); err != nil {
+ return fmt.Errorf("failed to create .gitignore file: %q %w", gitIgnorePath, err)
+ }
+ }
+
+ return nil
+}
@@ -0,0 +1,60 @@
+package cmd
+
+import (
+ "fmt"
+ "log/slog"
+
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/lipgloss/v2"
+ "github.com/charmbracelet/x/exp/charmtone"
+ "github.com/spf13/cobra"
+)
+
+var updateProvidersCmd = &cobra.Command{
+ Use: "update-providers [path-or-url]",
+ Short: "Update providers",
+ Long: `Update the list of providers from a specified local path or remote URL.`,
+ Example: `
+# Update providers remotely from Catwalk
+crush update-providers
+
+# Update providers from a custom URL
+crush update-providers https://example.com/
+
+# Update providers from a local file
+crush update-providers /path/to/local-providers.json
+
+# Update providers from embedded version
+crush update-providers embedded
+`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ // NOTE(@andreynering): We want to skip logging output to stdout here.
+ slog.SetDefault(slog.New(slog.DiscardHandler))
+
+ var pathOrUrl string
+ if len(args) > 0 {
+ pathOrUrl = args[0]
+ }
+
+ if err := config.UpdateProviders(pathOrUrl); err != nil {
+ return err
+ }
+
+ // NOTE(@andreynering): This style is more-or-less copied from Fang's
+ // error message, adapted for success.
+ headerStyle := lipgloss.NewStyle().
+ Foreground(charmtone.Butter).
+ Background(charmtone.Guac).
+ Bold(true).
+ Padding(0, 1).
+ Margin(1).
+ MarginLeft(2).
+ SetString("SUCCESS")
+ textStyle := lipgloss.NewStyle().
+ MarginLeft(2).
+ SetString("Providers updated successfully.")
+
+ fmt.Printf("%s\n%s\n\n", headerStyle.Render(), textStyle.Render())
+ return nil
+ },
+}
@@ -20,7 +20,6 @@ import (
const (
appName = "crush"
defaultDataDirectory = ".crush"
- defaultLogLevel = "info"
)
var defaultContextPaths = []string{
@@ -111,20 +110,26 @@ type MCPConfig struct {
Type MCPType `json:"type" jsonschema:"required,description=Type of MCP connection,enum=stdio,enum=sse,enum=http,default=stdio"`
URL string `json:"url,omitempty" jsonschema:"description=URL for HTTP or SSE MCP servers,format=uri,example=http://localhost:3000/mcp"`
Disabled bool `json:"disabled,omitempty" jsonschema:"description=Whether this MCP server is disabled,default=false"`
+ Timeout int `json:"timeout,omitempty" jsonschema:"description=Timeout in seconds for MCP server connections,default=15,example=30,example=60,example=120"`
// TODO: maybe make it possible to get the value from the env
Headers map[string]string `json:"headers,omitempty" jsonschema:"description=HTTP headers for HTTP/SSE MCP servers"`
}
type LSPConfig struct {
- Disabled bool `json:"enabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
- Command string `json:"command" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
- Args []string `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
- Options any `json:"options,omitempty" jsonschema:"description=LSP server-specific configuration options"`
+ Disabled bool `json:"disabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
+ Command string `json:"command,omitempty" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
+ Args []string `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
+ Env map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set to the LSP server command"`
+ FileTypes []string `json:"filetypes,omitempty" jsonschema:"description=File types this LSP server handles,example=go,example=mod,example=rs,example=c,example=js,example=ts"`
+ RootMarkers []string `json:"root_markers,omitempty" jsonschema:"description=Files or directories that indicate the project root,example=go.mod,example=package.json,example=Cargo.toml"`
+ InitOptions map[string]any `json:"init_options,omitempty" jsonschema:"description=Initialization options passed to the LSP server during initialize request"`
+ Options map[string]any `json:"options,omitempty" jsonschema:"description=LSP server-specific settings passed during initialization"`
}
type TUIOptions struct {
- CompactMode bool `json:"compact_mode,omitempty" jsonschema:"description=Enable compact mode for the TUI interface,default=false"`
+ CompactMode bool `json:"compact_mode,omitempty" jsonschema:"description=Enable compact mode for the TUI interface,default=false"`
+ DiffMode string `json:"diff_mode,omitempty" jsonschema:"description=Diff mode for the TUI interface,enum=unified,enum=split"`
// Here we can add themes later or any TUI related options
}
@@ -133,13 +138,22 @@ type Permissions struct {
SkipRequests bool `json:"-"` // Automatically accept all permissions (YOLO mode)
}
+type Attribution struct {
+ CoAuthoredBy bool `json:"co_authored_by,omitempty" jsonschema:"description=Add Co-Authored-By trailer to commit messages,default=true"`
+ GeneratedWith bool `json:"generated_with,omitempty" jsonschema:"description=Add Generated with Crush line to commit messages and issues and PRs,default=true"`
+}
+
type Options struct {
- ContextPaths []string `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
- TUI *TUIOptions `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
- Debug bool `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
- DebugLSP bool `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
- DisableAutoSummarize bool `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
- DataDirectory string `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
+ ContextPaths []string `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
+ TUI *TUIOptions `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
+ Debug bool `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
+ DebugLSP bool `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
+ DisableAutoSummarize bool `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
+ DataDirectory string `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
+ DisabledTools []string `json:"disabled_tools" jsonschema:"description=Tools to disable"`
+ DisableProviderAutoUpdate bool `json:"disable_provider_auto_update,omitempty" jsonschema:"description=Disable providers auto-update,default=false"`
+ Attribution *Attribution `json:"attribution,omitempty" jsonschema:"description=Attribution settings for generated content"`
+ DisableMetrics bool `json:"disable_metrics,omitempty" jsonschema:"description=Disable sending metrics,default=false"`
}
type MCPs map[string]MCPConfig
@@ -184,22 +198,12 @@ func (l LSPs) Sorted() []LSP {
return sorted
}
-func (m MCPConfig) ResolvedEnv() []string {
- resolver := NewShellVariableResolver(env.New())
- for e, v := range m.Env {
- var err error
- m.Env[e], err = resolver.ResolveValue(v)
- if err != nil {
- slog.Error("error resolving environment variable", "error", err, "variable", e, "value", v)
- continue
- }
- }
+func (l LSPConfig) ResolvedEnv() []string {
+ return resolveEnvs(l.Env)
+}
- env := make([]string, 0, len(m.Env))
- for k, v := range m.Env {
- env = append(env, fmt.Sprintf("%s=%s", k, v))
- }
- return env
+func (m MCPConfig) ResolvedEnv() []string {
+ return resolveEnvs(m.Env)
}
func (m MCPConfig) ResolvedHeaders() map[string]string {
@@ -244,6 +248,8 @@ type Agent struct {
// Config holds the configuration for crush.
type Config struct {
+ Schema string `json:"$schema,omitempty"`
+
// We currently only support large/small as values here.
Models map[SelectedModelType]SelectedModel `json:"models,omitempty" jsonschema:"description=Model configurations for different model types,example={\"large\":{\"model\":\"gpt-4o\",\"provider\":\"openai\"}}"`
@@ -420,7 +426,52 @@ func (c *Config) SetProviderAPIKey(providerID, apiKey string) error {
return nil
}
+func allToolNames() []string {
+ return []string{
+ "agent",
+ "bash",
+ "download",
+ "edit",
+ "multiedit",
+ "fetch",
+ "glob",
+ "grep",
+ "ls",
+ "sourcegraph",
+ "view",
+ "write",
+ }
+}
+
+func resolveAllowedTools(allTools []string, disabledTools []string) []string {
+ if disabledTools == nil {
+ return allTools
+ }
+ // filter out disabled tools (exclude mode)
+ return filterSlice(allTools, disabledTools, false)
+}
+
+func resolveReadOnlyTools(tools []string) []string {
+ readOnlyTools := []string{"glob", "grep", "ls", "sourcegraph", "view"}
+ // filter to only include tools that are in allowedtools (include mode)
+ return filterSlice(tools, readOnlyTools, true)
+}
+
+func filterSlice(data []string, mask []string, include bool) []string {
+ filtered := []string{}
+ for _, s := range data {
+ // if include is true, we include items that ARE in the mask
+ // if include is false, we include items that are NOT in the mask
+ if include == slices.Contains(mask, s) {
+ filtered = append(filtered, s)
+ }
+ }
+ return filtered
+}
+
func (c *Config) SetupAgents() {
+ allowedTools := resolveAllowedTools(allToolNames(), c.Options.DisabledTools)
+
agents := map[string]Agent{
"coder": {
ID: "coder",
@@ -428,7 +479,7 @@ func (c *Config) SetupAgents() {
Description: "An agent that helps with executing coding tasks.",
Model: SelectedModelTypeLarge,
ContextPaths: c.Options.ContextPaths,
- // All tools allowed
+ AllowedTools: allowedTools,
},
"task": {
ID: "task",
@@ -436,13 +487,7 @@ func (c *Config) SetupAgents() {
Description: "An agent that helps with searching for context and finding implementation details.",
Model: SelectedModelTypeLarge,
ContextPaths: c.Options.ContextPaths,
- AllowedTools: []string{
- "glob",
- "grep",
- "ls",
- "sourcegraph",
- "view",
- },
+ AllowedTools: resolveReadOnlyTools(allowedTools),
// NO MCPs or LSPs by default
AllowedMCP: map[string][]string{},
AllowedLSP: []string{},
@@ -465,7 +510,11 @@ func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
if baseURL == "" {
baseURL = "https://api.openai.com/v1"
}
- testURL = baseURL + "/models"
+ if c.ID == string(catwalk.InferenceProviderOpenRouter) {
+ testURL = baseURL + "/credits"
+ } else {
+ testURL = baseURL + "/models"
+ }
headers["Authorization"] = "Bearer " + apiKey
case catwalk.TypeAnthropic:
baseURL, _ := resolver.ResolveValue(c.BaseURL)
@@ -499,9 +548,34 @@ func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
if err != nil {
return fmt.Errorf("failed to create request for provider %s: %w", c.ID, err)
}
- if b.StatusCode != http.StatusOK {
- return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
+ if c.ID == string(catwalk.InferenceProviderZAI) {
+ if b.StatusCode == http.StatusUnauthorized {
+ // for z.ai just check if the http response is not 401
+ return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
+ }
+ } else {
+ if b.StatusCode != http.StatusOK {
+ return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
+ }
}
_ = b.Body.Close()
return nil
}
+
+func resolveEnvs(envs map[string]string) []string {
+ resolver := NewShellVariableResolver(env.New())
+ for e, v := range envs {
+ var err error
+ envs[e], err = resolver.ResolveValue(v)
+ if err != nil {
+ slog.Error("error resolving environment variable", "error", err, "variable", e, "value", v)
+ continue
+ }
+ }
+
+ res := make([]string, 0, len(envs))
+ for k, v := range envs {
+ res = append(res, fmt.Sprintf("%s=%s", k, v))
+ }
+ return res
+}
@@ -4,6 +4,7 @@ import (
"fmt"
"os"
"path/filepath"
+ "slices"
"strings"
"sync/atomic"
)
@@ -19,8 +20,8 @@ type ProjectInitFlag struct {
// TODO: we need to remove the global config instance keeping it now just until everything is migrated
var instance atomic.Pointer[Config]
-func Init(workingDir string, debug bool) (*Config, error) {
- cfg, err := Load(workingDir, debug)
+func Init(workingDir, dataDir string, debug bool) (*Config, error) {
+ cfg, err := Load(workingDir, dataDir, debug)
if err != nil {
return nil, err
}
@@ -50,30 +51,38 @@ func ProjectNeedsInitialization() (bool, error) {
return false, fmt.Errorf("failed to check init flag file: %w", err)
}
- crushExists, err := crushMdExists(cfg.WorkingDir())
+ someContextFileExists, err := contextPathsExist(cfg.WorkingDir())
if err != nil {
- return false, fmt.Errorf("failed to check for CRUSH.md files: %w", err)
+ return false, fmt.Errorf("failed to check for context files: %w", err)
}
- if crushExists {
+ if someContextFileExists {
return false, nil
}
return true, nil
}
-func crushMdExists(dir string) (bool, error) {
+func contextPathsExist(dir string) (bool, error) {
entries, err := os.ReadDir(dir)
if err != nil {
return false, err
}
+ // Create a slice of lowercase filenames for lookup with slices.Contains
+ var files []string
for _, entry := range entries {
- if entry.IsDir() {
- continue
+ if !entry.IsDir() {
+ files = append(files, strings.ToLower(entry.Name()))
}
+ }
+
+ // Check if any of the default context paths exist in the directory
+ for _, path := range defaultContextPaths {
+ // Extract just the filename from the path
+ _, filename := filepath.Split(path)
+ filename = strings.ToLower(filename)
- name := strings.ToLower(entry.Name())
- if name == "crush.md" {
+ if slices.Contains(files, filename) {
return true, nil
}
}
@@ -10,12 +10,16 @@ import (
"path/filepath"
"runtime"
"slices"
+ "strconv"
"strings"
"github.com/charmbracelet/catwalk/pkg/catwalk"
"github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/env"
+ "github.com/charmbracelet/crush/internal/fsext"
+ "github.com/charmbracelet/crush/internal/home"
"github.com/charmbracelet/crush/internal/log"
+ powernapConfig "github.com/charmbracelet/x/powernap/pkg/config"
)
const defaultCatwalkURL = "https://catwalk.charm.sh"
@@ -36,14 +40,9 @@ func LoadReader(fd io.Reader) (*Config, error) {
}
// Load loads the configuration from the default paths.
-func Load(workingDir string, debug bool) (*Config, error) {
- // uses default config paths
- configPaths := []string{
- globalConfig(),
- GlobalConfigData(),
- filepath.Join(workingDir, fmt.Sprintf("%s.json", appName)),
- filepath.Join(workingDir, fmt.Sprintf(".%s.json", appName)),
- }
+func Load(workingDir, dataDir string, debug bool) (*Config, error) {
+ configPaths := lookupConfigs(workingDir)
+
cfg, err := loadFromConfigPaths(configPaths)
if err != nil {
return nil, fmt.Errorf("failed to load config from paths %v: %w", configPaths, err)
@@ -51,7 +50,7 @@ func Load(workingDir string, debug bool) (*Config, error) {
cfg.dataConfigDir = GlobalConfigData()
- cfg.setDefaults(workingDir)
+ cfg.setDefaults(workingDir, dataDir)
if debug {
cfg.Options.Debug = true
@@ -64,9 +63,9 @@ func Load(workingDir string, debug bool) (*Config, error) {
)
// Load known providers, this loads the config from catwalk
- providers, err := Providers()
- if err != nil || len(providers) == 0 {
- return nil, fmt.Errorf("failed to load providers: %w", err)
+ providers, err := Providers(cfg)
+ if err != nil {
+ return nil, err
}
cfg.knownProviders = providers
@@ -74,7 +73,7 @@ func Load(workingDir string, debug bool) (*Config, error) {
// Configure providers
valueResolver := NewShellVariableResolver(env)
cfg.resolver = valueResolver
- if err := cfg.configureProviders(env, valueResolver, providers); err != nil {
+ if err := cfg.configureProviders(env, valueResolver, cfg.knownProviders); err != nil {
return nil, fmt.Errorf("failed to configure providers: %w", err)
}
@@ -83,25 +82,50 @@ func Load(workingDir string, debug bool) (*Config, error) {
return cfg, nil
}
- if err := cfg.configureSelectedModels(providers); err != nil {
+ if err := cfg.configureSelectedModels(cfg.knownProviders); err != nil {
return nil, fmt.Errorf("failed to configure selected models: %w", err)
}
cfg.SetupAgents()
return cfg, nil
}
+func PushPopCrushEnv() func() {
+ found := []string{}
+ for _, ev := range os.Environ() {
+ if strings.HasPrefix(ev, "CRUSH_") {
+ pair := strings.SplitN(ev, "=", 2)
+ if len(pair) != 2 {
+ continue
+ }
+ found = append(found, strings.TrimPrefix(pair[0], "CRUSH_"))
+ }
+ }
+ backups := make(map[string]string)
+ for _, ev := range found {
+ backups[ev] = os.Getenv(ev)
+ }
+
+ for _, ev := range found {
+ os.Setenv(ev, os.Getenv("CRUSH_"+ev))
+ }
+
+ restore := func() {
+ for k, v := range backups {
+ os.Setenv(k, v)
+ }
+ }
+ return restore
+}
+
func (c *Config) configureProviders(env env.Env, resolver VariableResolver, knownProviders []catwalk.Provider) error {
knownProviderNames := make(map[string]bool)
+ restore := PushPopCrushEnv()
+ defer restore()
for _, p := range knownProviders {
knownProviderNames[string(p.ID)] = true
config, configExists := c.Providers.Get(string(p.ID))
// if the user configured a known provider we need to allow it to override a couple of parameters
if configExists {
- if config.Disable {
- slog.Debug("Skipping provider due to disable flag", "provider", p.ID)
- c.Providers.Del(string(p.ID))
- continue
- }
if config.BaseURL != "" {
p.APIEndpoint = config.BaseURL
}
@@ -246,7 +270,7 @@ func (c *Config) configureProviders(env env.Env, resolver VariableResolver, know
c.Providers.Del(id)
continue
}
- if providerConfig.Type != catwalk.TypeOpenAI && providerConfig.Type != catwalk.TypeAnthropic {
+ if providerConfig.Type != catwalk.TypeOpenAI && providerConfig.Type != catwalk.TypeAnthropic && providerConfig.Type != catwalk.TypeGemini {
slog.Warn("Skipping custom provider because the provider type is not supported", "provider", id, "type", providerConfig.Type)
c.Providers.Del(id)
continue
@@ -268,7 +292,7 @@ func (c *Config) configureProviders(env env.Env, resolver VariableResolver, know
return nil
}
-func (c *Config) setDefaults(workingDir string) {
+func (c *Config) setDefaults(workingDir, dataDir string) {
c.workingDir = workingDir
if c.Options == nil {
c.Options = &Options{}
@@ -279,8 +303,14 @@ func (c *Config) setDefaults(workingDir string) {
if c.Options.ContextPaths == nil {
c.Options.ContextPaths = []string{}
}
- if c.Options.DataDirectory == "" {
- c.Options.DataDirectory = filepath.Join(workingDir, defaultDataDirectory)
+ if dataDir != "" {
+ c.Options.DataDirectory = dataDir
+ } else if c.Options.DataDirectory == "" {
+ if path, ok := fsext.LookupClosest(workingDir, defaultDataDirectory); ok {
+ c.Options.DataDirectory = path
+ } else {
+ c.Options.DataDirectory = filepath.Join(workingDir, defaultDataDirectory)
+ }
}
if c.Providers == nil {
c.Providers = csync.NewMap[string, ProviderConfig]()
@@ -295,16 +325,65 @@ func (c *Config) setDefaults(workingDir string) {
c.LSP = make(map[string]LSPConfig)
}
+ // Apply defaults to LSP configurations
+ c.applyLSPDefaults()
+
// Add the default context paths if they are not already present
c.Options.ContextPaths = append(defaultContextPaths, c.Options.ContextPaths...)
slices.Sort(c.Options.ContextPaths)
c.Options.ContextPaths = slices.Compact(c.Options.ContextPaths)
+
+ if str, ok := os.LookupEnv("CRUSH_DISABLE_PROVIDER_AUTO_UPDATE"); ok {
+ c.Options.DisableProviderAutoUpdate, _ = strconv.ParseBool(str)
+ }
+}
+
+// applyLSPDefaults applies default values from powernap to LSP configurations
+func (c *Config) applyLSPDefaults() {
+ // Get powernap's default configuration
+ configManager := powernapConfig.NewManager()
+ configManager.LoadDefaults()
+
+ // Apply defaults to each LSP configuration
+ for name, cfg := range c.LSP {
+ // Try to get defaults from powernap based on name or command name.
+ base, ok := configManager.GetServer(name)
+ if !ok {
+ base, ok = configManager.GetServer(cfg.Command)
+ if !ok {
+ continue
+ }
+ }
+ if cfg.Options == nil {
+ cfg.Options = base.Settings
+ }
+ if cfg.InitOptions == nil {
+ cfg.InitOptions = base.InitOptions
+ }
+ if len(cfg.FileTypes) == 0 {
+ cfg.FileTypes = base.FileTypes
+ }
+ if len(cfg.RootMarkers) == 0 {
+ cfg.RootMarkers = base.RootMarkers
+ }
+ if cfg.Command == "" {
+ cfg.Command = base.Command
+ }
+ if len(cfg.Args) == 0 {
+ cfg.Args = base.Args
+ }
+ if len(cfg.Env) == 0 {
+ cfg.Env = base.Environment
+ }
+ // Update the config in the map
+ c.LSP[name] = cfg
+ }
}
func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (largeModel SelectedModel, smallModel SelectedModel, err error) {
if len(knownProviders) == 0 && c.Providers.Len() == 0 {
err = fmt.Errorf("no providers configured, please configure at least one provider")
- return
+ return largeModel, smallModel, err
}
// Use the first provider enabled based on the known providers order
@@ -317,7 +396,7 @@ func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (large
defaultLargeModel := c.GetModel(string(p.ID), p.DefaultLargeModelID)
if defaultLargeModel == nil {
err = fmt.Errorf("default large model %s not found for provider %s", p.DefaultLargeModelID, p.ID)
- return
+ return largeModel, smallModel, err
}
largeModel = SelectedModel{
Provider: string(p.ID),
@@ -329,7 +408,7 @@ func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (large
defaultSmallModel := c.GetModel(string(p.ID), p.DefaultSmallModelID)
if defaultSmallModel == nil {
err = fmt.Errorf("default small model %s not found for provider %s", p.DefaultSmallModelID, p.ID)
- return
+ return largeModel, smallModel, err
}
smallModel = SelectedModel{
Provider: string(p.ID),
@@ -337,7 +416,7 @@ func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (large
MaxTokens: defaultSmallModel.DefaultMaxTokens,
ReasoningEffort: defaultSmallModel.DefaultReasoningEffort,
}
- return
+ return largeModel, smallModel, err
}
enabledProviders := c.EnabledProviders()
@@ -347,13 +426,13 @@ func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (large
if len(enabledProviders) == 0 {
err = fmt.Errorf("no providers configured, please configure at least one provider")
- return
+ return largeModel, smallModel, err
}
providerConfig := enabledProviders[0]
if len(providerConfig.Models) == 0 {
err = fmt.Errorf("provider %s has no models configured", providerConfig.ID)
- return
+ return largeModel, smallModel, err
}
defaultLargeModel := c.GetModel(providerConfig.ID, providerConfig.Models[0].ID)
largeModel = SelectedModel{
@@ -367,7 +446,7 @@ func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (large
Model: defaultSmallModel.ID,
MaxTokens: defaultSmallModel.DefaultMaxTokens,
}
- return
+ return largeModel, smallModel, err
}
func (c *Config) configureSelectedModels(knownProviders []catwalk.Provider) error {
@@ -437,6 +516,28 @@ func (c *Config) configureSelectedModels(knownProviders []catwalk.Provider) erro
return nil
}
+// lookupConfigs searches config files recursively from CWD up to FS root
+func lookupConfigs(cwd string) []string {
+ // prepend default config paths
+ configPaths := []string{
+ globalConfig(),
+ GlobalConfigData(),
+ }
+
+ configNames := []string{appName + ".json", "." + appName + ".json"}
+
+ foundConfigs, err := fsext.Lookup(cwd, configNames...)
+ if err != nil {
+ // returns at least default configs
+ return configPaths
+ }
+
+ // reverse order so last config has more priority
+ slices.Reverse(foundConfigs)
+
+ return append(configPaths, foundConfigs...)
+}
+
func loadFromConfigPaths(configPaths []string) (*Config, error) {
var configs []io.Reader
@@ -492,7 +593,6 @@ func hasAWSCredentials(env env.Env) bool {
env.Get("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
return true
}
-
return false
}
@@ -513,7 +613,7 @@ func globalConfig() string {
return filepath.Join(localAppData, appName, fmt.Sprintf("%s.json", appName))
}
- return filepath.Join(os.Getenv("HOME"), ".config", appName, fmt.Sprintf("%s.json", appName))
+ return filepath.Join(home.Dir(), ".config", appName, fmt.Sprintf("%s.json", appName))
}
// GlobalConfigData returns the path to the main data directory for the application.
@@ -535,16 +635,5 @@ func GlobalConfigData() string {
return filepath.Join(localAppData, appName, fmt.Sprintf("%s.json", appName))
}
- return filepath.Join(os.Getenv("HOME"), ".local", "share", appName, fmt.Sprintf("%s.json", appName))
-}
-
-func HomeDir() string {
- homeDir := os.Getenv("HOME")
- if homeDir == "" {
- homeDir = os.Getenv("USERPROFILE") // For Windows compatibility
- }
- if homeDir == "" {
- homeDir = os.Getenv("HOMEPATH") // Fallback for some environments
- }
- return homeDir
+ return filepath.Join(home.Dir(), ".local", "share", appName, fmt.Sprintf("%s.json", appName))
}
@@ -11,6 +11,7 @@ import (
"github.com/charmbracelet/catwalk/pkg/catwalk"
"github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/env"
+ "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -39,7 +40,7 @@ func TestConfig_LoadFromReaders(t *testing.T) {
func TestConfig_setDefaults(t *testing.T) {
cfg := &Config{}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
require.NotNil(t, cfg.Options)
require.NotNil(t, cfg.Options.TUI)
@@ -68,7 +69,7 @@ func TestConfig_configureProviders(t *testing.T) {
}
cfg := &Config{}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{
"OPENAI_API_KEY": "test-key",
})
@@ -110,7 +111,7 @@ func TestConfig_configureProvidersWithOverride(t *testing.T) {
},
},
})
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{
"OPENAI_API_KEY": "test-key",
@@ -153,7 +154,7 @@ func TestConfig_configureProvidersWithNewProvider(t *testing.T) {
},
}),
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{
"OPENAI_API_KEY": "test-key",
})
@@ -188,7 +189,7 @@ func TestConfig_configureProvidersBedrockWithCredentials(t *testing.T) {
}
cfg := &Config{}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{
"AWS_ACCESS_KEY_ID": "test-key-id",
"AWS_SECRET_ACCESS_KEY": "test-secret-key",
@@ -217,7 +218,7 @@ func TestConfig_configureProvidersBedrockWithoutCredentials(t *testing.T) {
}
cfg := &Config{}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
err := cfg.configureProviders(env, resolver, knownProviders)
@@ -239,7 +240,7 @@ func TestConfig_configureProvidersBedrockWithoutUnsupportedModel(t *testing.T) {
}
cfg := &Config{}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{
"AWS_ACCESS_KEY_ID": "test-key-id",
"AWS_SECRET_ACCESS_KEY": "test-secret-key",
@@ -262,7 +263,7 @@ func TestConfig_configureProvidersVertexAIWithCredentials(t *testing.T) {
}
cfg := &Config{}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{
"VERTEXAI_PROJECT": "test-project",
"VERTEXAI_LOCATION": "us-central1",
@@ -293,7 +294,7 @@ func TestConfig_configureProvidersVertexAIWithoutCredentials(t *testing.T) {
}
cfg := &Config{}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{
"GOOGLE_GENAI_USE_VERTEXAI": "false",
"GOOGLE_CLOUD_PROJECT": "test-project",
@@ -319,7 +320,7 @@ func TestConfig_configureProvidersVertexAIMissingProject(t *testing.T) {
}
cfg := &Config{}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{
"GOOGLE_GENAI_USE_VERTEXAI": "true",
"GOOGLE_CLOUD_LOCATION": "us-central1",
@@ -344,7 +345,7 @@ func TestConfig_configureProvidersSetProviderID(t *testing.T) {
}
cfg := &Config{}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{
"OPENAI_API_KEY": "test-key",
})
@@ -453,6 +454,67 @@ func TestConfig_IsConfigured(t *testing.T) {
})
}
// TestConfig_setupAgentsWithNoDisabledTools verifies the baseline tool split:
// with nothing disabled, the coder agent is allowed every tool and the task
// agent is restricted to the read-only subset.
func TestConfig_setupAgentsWithNoDisabledTools(t *testing.T) {
	cfg := &Config{
		Options: &Options{
			DisabledTools: []string{},
		},
	}

	cfg.SetupAgents()
	coderAgent, ok := cfg.Agents["coder"]
	require.True(t, ok)
	assert.Equal(t, allToolNames(), coderAgent.AllowedTools)

	taskAgent, ok := cfg.Agents["task"]
	require.True(t, ok)
	// Read-only tools only; mutating tools (edit, write, bash, ...) excluded.
	assert.Equal(t, []string{"glob", "grep", "ls", "sourcegraph", "view"}, taskAgent.AllowedTools)
}
+
// TestConfig_setupAgentsWithDisabledTools verifies that disabled tools are
// filtered out of both agents' allow-lists while everything else is kept in
// the original order.
func TestConfig_setupAgentsWithDisabledTools(t *testing.T) {
	cfg := &Config{
		Options: &Options{
			DisabledTools: []string{
				"edit",
				"download",
				"grep",
			},
		},
	}

	cfg.SetupAgents()
	coderAgent, ok := cfg.Agents["coder"]
	require.True(t, ok)
	// edit/download/grep removed from the full tool set.
	assert.Equal(t, []string{"agent", "bash", "multiedit", "fetch", "glob", "ls", "sourcegraph", "view", "write"}, coderAgent.AllowedTools)

	taskAgent, ok := cfg.Agents["task"]
	require.True(t, ok)
	// grep removed from the read-only subset.
	assert.Equal(t, []string{"glob", "ls", "sourcegraph", "view"}, taskAgent.AllowedTools)
}
+
// TestConfig_setupAgentsWithEveryReadOnlyToolDisabled covers the degenerate
// case where the whole read-only subset is disabled: the task agent ends up
// with an empty (but non-nil) allow-list, and the coder agent keeps only the
// mutating tools.
func TestConfig_setupAgentsWithEveryReadOnlyToolDisabled(t *testing.T) {
	cfg := &Config{
		Options: &Options{
			DisabledTools: []string{
				"glob",
				"grep",
				"ls",
				"sourcegraph",
				"view",
			},
		},
	}

	cfg.SetupAgents()
	coderAgent, ok := cfg.Agents["coder"]
	require.True(t, ok)
	assert.Equal(t, []string{"agent", "bash", "download", "edit", "multiedit", "fetch", "write"}, coderAgent.AllowedTools)

	taskAgent, ok := cfg.Agents["task"]
	require.True(t, ok)
	// Empty slice (not nil): the agent exists but may use no tools.
	assert.Equal(t, []string{}, taskAgent.AllowedTools)
}
+
func TestConfig_configureProvidersWithDisabledProvider(t *testing.T) {
knownProviders := []catwalk.Provider{
{
@@ -472,7 +534,7 @@ func TestConfig_configureProvidersWithDisabledProvider(t *testing.T) {
},
}),
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{
"OPENAI_API_KEY": "test-key",
@@ -481,10 +543,10 @@ func TestConfig_configureProvidersWithDisabledProvider(t *testing.T) {
err := cfg.configureProviders(env, resolver, knownProviders)
require.NoError(t, err)
- // Provider should be removed from config when disabled
- require.Equal(t, cfg.Providers.Len(), 0)
- _, exists := cfg.Providers.Get("openai")
- require.False(t, exists)
+ require.Equal(t, cfg.Providers.Len(), 1)
+ prov, exists := cfg.Providers.Get("openai")
+ require.True(t, exists)
+ require.True(t, prov.Disable)
}
func TestConfig_configureProvidersCustomProviderValidation(t *testing.T) {
@@ -502,7 +564,7 @@ func TestConfig_configureProvidersCustomProviderValidation(t *testing.T) {
},
}),
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
@@ -525,7 +587,7 @@ func TestConfig_configureProvidersCustomProviderValidation(t *testing.T) {
},
}),
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
@@ -547,7 +609,7 @@ func TestConfig_configureProvidersCustomProviderValidation(t *testing.T) {
},
}),
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
@@ -572,7 +634,7 @@ func TestConfig_configureProvidersCustomProviderValidation(t *testing.T) {
},
}),
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
@@ -597,7 +659,7 @@ func TestConfig_configureProvidersCustomProviderValidation(t *testing.T) {
},
}),
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
@@ -625,7 +687,7 @@ func TestConfig_configureProvidersCustomProviderValidation(t *testing.T) {
},
}),
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
@@ -655,7 +717,7 @@ func TestConfig_configureProvidersCustomProviderValidation(t *testing.T) {
},
}),
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
@@ -688,7 +750,7 @@ func TestConfig_configureProvidersEnhancedCredentialValidation(t *testing.T) {
},
}),
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{
"GOOGLE_GENAI_USE_VERTEXAI": "false",
@@ -721,7 +783,7 @@ func TestConfig_configureProvidersEnhancedCredentialValidation(t *testing.T) {
},
}),
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
@@ -752,7 +814,7 @@ func TestConfig_configureProvidersEnhancedCredentialValidation(t *testing.T) {
},
}),
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
@@ -783,7 +845,7 @@ func TestConfig_configureProvidersEnhancedCredentialValidation(t *testing.T) {
},
}),
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{
"OPENAI_API_KEY": "test-key",
@@ -820,7 +882,7 @@ func TestConfig_defaultModelSelection(t *testing.T) {
}
cfg := &Config{}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
err := cfg.configureProviders(env, resolver, knownProviders)
@@ -856,7 +918,7 @@ func TestConfig_defaultModelSelection(t *testing.T) {
}
cfg := &Config{}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
err := cfg.configureProviders(env, resolver, knownProviders)
@@ -886,7 +948,7 @@ func TestConfig_defaultModelSelection(t *testing.T) {
}
cfg := &Config{}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
err := cfg.configureProviders(env, resolver, knownProviders)
@@ -929,7 +991,7 @@ func TestConfig_defaultModelSelection(t *testing.T) {
},
}),
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
err := cfg.configureProviders(env, resolver, knownProviders)
@@ -973,7 +1035,7 @@ func TestConfig_defaultModelSelection(t *testing.T) {
},
}),
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
err := cfg.configureProviders(env, resolver, knownProviders)
@@ -1015,7 +1077,7 @@ func TestConfig_defaultModelSelection(t *testing.T) {
},
}),
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
err := cfg.configureProviders(env, resolver, knownProviders)
@@ -1063,7 +1125,7 @@ func TestConfig_configureSelectedModels(t *testing.T) {
},
},
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
err := cfg.configureProviders(env, resolver, knownProviders)
@@ -1125,7 +1187,7 @@ func TestConfig_configureSelectedModels(t *testing.T) {
},
},
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
err := cfg.configureProviders(env, resolver, knownProviders)
@@ -1170,7 +1232,7 @@ func TestConfig_configureSelectedModels(t *testing.T) {
},
},
}
- cfg.setDefaults("/tmp")
+ cfg.setDefaults("/tmp", "")
env := env.NewFromMap(map[string]string{})
resolver := NewEnvironmentVariableResolver(env)
err := cfg.configureProviders(env, resolver, knownProviders)
@@ -0,0 +1,35 @@
+package config
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
// TestApplyLSPDefaults checks both sides of the defaulting contract: a known
// server (gopls) is enriched from powernap's bundled definitions, while an
// unknown server keeps its explicit configuration untouched.
// NOTE(review): depends on powernap's defaults actually containing a "gopls"
// entry — confirm against the powernap package if this starts failing.
func TestApplyLSPDefaults(t *testing.T) {
	t.Parallel()

	// Create a config with an LSP that should get defaults
	config := &Config{
		LSP: map[string]LSPConfig{
			"gopls": {
				Command: "gopls", // This should get defaults from powernap
			},
			"custom": {
				Command:     "custom-lsp",
				RootMarkers: []string{"custom.toml"}, // This should keep its explicit config
			},
		},
	}

	// Apply defaults
	config.applyLSPDefaults()

	// Check that gopls got defaults (it should have some root markers now)
	goplsConfig := config.LSP["gopls"]
	require.NotEmpty(t, goplsConfig.RootMarkers, "gopls should have received default root markers")

	// Check that custom LSP kept its explicit config
	customConfig := config.LSP["custom"]
	require.Equal(t, []string{"custom.toml"}, customConfig.RootMarkers, "custom LSP should keep its explicit root markers")
}
@@ -15,7 +15,7 @@ func TestMerge(t *testing.T) {
t.Fatalf("expected no error, got %v", err)
}
- expected := `{"baz":"qux","foo":"bar"}`
+ expected := `{"foo":"bar","baz":"qux"}`
got, err := io.ReadAll(merged)
if err != nil {
t.Fatalf("expected no error reading merged data, got %v", err)
@@ -8,10 +8,13 @@ import (
"os"
"path/filepath"
"runtime"
+ "strings"
"sync"
"time"
"github.com/charmbracelet/catwalk/pkg/catwalk"
+ "github.com/charmbracelet/catwalk/pkg/embedded"
+ "github.com/charmbracelet/crush/internal/home"
)
type ProviderClient interface {
@@ -21,6 +24,7 @@ type ProviderClient interface {
var (
providerOnce sync.Once
providerList []catwalk.Provider
+ providerErr error
)
// file to cache provider data
@@ -41,7 +45,7 @@ func providerCacheFileData() string {
return filepath.Join(localAppData, appName, "providers.json")
}
- return filepath.Join(os.Getenv("HOME"), ".local", "share", appName, "providers.json")
+ return filepath.Join(home.Dir(), ".local", "share", appName, "providers.json")
}
func saveProvidersInCache(path string, providers []catwalk.Provider) error {
@@ -74,54 +78,128 @@ func loadProvidersFromCache(path string) ([]catwalk.Provider, error) {
return providers, nil
}
-func Providers() ([]catwalk.Provider, error) {
- catwalkURL := cmp.Or(os.Getenv("CATWALK_URL"), defaultCatwalkURL)
- client := catwalk.NewWithURL(catwalkURL)
- path := providerCacheFileData()
- return loadProvidersOnce(client, path)
+func UpdateProviders(pathOrUrl string) error {
+ var providers []catwalk.Provider
+ pathOrUrl = cmp.Or(pathOrUrl, os.Getenv("CATWALK_URL"), defaultCatwalkURL)
+
+ switch {
+ case pathOrUrl == "embedded":
+ providers = embedded.GetAll()
+ case strings.HasPrefix(pathOrUrl, "http://") || strings.HasPrefix(pathOrUrl, "https://"):
+ var err error
+ providers, err = catwalk.NewWithURL(pathOrUrl).GetProviders()
+ if err != nil {
+ return fmt.Errorf("failed to fetch providers from Catwalk: %w", err)
+ }
+ default:
+ content, err := os.ReadFile(pathOrUrl)
+ if err != nil {
+ return fmt.Errorf("failed to read file: %w", err)
+ }
+ if err := json.Unmarshal(content, &providers); err != nil {
+ return fmt.Errorf("failed to unmarshal provider data: %w", err)
+ }
+ if len(providers) == 0 {
+ return fmt.Errorf("no providers found in the provided source")
+ }
+ }
+
+ cachePath := providerCacheFileData()
+ if err := saveProvidersInCache(cachePath, providers); err != nil {
+ return fmt.Errorf("failed to save providers to cache: %w", err)
+ }
+
+ slog.Info("Providers updated successfully", "count", len(providers), "from", pathOrUrl, "to", cachePath)
+ return nil
}
-func loadProvidersOnce(client ProviderClient, path string) ([]catwalk.Provider, error) {
- var err error
// Providers returns the known catwalk providers, loading them exactly once
// per process via providerOnce and memoizing both the list and the error.
//
// NOTE(review): because the error is cached by sync.Once, a failed first load
// is returned to every later caller with no retry — confirm this is intended.
// cfg.Options must be non-nil (setDefaults guarantees this for loaded configs).
func Providers(cfg *Config) ([]catwalk.Provider, error) {
	providerOnce.Do(func() {
		// CATWALK_URL overrides the default provider-catalog endpoint.
		catwalkURL := cmp.Or(os.Getenv("CATWALK_URL"), defaultCatwalkURL)
		client := catwalk.NewWithURL(catwalkURL)
		path := providerCacheFileData()

		autoUpdateDisabled := cfg.Options.DisableProviderAutoUpdate
		providerList, providerErr = loadProviders(autoUpdateDisabled, client, path)
	})
	return providerList, providerErr
}
-func loadProviders(client ProviderClient, path string) (providerList []catwalk.Provider, err error) {
- // if cache is not stale, load from it
- stale, exists := isCacheStale(path)
- if !stale {
- slog.Info("Using cached provider data", "path", path)
- providerList, err = loadProvidersFromCache(path)
- if len(providerList) > 0 && err == nil {
- go func() {
- slog.Info("Updating provider cache in background")
- updated, uerr := client.GetProviders()
- if len(updated) > 0 && uerr == nil {
- _ = saveProvidersInCache(path, updated)
- }
- }()
- return
// loadProviders resolves the provider list from, in order of preference:
// a fresh cache file, a live fetch from Catwalk, or (when auto-update is
// disabled) the embedded list bundled with the binary.
//
// Strategy:
//   - auto-update disabled: use the cache if it exists at all (even stale);
//     otherwise seed the cache from the embedded providers.
//   - cache exists and is fresh: serve it immediately and refresh it from
//     Catwalk in a fire-and-forget background goroutine.
//   - otherwise: fetch from Catwalk synchronously and write the cache.
func loadProviders(autoUpdateDisabled bool, client ProviderClient, path string) ([]catwalk.Provider, error) {
	cacheIsStale, cacheExists := isCacheStale(path)

	// catwalkGetAndSave fetches synchronously, rejects empty results, and
	// persists the list to the cache before returning it.
	catwalkGetAndSave := func() ([]catwalk.Provider, error) {
		providers, err := client.GetProviders()
		if err != nil {
			return nil, fmt.Errorf("failed to fetch providers from catwalk: %w", err)
		}
		if len(providers) == 0 {
			return nil, fmt.Errorf("empty providers list from catwalk")
		}
		if err := saveProvidersInCache(path, providers); err != nil {
			return nil, err
		}
		return providers, nil
	}

	// backgroundCacheUpdate refreshes the cache without blocking the caller.
	// Failures are logged only; the caller already has usable (cached) data.
	// NOTE(review): the goroutine is untracked — nothing waits for it on
	// shutdown. Acceptable for a cache write, but confirm.
	backgroundCacheUpdate := func() {
		go func() {
			slog.Info("Updating providers cache in background", "path", path)

			providers, err := client.GetProviders()
			if err != nil {
				slog.Error("Failed to fetch providers in background from Catwalk", "error", err)
				return
			}
			if len(providers) == 0 {
				slog.Error("Empty providers list from Catwalk")
				return
			}
			if err := saveProvidersInCache(path, providers); err != nil {
				slog.Error("Failed to update providers.json in background", "error", err)
			}
		}()
	}

	switch {
	case autoUpdateDisabled:
		slog.Warn("Providers auto-update is disabled")

		if cacheExists {
			// A stale cache is still preferred over the embedded list here,
			// since the user may have updated it manually.
			slog.Warn("Using locally cached providers")
			return loadProvidersFromCache(path)
		}

		slog.Warn("Saving embedded providers to cache")
		providers := embedded.GetAll()
		if err := saveProvidersInCache(path, providers); err != nil {
			return nil, err
		}
		return providers, nil

	case cacheExists && !cacheIsStale:
		slog.Info("Recent providers cache is available.", "path", path)

		providers, err := loadProvidersFromCache(path)
		if err != nil {
			return nil, err
		}
		if len(providers) == 0 {
			// Cache file exists but holds an empty list; treat as missing.
			return catwalkGetAndSave()
		}
		backgroundCacheUpdate()
		return providers, nil

	default:
		slog.Info("Cache is not available or is stale. Fetching providers from Catwalk.", "path", path)

		providers, err := catwalkGetAndSave()
		if err != nil {
			catwalkUrl := fmt.Sprintf("%s/providers", cmp.Or(os.Getenv("CATWALK_URL"), defaultCatwalkURL))
			return nil, fmt.Errorf("Crush was unable to fetch an updated list of providers from %s. Consider setting CRUSH_DISABLE_PROVIDER_AUTO_UPDATE=1 to use the embedded providers bundled at the time of this Crush release. You can also update providers manually. For more info see crush update-providers --help. %w", catwalkUrl, err) //nolint:staticcheck
		}
		return providers, nil
	}
}
func isCacheStale(path string) (stale, exists bool) {
@@ -19,8 +19,8 @@ func TestProvider_loadProvidersEmptyResult(t *testing.T) {
client := &emptyProviderClient{}
tmpPath := t.TempDir() + "/providers.json"
- providers, err := loadProviders(client, tmpPath)
- require.EqualError(t, err, "failed to load providers")
+ providers, err := loadProviders(false, client, tmpPath)
+ require.Contains(t, err.Error(), "Crush was unable to fetch an updated list of providers")
require.Empty(t, providers)
require.Len(t, providers, 0)
@@ -39,7 +39,7 @@ func TestProvider_loadProvidersEmptyCache(t *testing.T) {
require.NoError(t, os.WriteFile(tmpPath, data, 0o644))
// Should refresh and get real providers instead of using empty cache
- providers, err := loadProviders(client, tmpPath)
+ providers, err := loadProviders(false, client, tmpPath)
require.NoError(t, err)
require.NotNil(t, providers)
require.Len(t, providers, 1)
@@ -28,7 +28,7 @@ func (m *mockProviderClient) GetProviders() ([]catwalk.Provider, error) {
func TestProvider_loadProvidersNoIssues(t *testing.T) {
client := &mockProviderClient{shouldFail: false}
tmpPath := t.TempDir() + "/providers.json"
- providers, err := loadProviders(client, tmpPath)
+ providers, err := loadProviders(false, client, tmpPath)
require.NoError(t, err)
require.NotNil(t, providers)
require.Len(t, providers, 1)
@@ -57,7 +57,7 @@ func TestProvider_loadProvidersWithIssues(t *testing.T) {
if err != nil {
t.Fatalf("Failed to write old providers to file: %v", err)
}
- providers, err := loadProviders(client, tmpPath)
+ providers, err := loadProviders(false, client, tmpPath)
require.NoError(t, err)
require.NotNil(t, providers)
require.Len(t, providers, 1)
@@ -67,7 +67,7 @@ func TestProvider_loadProvidersWithIssues(t *testing.T) {
func TestProvider_loadProvidersWithIssuesAndNoCache(t *testing.T) {
client := &mockProviderClient{shouldFail: true}
tmpPath := t.TempDir() + "/providers.json"
- providers, err := loadProviders(client, tmpPath)
+ providers, err := loadProviders(false, client, tmpPath)
require.Error(t, err)
require.Nil(t, providers, "Expected nil providers when loading fails and no cache exists")
}
@@ -56,6 +56,18 @@ func (m *Map[K, V]) Len() int {
return len(m.inner)
}
+// GetOrSet gets and returns the key if it exists, otherwise, it executes the
+// given function, set its return value for the given key, and returns it.
+func (m *Map[K, V]) GetOrSet(key K, fn func() V) V {
+ got, ok := m.Get(key)
+ if ok {
+ return got
+ }
+ value := fn()
+ m.Set(key, value)
+ return value
+}
+
// Take gets an item and then deletes it.
func (m *Map[K, V]) Take(key K) (V, bool) {
m.mu.Lock()
@@ -54,6 +54,16 @@ func TestMap_Set(t *testing.T) {
require.Equal(t, 1, m.Len())
}
+func TestMap_GetOrSet(t *testing.T) {
+ t.Parallel()
+
+ m := NewMap[string, int]()
+
+ require.Equal(t, 42, m.GetOrSet("key1", func() int { return 42 }))
+ require.Equal(t, 42, m.GetOrSet("key1", func() int { return 99999 }))
+ require.Equal(t, 1, m.Len())
+}
+
func TestMap_Get(t *testing.T) {
t.Parallel()
@@ -16,11 +16,9 @@ type LazySlice[K any] struct {
// to populate it.
func NewLazySlice[K any](load func() []K) *LazySlice[K] {
	s := &LazySlice[K]{}
	// WaitGroup.Go (Go 1.25) spawns the loader goroutine and tracks its
	// completion, so consumers (see Seq) can block until s.inner is populated.
	s.wg.Go(func() {
		s.inner = load()
	})
	return s
}
@@ -5,6 +5,7 @@ import (
"sync"
"sync/atomic"
"testing"
+ "testing/synctest"
"time"
"github.com/stretchr/testify/require"
@@ -13,79 +14,66 @@ import (
// TestLazySlice_Seq verifies Seq yields exactly the loaded elements even when
// the load function finishes after iteration starts. synctest makes the
// sleep-based ordering deterministic in virtual time.
func TestLazySlice_Seq(t *testing.T) {
	t.Parallel()

	synctest.Test(t, func(t *testing.T) {
		t.Helper()
		data := []string{"a", "b", "c"}
		s := NewLazySlice(func() []string {
			time.Sleep(10 * time.Millisecond) // Small delay to ensure loading happens
			return data
		})
		require.Equal(t, data, slices.Collect(s.Seq()))
	})
}
// TestLazySlice_SeqWaitsForLoading verifies Seq blocks until the loader has
// finished: loading must not be observed before Seq, and must be complete
// after Seq returns its elements.
func TestLazySlice_SeqWaitsForLoading(t *testing.T) {
	t.Parallel()
	synctest.Test(t, func(t *testing.T) {
		t.Helper()

		var loaded atomic.Bool
		data := []string{"x", "y", "z"}

		s := NewLazySlice(func() []string {
			time.Sleep(100 * time.Millisecond)
			loaded.Store(true)
			return data
		})

		require.False(t, loaded.Load(), "should not be loaded immediately")
		require.Equal(t, data, slices.Collect(s.Seq()))
		require.True(t, loaded.Load(), "should be loaded after Seq")
	})
}
// TestLazySlice_EmptySlice verifies that a loader returning an empty slice
// produces an empty (zero-element) sequence rather than blocking or yielding.
func TestLazySlice_EmptySlice(t *testing.T) {
	t.Parallel()
	s := NewLazySlice(func() []string {
		return []string{}
	})
	require.Empty(t, slices.Collect(s.Seq()))
}
func TestLazySlice_EarlyBreak(t *testing.T) {
t.Parallel()
- data := []string{"a", "b", "c", "d", "e"}
- s := NewLazySlice(func() []string {
- // TODO: use synctest when new Go is out.
- time.Sleep(10 * time.Millisecond) // Small delay to ensure loading happens
- return data
- })
+ synctest.Test(t, func(t *testing.T) {
+ t.Helper()
+ data := []string{"a", "b", "c", "d", "e"}
+ s := NewLazySlice(func() []string {
+ time.Sleep(10 * time.Millisecond) // Small delay to ensure loading happens
+ return data
+ })
- var result []string
- for v := range s.Seq() {
- result = append(result, v)
- if len(result) == 2 {
- break
+ var result []string
+ for v := range s.Seq() {
+ result = append(result, v)
+ if len(result) == 2 {
+ break
+ }
}
- }
- require.Equal(t, []string{"a", "b"}, result)
+ require.Equal(t, []string{"a", "b"}, result)
+ })
}
func TestSlice(t *testing.T) {
@@ -0,0 +1,51 @@
+package csync
+
+import (
+ "iter"
+ "sync/atomic"
+)
+
// NewVersionedMap creates a new versioned, thread-safe map with version 0.
func NewVersionedMap[K comparable, V any]() *VersionedMap[K, V] {
	return &VersionedMap[K, V]{
		m: NewMap[K, V](),
	}
}
+
// VersionedMap is a thread-safe map that keeps track of its version: every
// mutation (Set or Del) increments a monotonically increasing counter that
// callers can poll via Version to cheaply detect changes.
type VersionedMap[K comparable, V any] struct {
	m *Map[K, V]    // underlying thread-safe map holding the data
	v atomic.Uint64 // mutation counter, bumped by Set and Del
}
+
// Get gets the value for the specified key from the map, reporting whether
// the key was present.
func (m *VersionedMap[K, V]) Get(key K) (V, bool) {
	return m.m.Get(key)
}
+
// Set sets the value for the specified key in the map and increments the
// version.
//
// NOTE(review): the store and the version bump are two separate atomic
// operations, so a concurrent reader may briefly observe the new value
// together with the old version — confirm callers tolerate this.
func (m *VersionedMap[K, V]) Set(key K, value V) {
	m.m.Set(key, value)
	m.v.Add(1)
}
+
// Del deletes the specified key from the map and increments the version.
// The version is bumped even when the key was not present.
func (m *VersionedMap[K, V]) Del(key K) {
	m.m.Del(key)
	m.v.Add(1)
}
+
// Seq2 returns an iter.Seq2 that yields key-value pairs from the map.
// Ordering is whatever the underlying Map provides.
func (m *VersionedMap[K, V]) Seq2() iter.Seq2[K, V] {
	return m.m.Seq2()
}
+
// Len returns the number of items currently in the map; it delegates to the
// underlying Map.
func (m *VersionedMap[K, V]) Len() int {
	return m.m.Len()
}
+
// Version returns the current version of the map: a monotonically increasing
// counter bumped once per Set or Del call, usable for cheap change detection.
func (m *VersionedMap[K, V]) Version() uint64 {
	return m.v.Load()
}
@@ -0,0 +1,89 @@
+package csync
+
import (
	"sync"
	"testing"

	"github.com/stretchr/testify/require"
)
+
// TestVersionedMap_Set verifies that Set stores the value and bumps the
// version counter exactly once.
func TestVersionedMap_Set(t *testing.T) {
	t.Parallel()

	vm := NewVersionedMap[string, int]()
	require.Equal(t, uint64(0), vm.Version())

	vm.Set("key1", 42)
	require.Equal(t, uint64(1), vm.Version())

	value, ok := vm.Get("key1")
	require.True(t, ok)
	require.Equal(t, 42, value)
}
+
// TestVersionedMap_Del verifies that Del removes the key and bumps the
// version counter.
func TestVersionedMap_Del(t *testing.T) {
	t.Parallel()

	vm := NewVersionedMap[string, int]()
	vm.Set("key1", 42)
	initialVersion := vm.Version()

	vm.Del("key1")
	require.Equal(t, initialVersion+1, vm.Version())

	_, ok := vm.Get("key1")
	require.False(t, ok)
}
+
// TestVersionedMap_VersionIncrement verifies that every mutation — including
// deleting a key that does not exist — increments the version by one.
func TestVersionedMap_VersionIncrement(t *testing.T) {
	t.Parallel()

	vm := NewVersionedMap[string, int]()
	initialVersion := vm.Version()

	// Setting a value should increment the version
	vm.Set("key1", 42)
	require.Equal(t, initialVersion+1, vm.Version())

	// Deleting a value should increment the version
	vm.Del("key1")
	require.Equal(t, initialVersion+2, vm.Version())

	// Deleting a non-existent key should still increment the version
	vm.Del("nonexistent")
	require.Equal(t, initialVersion+3, vm.Version())
}
+
+func TestVersionedMap_ConcurrentAccess(t *testing.T) {
+ t.Parallel()
+
+ vm := NewVersionedMap[int, int]()
+ const numGoroutines = 100
+ const numOperations = 100
+
+ // Initial version
+ initialVersion := vm.Version()
+
+ // Perform concurrent Set and Del operations
+ for i := range numGoroutines {
+ go func(id int) {
+ for j := range numOperations {
+ key := id*numOperations + j
+ vm.Set(key, key*2)
+ vm.Del(key)
+ }
+ }(i)
+ }
+
+ // Wait for operations to complete by checking the version
+ // This is a simplified check - in a real test you might want to use sync.WaitGroup
+ expectedMinVersion := initialVersion + uint64(numGoroutines*numOperations*2)
+
+ // Allow some time for operations to complete
+ for vm.Version() < expectedMinVersion {
+ // Busy wait - in a real test you'd use proper synchronization
+ }
+
+ // Final version should be at least the expected minimum
+ require.GreaterOrEqual(t, vm.Version(), expectedMinVersion)
+ require.Equal(t, 0, vm.Len())
+}
@@ -5,10 +5,10 @@ import (
"database/sql"
"fmt"
"log/slog"
- "os"
"path/filepath"
- _ "github.com/ncruces/go-sqlite3/driver"
+ "github.com/ncruces/go-sqlite3"
+ "github.com/ncruces/go-sqlite3/driver"
_ "github.com/ncruces/go-sqlite3/embed"
"github.com/pressly/goose/v3"
@@ -18,21 +18,7 @@ func Connect(ctx context.Context, dataDir string) (*sql.DB, error) {
if dataDir == "" {
return nil, fmt.Errorf("data.dir is not set")
}
- if err := os.MkdirAll(dataDir, 0o700); err != nil {
- return nil, fmt.Errorf("failed to create data directory: %w", err)
- }
dbPath := filepath.Join(dataDir, "crush.db")
- // Open the SQLite database
- db, err := sql.Open("sqlite3", dbPath)
- if err != nil {
- return nil, fmt.Errorf("failed to open database: %w", err)
- }
-
- // Verify connection
- if err = db.PingContext(ctx); err != nil {
- db.Close()
- return nil, fmt.Errorf("failed to connect to database: %w", err)
- }
// Set pragmas for better performance
pragmas := []string{
@@ -41,14 +27,25 @@ func Connect(ctx context.Context, dataDir string) (*sql.DB, error) {
"PRAGMA page_size = 4096;",
"PRAGMA cache_size = -8000;",
"PRAGMA synchronous = NORMAL;",
+ "PRAGMA secure_delete = ON;",
}
- for _, pragma := range pragmas {
- if _, err = db.ExecContext(ctx, pragma); err != nil {
- slog.Error("Failed to set pragma", pragma, err)
- } else {
- slog.Debug("Set pragma", "pragma", pragma)
+ db, err := driver.Open(dbPath, func(c *sqlite3.Conn) error {
+ for _, pragma := range pragmas {
+ if err := c.Exec(pragma); err != nil {
+ return fmt.Errorf("failed to set pragma `%s`: %w", pragma, err)
+ }
}
+ return nil
+ })
+ if err != nil {
+ return nil, fmt.Errorf("failed to open database: %w", err)
+ }
+
+ // Verify connection
+ if err = db.PingContext(ctx); err != nil {
+ db.Close()
+ return nil, fmt.Errorf("failed to connect to database: %w", err)
}
goose.SetBaseFS(FS)
@@ -62,5 +59,6 @@ func Connect(ctx context.Context, dataDir string) (*sql.DB, error) {
slog.Error("Failed to apply migrations", "error", err)
return nil, fmt.Errorf("failed to apply migrations: %w", err)
}
+
return db, nil
}
@@ -0,0 +1,59 @@
+package event
+
+import (
+ "time"
+)
+
// appStartTime records when AppInitialized was called; AppExited uses it to
// compute the total session duration.
var appStartTime time.Time

// AppInitialized marks the start of the app session and sends the
// "app initialized" event.
func AppInitialized() {
	appStartTime = time.Now()
	send("app initialized")
}
+
+func AppExited() {
+ duration := time.Since(appStartTime).Truncate(time.Second)
+ send(
+ "app exited",
+ "app duration pretty", duration.String(),
+ "app duration in seconds", int64(duration.Seconds()),
+ )
+ Flush()
+}
+
// SessionCreated sends the "session created" event.
func SessionCreated() {
	send("session created")
}
+
// SessionDeleted sends the "session deleted" event.
func SessionDeleted() {
	send("session deleted")
}
+
// SessionSwitched sends the "session switched" event.
func SessionSwitched() {
	send("session switched")
}
+
// FilePickerOpened sends the "filepicker opened" event.
func FilePickerOpened() {
	send("filepicker opened")
}
+
+func PromptSent(props ...any) {
+ send(
+ "prompt sent",
+ props...,
+ )
+}
+
+func PromptResponded(props ...any) {
+ send(
+ "prompt responded",
+ props...,
+ )
+}
+
+func TokensUsed(props ...any) {
+ send(
+ "tokens used",
+ props...,
+ )
+}
@@ -0,0 +1,107 @@
+package event
+
+import (
+ "fmt"
+ "log/slog"
+ "os"
+ "path/filepath"
+ "reflect"
+ "runtime"
+
+ "github.com/charmbracelet/crush/internal/version"
+ "github.com/posthog/posthog-go"
+)
+
const (
	// endpoint is the telemetry ingestion endpoint events are posted to.
	endpoint = "https://data.charm.land"
	// key is the PostHog project API key (presumably a public, write-only
	// key, as is usual for PostHog — confirm before shipping).
	key = "phc_4zt4VgDWLqbYnJYEwLRxFoaTL2noNrQij0C6E8k3I0V"
)
+
var (
	// client is the shared PostHog client set by Init. While it is nil,
	// send/Error/Flush are no-ops.
	client posthog.Client

	// baseProps are environment properties merged into every event.
	baseProps = posthog.NewProperties().
		Set("GOOS", runtime.GOOS).
		Set("GOARCH", runtime.GOARCH).
		Set("TERM", os.Getenv("TERM")).
		Set("SHELL", filepath.Base(os.Getenv("SHELL"))).
		Set("Version", version.Version).
		Set("GoVersion", runtime.Version())
)
+
+func Init() {
+ c, err := posthog.NewWithConfig(key, posthog.Config{
+ Endpoint: endpoint,
+ Logger: logger{},
+ })
+ if err != nil {
+ slog.Error("Failed to initialize PostHog client", "error", err)
+ }
+ client = c
+ distinctId = getDistinctId()
+}
+
+// send logs an event to PostHog with the given event name and properties.
+func send(event string, props ...any) {
+ if client == nil {
+ return
+ }
+ err := client.Enqueue(posthog.Capture{
+ DistinctId: distinctId,
+ Event: event,
+ Properties: pairsToProps(props...).Merge(baseProps),
+ })
+ if err != nil {
+ slog.Error("Failed to enqueue PostHog event", "event", event, "props", props, "error", err)
+ return
+ }
+}
+
+// Error logs an error event to PostHog with the error type and message.
+func Error(err any, props ...any) {
+ if client == nil {
+ return
+ }
+ // The PostHog Go client does not yet support sending exceptions.
+ // We're mimicking the behavior by sending the minimal info required
+ // for PostHog to recognize this as an exception event.
+ props = append(
+ []any{
+ "$exception_list",
+ []map[string]string{
+ {"type": reflect.TypeOf(err).String(), "value": fmt.Sprintf("%v", err)},
+ },
+ },
+ props...,
+ )
+ send("$exception", props...)
+}
+
// Flush closes the PostHog client, which delivers any buffered events.
//
// NOTE(review): Close looks terminal — events sent after Flush will hit the
// closed client and be dropped/error. Confirm Flush is only called on exit.
func Flush() {
	if client == nil {
		return
	}
	if err := client.Close(); err != nil {
		slog.Error("Failed to flush PostHog events", "error", err)
	}
}
+
+func pairsToProps(props ...any) posthog.Properties {
+ p := posthog.NewProperties()
+
+ if !isEven(len(props)) {
+ slog.Error("Event properties must be provided as key-value pairs", "props", props)
+ return p
+ }
+
+ for i := 0; i < len(props); i += 2 {
+ key := props[i].(string)
+ value := props[i+1]
+ p = p.Set(key, value)
+ }
+ return p
+}
+
// isEven reports whether n is an even number.
func isEven(n int) bool {
	return n&1 == 0
}
@@ -0,0 +1,49 @@
+package event
+
+import (
+ "crypto/hmac"
+ "crypto/sha256"
+ "encoding/hex"
+ "fmt"
+ "net"
+
+ "github.com/denisbrodbeck/machineid"
+)
+
// distinctId anonymously identifies this installation; set once by Init via
// getDistinctId.
var distinctId string

const (
	// hashKey keys the machine-ID/MAC hashing so raw identifiers are never
	// sent as-is.
	hashKey = "charm"
	// fallbackId is used when no stable machine identifier can be derived.
	fallbackId = "unknown"
)
+
// getDistinctId derives a stable, anonymous identifier for this machine.
// It prefers the app-keyed protected machine ID, falls back to a hash of the
// first active MAC address, and finally to the constant fallbackId.
func getDistinctId() string {
	if id, err := machineid.ProtectedID(hashKey); err == nil {
		return id
	}
	if macAddr, err := getMacAddr(); err == nil {
		return hashString(macAddr)
	}
	return fallbackId
}
+
// getMacAddr returns the hardware address of the first network interface
// that is up, is not a loopback, has a hardware address, and reports at
// least one assigned address.
func getMacAddr() (string, error) {
	ifaces, err := net.Interfaces()
	if err != nil {
		return "", err
	}
	for _, ifc := range ifaces {
		if ifc.Flags&net.FlagUp == 0 || ifc.Flags&net.FlagLoopback != 0 || len(ifc.HardwareAddr) == 0 {
			continue
		}
		if addrs, err := ifc.Addrs(); err == nil && len(addrs) > 0 {
			return ifc.HardwareAddr.String(), nil
		}
	}
	return "", fmt.Errorf("no active interface with mac address found")
}
+
+func hashString(str string) string {
+ hash := hmac.New(sha256.New, []byte(str))
+ hash.Write([]byte(hashKey))
+ return hex.EncodeToString(hash.Sum(nil))
+}
@@ -0,0 +1,28 @@
+package event
+
+import (
+ "fmt"
+ "log/slog"
+
+ "github.com/posthog/posthog-go"
+)
+
// Compile-time check that logger satisfies the posthog.Logger interface.
var _ posthog.Logger = logger{}

// logger adapts the standard library's slog to posthog's Logger interface.
type logger struct{}
+
// Debugf forwards PostHog debug messages to slog at debug level.
func (logger) Debugf(format string, args ...any) {
	slog.Debug(fmt.Sprintf(format, args...))
}
+
// Logf forwards PostHog informational messages to slog at info level.
func (logger) Logf(format string, args ...any) {
	slog.Info(fmt.Sprintf(format, args...))
}
+
// Warnf forwards PostHog warnings to slog at warn level.
func (logger) Warnf(format string, args ...any) {
	slog.Warn(fmt.Sprintf(format, args...))
}
+
// Errorf forwards PostHog errors to slog at error level.
func (logger) Errorf(format string, args ...any) {
	slog.Error(fmt.Sprintf(format, args...))
}
@@ -10,8 +10,7 @@ import (
"github.com/bmatcuk/doublestar/v4"
"github.com/charlievieth/fastwalk"
-
- ignore "github.com/sabhiram/go-gitignore"
+ "github.com/charmbracelet/crush/internal/home"
)
type FileInfo struct {
@@ -57,63 +56,29 @@ func SkipHidden(path string) bool {
}
// FastGlobWalker provides gitignore-aware file walking with fastwalk
+// It uses hierarchical ignore checking like git does, checking .gitignore/.crushignore
+// files in each directory from the root to the target path.
type FastGlobWalker struct {
- gitignore *ignore.GitIgnore
- crushignore *ignore.GitIgnore
- rootPath string
+ directoryLister *directoryLister
}
func NewFastGlobWalker(searchPath string) *FastGlobWalker {
- walker := &FastGlobWalker{
- rootPath: searchPath,
- }
-
- // Load gitignore if it exists
- gitignorePath := filepath.Join(searchPath, ".gitignore")
- if _, err := os.Stat(gitignorePath); err == nil {
- if gi, err := ignore.CompileIgnoreFile(gitignorePath); err == nil {
- walker.gitignore = gi
- }
- }
-
- // Load crushignore if it exists
- crushignorePath := filepath.Join(searchPath, ".crushignore")
- if _, err := os.Stat(crushignorePath); err == nil {
- if ci, err := ignore.CompileIgnoreFile(crushignorePath); err == nil {
- walker.crushignore = ci
- }
+ return &FastGlobWalker{
+ directoryLister: NewDirectoryLister(searchPath),
}
-
- return walker
}
-// ShouldSkip checks if a path should be skipped based on gitignore, crushignore, and hidden file rules
+// ShouldSkip checks if a path should be skipped based on hierarchical gitignore,
+// crushignore, and hidden file rules
func (w *FastGlobWalker) ShouldSkip(path string) bool {
- if SkipHidden(path) {
- return true
- }
-
- relPath, err := filepath.Rel(w.rootPath, path)
- if err != nil {
- return false
- }
-
- if w.gitignore != nil {
- if w.gitignore.MatchesPath(relPath) {
- return true
- }
- }
-
- if w.crushignore != nil {
- if w.crushignore.MatchesPath(relPath) {
- return true
- }
- }
-
- return false
+ return w.directoryLister.shouldIgnore(path, nil)
}
func GlobWithDoubleStar(pattern, searchPath string, limit int) ([]string, bool, error) {
+ // Normalize pattern to forward slashes on Windows so their config can use
+ // backslashes
+ pattern = filepath.ToSlash(pattern)
+
walker := NewFastGlobWalker(searchPath)
var matches []FileInfo
conf := fastwalk.Config{
@@ -131,19 +96,21 @@ func GlobWithDoubleStar(pattern, searchPath string, limit int) ([]string, bool,
if walker.ShouldSkip(path) {
return filepath.SkipDir
}
- return nil
}
if walker.ShouldSkip(path) {
return nil
}
- // Check if path matches the pattern
relPath, err := filepath.Rel(searchPath, path)
if err != nil {
relPath = path
}
+ // Normalize separators to forward slashes
+ relPath = filepath.ToSlash(relPath)
+
+ // Check if path matches the pattern
matched, err := doublestar.Match(pattern, relPath)
if err != nil || !matched {
return nil
@@ -181,13 +148,45 @@ func GlobWithDoubleStar(pattern, searchPath string, limit int) ([]string, bool,
return results, truncated, nil
}
-func PrettyPath(path string) string {
- // replace home directory with ~
- homeDir, err := os.UserHomeDir()
- if err == nil {
- path = strings.ReplaceAll(path, homeDir, "~")
+// ShouldExcludeFile checks if a file should be excluded from processing
+// based on common patterns and ignore rules
+func ShouldExcludeFile(rootPath, filePath string) bool {
+ return NewDirectoryLister(rootPath).
+ shouldIgnore(filePath, nil)
+}
+
+// WalkDirectories walks a directory tree and calls the provided function for each directory,
+// respecting hierarchical .gitignore/.crushignore files like git does.
+func WalkDirectories(rootPath string, fn func(path string, d os.DirEntry, err error) error) error {
+ dl := NewDirectoryLister(rootPath)
+
+ conf := fastwalk.Config{
+ Follow: true,
+ ToSlash: fastwalk.DefaultToSlash(),
+ Sort: fastwalk.SortDirsFirst,
}
- return path
+
+ return fastwalk.Walk(&conf, rootPath, func(path string, d os.DirEntry, err error) error {
+ if err != nil {
+ return fn(path, d, err)
+ }
+
+ // Only process directories
+ if !d.IsDir() {
+ return nil
+ }
+
+ // Check if directory should be ignored
+ if dl.shouldIgnore(path, nil) {
+ return filepath.SkipDir
+ }
+
+ return fn(path, d, err)
+ })
+}
+
// PrettyPath shortens path for display via home.Short (presumably
// abbreviating the user's home directory to "~", as the previous
// implementation did — confirm against the home package).
func PrettyPath(path string) string {
	return home.Short(path)
}
func DirTrim(pwd string, lim int) string {
@@ -233,3 +232,19 @@ func HasPrefix(path, prefix string) bool {
// If path is within prefix, Rel will not return a path starting with ".."
return !strings.HasPrefix(rel, "..")
}
+
// ToUnixLineEndings converts Windows line endings (CRLF) to Unix line
// endings (LF). The boolean reports whether a conversion took place.
func ToUnixLineEndings(content string) (string, bool) {
	if !strings.Contains(content, "\r\n") {
		return content, false
	}
	return strings.ReplaceAll(content, "\r\n", "\n"), true
}
+
// ToWindowsLineEndings converts Unix line endings (LF) to Windows line
// endings (CRLF). The boolean reports whether the content was modified.
func ToWindowsLineEndings(content string) (string, bool) {
	// Content that already contains CRLF sequences is treated as being in
	// Windows form and left untouched.
	if strings.Contains(content, "\r\n") {
		return content, false
	}
	converted := strings.ReplaceAll(content, "\n", "\r\n")
	// The original reported true even when the content had no "\n" and
	// nothing changed (asymmetric with ToUnixLineEndings); only report a
	// conversion when the content actually differs.
	return converted, converted != content
}
@@ -0,0 +1,273 @@
+package fsext
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "testing"
+ "testing/synctest"
+ "time"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestGlobWithDoubleStar(t *testing.T) {
+ t.Run("finds files matching pattern", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ mainGo := filepath.Join(testDir, "src", "main.go")
+ utilsGo := filepath.Join(testDir, "src", "utils.go")
+ helperGo := filepath.Join(testDir, "pkg", "helper.go")
+ readmeMd := filepath.Join(testDir, "README.md")
+
+ for _, file := range []string{mainGo, utilsGo, helperGo, readmeMd} {
+ require.NoError(t, os.MkdirAll(filepath.Dir(file), 0o755))
+ require.NoError(t, os.WriteFile(file, []byte("test content"), 0o644))
+ }
+
+ matches, truncated, err := GlobWithDoubleStar("**/main.go", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ require.Equal(t, matches, []string{mainGo})
+ })
+
+ t.Run("finds directories matching pattern", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ srcDir := filepath.Join(testDir, "src")
+ pkgDir := filepath.Join(testDir, "pkg")
+ internalDir := filepath.Join(testDir, "internal")
+ cmdDir := filepath.Join(testDir, "cmd")
+ pkgFile := filepath.Join(testDir, "pkg.txt")
+
+ for _, dir := range []string{srcDir, pkgDir, internalDir, cmdDir} {
+ require.NoError(t, os.MkdirAll(dir, 0o755))
+ }
+
+ require.NoError(t, os.WriteFile(filepath.Join(srcDir, "main.go"), []byte("package main"), 0o644))
+ require.NoError(t, os.WriteFile(pkgFile, []byte("test"), 0o644))
+
+ matches, truncated, err := GlobWithDoubleStar("pkg", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ require.Equal(t, matches, []string{pkgDir})
+ })
+
+ t.Run("finds nested directories with wildcard patterns", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ srcPkgDir := filepath.Join(testDir, "src", "pkg")
+ libPkgDir := filepath.Join(testDir, "lib", "pkg")
+ mainPkgDir := filepath.Join(testDir, "pkg")
+ otherDir := filepath.Join(testDir, "other")
+
+ for _, dir := range []string{srcPkgDir, libPkgDir, mainPkgDir, otherDir} {
+ require.NoError(t, os.MkdirAll(dir, 0o755))
+ }
+
+ matches, truncated, err := GlobWithDoubleStar("**/pkg", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ var relativeMatches []string
+ for _, match := range matches {
+ rel, err := filepath.Rel(testDir, match)
+ require.NoError(t, err)
+ relativeMatches = append(relativeMatches, filepath.ToSlash(rel))
+ }
+
+ require.ElementsMatch(t, relativeMatches, []string{"pkg", "src/pkg", "lib/pkg"})
+ })
+
+ t.Run("finds directory contents with recursive patterns", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ pkgDir := filepath.Join(testDir, "pkg")
+ pkgFile1 := filepath.Join(pkgDir, "main.go")
+ pkgFile2 := filepath.Join(pkgDir, "utils.go")
+ pkgSubdir := filepath.Join(pkgDir, "internal")
+ pkgSubfile := filepath.Join(pkgSubdir, "helper.go")
+
+ require.NoError(t, os.MkdirAll(pkgSubdir, 0o755))
+
+ for _, file := range []string{pkgFile1, pkgFile2, pkgSubfile} {
+ require.NoError(t, os.WriteFile(file, []byte("package main"), 0o644))
+ }
+
+ matches, truncated, err := GlobWithDoubleStar("pkg/**", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ var relativeMatches []string
+ for _, match := range matches {
+ rel, err := filepath.Rel(testDir, match)
+ require.NoError(t, err)
+ relativeMatches = append(relativeMatches, filepath.ToSlash(rel))
+ }
+
+ require.ElementsMatch(t, relativeMatches, []string{
+ "pkg",
+ "pkg/main.go",
+ "pkg/utils.go",
+ "pkg/internal",
+ "pkg/internal/helper.go",
+ })
+ })
+
+ t.Run("respects limit parameter", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ for i := range 10 {
+ file := filepath.Join(testDir, "file", fmt.Sprintf("test%d.txt", i))
+ require.NoError(t, os.MkdirAll(filepath.Dir(file), 0o755))
+ require.NoError(t, os.WriteFile(file, []byte("test"), 0o644))
+ }
+
+ matches, truncated, err := GlobWithDoubleStar("**/*.txt", testDir, 5)
+ require.NoError(t, err)
+ require.True(t, truncated, "Expected truncation with limit")
+ require.Len(t, matches, 5, "Expected exactly 5 matches with limit")
+ })
+
+ t.Run("handles nested directory patterns", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ file1 := filepath.Join(testDir, "a", "b", "c", "file1.txt")
+ file2 := filepath.Join(testDir, "a", "b", "file2.txt")
+ file3 := filepath.Join(testDir, "a", "file3.txt")
+ file4 := filepath.Join(testDir, "file4.txt")
+
+ for _, file := range []string{file1, file2, file3, file4} {
+ require.NoError(t, os.MkdirAll(filepath.Dir(file), 0o755))
+ require.NoError(t, os.WriteFile(file, []byte("test"), 0o644))
+ }
+
+ matches, truncated, err := GlobWithDoubleStar("a/b/c/file1.txt", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ require.Equal(t, matches, []string{file1})
+ })
+
+ t.Run("returns results sorted by modification time (newest first)", func(t *testing.T) {
+ synctest.Test(t, func(t *testing.T) {
+ testDir := t.TempDir()
+
+ file1 := filepath.Join(testDir, "file1.txt")
+ require.NoError(t, os.WriteFile(file1, []byte("first"), 0o644))
+
+ file2 := filepath.Join(testDir, "file2.txt")
+ require.NoError(t, os.WriteFile(file2, []byte("second"), 0o644))
+
+ file3 := filepath.Join(testDir, "file3.txt")
+ require.NoError(t, os.WriteFile(file3, []byte("third"), 0o644))
+
+ base := time.Now()
+ m1 := base
+ m2 := base.Add(1 * time.Millisecond)
+ m3 := base.Add(2 * time.Millisecond)
+
+ require.NoError(t, os.Chtimes(file1, m1, m1))
+ require.NoError(t, os.Chtimes(file2, m2, m2))
+ require.NoError(t, os.Chtimes(file3, m3, m3))
+
+ matches, truncated, err := GlobWithDoubleStar("*.txt", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ require.Equal(t, matches, []string{file3, file2, file1})
+ })
+ })
+
+ t.Run("handles empty directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ matches, truncated, err := GlobWithDoubleStar("**", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+ // Even empty directories should return the directory itself
+ require.Equal(t, matches, []string{testDir})
+ })
+
+ t.Run("handles non-existent search path", func(t *testing.T) {
+ nonExistentDir := filepath.Join(t.TempDir(), "does", "not", "exist")
+
+ matches, truncated, err := GlobWithDoubleStar("**", nonExistentDir, 0)
+ require.Error(t, err, "Should return error for non-existent search path")
+ require.False(t, truncated)
+ require.Empty(t, matches)
+ })
+
+ t.Run("respects basic ignore patterns", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ rootIgnore := filepath.Join(testDir, ".crushignore")
+
+ require.NoError(t, os.WriteFile(rootIgnore, []byte("*.tmp\nbackup/\n"), 0o644))
+
+ goodFile := filepath.Join(testDir, "good.txt")
+ require.NoError(t, os.WriteFile(goodFile, []byte("content"), 0o644))
+
+ badFile := filepath.Join(testDir, "bad.tmp")
+ require.NoError(t, os.WriteFile(badFile, []byte("temp content"), 0o644))
+
+ goodDir := filepath.Join(testDir, "src")
+ require.NoError(t, os.MkdirAll(goodDir, 0o755))
+
+ ignoredDir := filepath.Join(testDir, "backup")
+ require.NoError(t, os.MkdirAll(ignoredDir, 0o755))
+
+ ignoredFileInDir := filepath.Join(testDir, "backup", "old.txt")
+ require.NoError(t, os.WriteFile(ignoredFileInDir, []byte("old content"), 0o644))
+
+ matches, truncated, err := GlobWithDoubleStar("*.tmp", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+ require.Empty(t, matches, "Expected no matches for '*.tmp' pattern (should be ignored)")
+
+ matches, truncated, err = GlobWithDoubleStar("backup", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+ require.Empty(t, matches, "Expected no matches for 'backup' pattern (should be ignored)")
+
+ matches, truncated, err = GlobWithDoubleStar("*.txt", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+ require.Equal(t, matches, []string{goodFile})
+ })
+
+ t.Run("handles mixed file and directory matching with sorting", func(t *testing.T) {
+ synctest.Test(t, func(t *testing.T) {
+ testDir := t.TempDir()
+
+ oldestFile := filepath.Join(testDir, "old.test")
+ require.NoError(t, os.WriteFile(oldestFile, []byte("old"), 0o644))
+
+ middleDir := filepath.Join(testDir, "mid.test")
+ require.NoError(t, os.MkdirAll(middleDir, 0o755))
+
+ newestFile := filepath.Join(testDir, "new.test")
+ require.NoError(t, os.WriteFile(newestFile, []byte("new"), 0o644))
+
+ base := time.Now()
+ tOldest := base
+ tMiddle := base.Add(1 * time.Millisecond)
+ tNewest := base.Add(2 * time.Millisecond)
+
+ // Reverse the expected order
+ require.NoError(t, os.Chtimes(newestFile, tOldest, tOldest))
+ require.NoError(t, os.Chtimes(middleDir, tMiddle, tMiddle))
+ require.NoError(t, os.Chtimes(oldestFile, tNewest, tNewest))
+
+ matches, truncated, err := GlobWithDoubleStar("*.test", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ // Results should be sorted by mod time, but we set the oldestFile
+ // to have the most recent mod time
+ require.Equal(t, matches, []string{oldestFile, middleDir, newestFile})
+ })
+ })
+}
@@ -2,6 +2,7 @@ package fsext
import (
"os"
+ "path/filepath"
"testing"
"github.com/stretchr/testify/require"
@@ -25,20 +26,103 @@ func TestCrushIgnore(t *testing.T) {
// Create a .crushignore file that ignores .log files
require.NoError(t, os.WriteFile(".crushignore", []byte("*.log\n"), 0o644))
- // Test DirectoryLister
- t.Run("DirectoryLister respects .crushignore", func(t *testing.T) {
- dl := NewDirectoryLister(tempDir)
+ dl := NewDirectoryLister(tempDir)
+ require.True(t, dl.shouldIgnore("test2.log", nil), ".log files should be ignored")
+ require.False(t, dl.shouldIgnore("test1.txt", nil), ".txt files should not be ignored")
+ require.True(t, dl.shouldIgnore("test3.tmp", nil), ".tmp files should be ignored by common patterns")
+}
+
+func TestShouldExcludeFile(t *testing.T) {
+ t.Parallel()
+
+ // Create a temporary directory structure for testing
+ tempDir := t.TempDir()
+
+ // Create directories that should be ignored
+ nodeModules := filepath.Join(tempDir, "node_modules")
+ target := filepath.Join(tempDir, "target")
+ customIgnored := filepath.Join(tempDir, "custom_ignored")
+ normalDir := filepath.Join(tempDir, "src")
+
+ for _, dir := range []string{nodeModules, target, customIgnored, normalDir} {
+ if err := os.MkdirAll(dir, 0o755); err != nil {
+ t.Fatalf("Failed to create directory %s: %v", dir, err)
+ }
+ }
+
+ // Create .gitignore file
+ gitignoreContent := "node_modules/\ntarget/\n"
+ if err := os.WriteFile(filepath.Join(tempDir, ".gitignore"), []byte(gitignoreContent), 0o644); err != nil {
+ t.Fatalf("Failed to create .gitignore: %v", err)
+ }
+
+ // Create .crushignore file
+ crushignoreContent := "custom_ignored/\n"
+ if err := os.WriteFile(filepath.Join(tempDir, ".crushignore"), []byte(crushignoreContent), 0o644); err != nil {
+ t.Fatalf("Failed to create .crushignore: %v", err)
+ }
+
+ // Test that ignored directories are properly ignored
+ require.True(t, ShouldExcludeFile(tempDir, nodeModules), "Expected node_modules to be ignored by .gitignore")
+ require.True(t, ShouldExcludeFile(tempDir, target), "Expected target to be ignored by .gitignore")
+ require.True(t, ShouldExcludeFile(tempDir, customIgnored), "Expected custom_ignored to be ignored by .crushignore")
+
+ // Test that normal directories are not ignored
+ require.False(t, ShouldExcludeFile(tempDir, normalDir), "Expected src directory to not be ignored")
+
+ // Test that the workspace root itself is not ignored
+ require.False(t, ShouldExcludeFile(tempDir, tempDir), "Expected workspace root to not be ignored")
+}
+
+func TestShouldExcludeFileHierarchical(t *testing.T) {
+ t.Parallel()
+
+ // Create a nested directory structure for testing hierarchical ignore
+ tempDir := t.TempDir()
+
+ // Create nested directories
+ subDir := filepath.Join(tempDir, "subdir")
+ nestedNormal := filepath.Join(subDir, "normal_nested")
+
+ for _, dir := range []string{subDir, nestedNormal} {
+ if err := os.MkdirAll(dir, 0o755); err != nil {
+ t.Fatalf("Failed to create directory %s: %v", dir, err)
+ }
+ }
+
+ // Create .crushignore in subdir that ignores normal_nested
+ subCrushignore := "normal_nested/\n"
+ if err := os.WriteFile(filepath.Join(subDir, ".crushignore"), []byte(subCrushignore), 0o644); err != nil {
+ t.Fatalf("Failed to create subdir .crushignore: %v", err)
+ }
+
+ // Test hierarchical ignore behavior - this should work because the .crushignore is in the parent directory
+ require.True(t, ShouldExcludeFile(tempDir, nestedNormal), "Expected normal_nested to be ignored by subdir .crushignore")
+ require.False(t, ShouldExcludeFile(tempDir, subDir), "Expected subdir itself to not be ignored")
+}
+
+func TestShouldExcludeFileCommonPatterns(t *testing.T) {
+ t.Parallel()
+
+ tempDir := t.TempDir()
- // Test that .log files are ignored
- require.True(t, dl.gitignore == nil, "gitignore should be nil")
- require.NotNil(t, dl.crushignore, "crushignore should not be nil")
- })
+ // Create directories that should be ignored by common patterns
+ commonIgnored := []string{
+ filepath.Join(tempDir, ".git"),
+ filepath.Join(tempDir, "node_modules"),
+ filepath.Join(tempDir, "__pycache__"),
+ filepath.Join(tempDir, "target"),
+ filepath.Join(tempDir, ".vscode"),
+ }
- // Test FastGlobWalker
- t.Run("FastGlobWalker respects .crushignore", func(t *testing.T) {
- walker := NewFastGlobWalker(tempDir)
+ for _, dir := range commonIgnored {
+ if err := os.MkdirAll(dir, 0o755); err != nil {
+ t.Fatalf("Failed to create directory %s: %v", dir, err)
+ }
+ }
- require.True(t, walker.gitignore == nil, "gitignore should be nil")
- require.NotNil(t, walker.crushignore, "crushignore should not be nil")
- })
+ // Test that common patterns are ignored even without explicit ignore files
+ for _, dir := range commonIgnored {
+ require.True(t, ShouldExcludeFile(tempDir, dir), "Expected %s to be ignored by common patterns", filepath.Base(dir))
+ }
}
@@ -0,0 +1,141 @@
+package fsext
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+
+ "github.com/charmbracelet/crush/internal/home"
+)
+
+// Lookup searches for target files or directories starting from dir
+// and walking up the directory tree until the filesystem root is reached.
+// It also checks the ownership of files to ensure that the search does
+// not cross ownership boundaries. It skips ownership mismatches without
+// errors.
+// Returns full paths to found targets.
+// The search includes the starting directory itself.
+func Lookup(dir string, targets ...string) ([]string, error) {
+ if len(targets) == 0 {
+ return nil, nil
+ }
+
+ var found []string
+
+ err := traverseUp(dir, func(cwd string, owner int) error {
+ for _, target := range targets {
+ fpath := filepath.Join(cwd, target)
+ err := probeEnt(fpath, owner)
+
+			// skip to the next file if it does not exist or permission is denied
+ if errors.Is(err, os.ErrNotExist) ||
+ errors.Is(err, os.ErrPermission) {
+ continue
+ }
+
+ if err != nil {
+ return fmt.Errorf("error probing file %s: %w", fpath, err)
+ }
+
+ found = append(found, fpath)
+ }
+
+ return nil
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ return found, nil
+}
+
+// LookupClosest searches for a target file or directory starting from dir
+// and walking up the directory tree until found or root or home is reached.
+// It also checks the ownership of files to ensure that the search does
+// not cross ownership boundaries.
+// Returns the full path to the target if found, empty string and false otherwise.
+// The search includes the starting directory itself.
+func LookupClosest(dir, target string) (string, bool) {
+ var found string
+
+ err := traverseUp(dir, func(cwd string, owner int) error {
+ fpath := filepath.Join(cwd, target)
+
+ err := probeEnt(fpath, owner)
+ if errors.Is(err, os.ErrNotExist) {
+ return nil
+ }
+	tmp, err := os.MkdirTemp("", "crush")
+ if err != nil {
+ return fmt.Errorf("error probing file %s: %w", fpath, err)
+ }
+
+ if cwd == home.Dir() {
+ return filepath.SkipAll
+ }
+
+ found = fpath
+ return filepath.SkipAll
+ })
+
+ return found, err == nil && found != ""
+}
+
+// traverseUp walks up from the given directory until the filesystem root is reached.
+// It passes the absolute path of the current directory and the starting directory's
+// owner ID to the callback function. It is up to the caller to check ownership.
+func traverseUp(dir string, walkFn func(dir string, owner int) error) error {
+ cwd, err := filepath.Abs(dir)
+ if err != nil {
+ return fmt.Errorf("cannot convert CWD to absolute path: %w", err)
+ }
+
+ owner, err := Owner(dir)
+ if err != nil {
+ return fmt.Errorf("cannot get ownership: %w", err)
+ }
+
+ for {
+ err := walkFn(cwd, owner)
+ if err == nil || errors.Is(err, filepath.SkipDir) {
+ parent := filepath.Dir(cwd)
+ if parent == cwd {
+ return nil
+ }
+
+ cwd = parent
+ continue
+ }
+
+ if errors.Is(err, filepath.SkipAll) {
+ return nil
+ }
+
+ return err
+ }
+}
+
+// probeEnt checks whether the entity at the given path exists and belongs to the given owner.
+func probeEnt(fspath string, owner int) error {
+ _, err := os.Stat(fspath)
+ if err != nil {
+ return fmt.Errorf("cannot stat %s: %w", fspath, err)
+ }
+
+ // special case for ownership check bypass
+ if owner == -1 {
+ return nil
+ }
+
+ fowner, err := Owner(fspath)
+ if err != nil {
+ return fmt.Errorf("cannot get ownership for %s: %w", fspath, err)
+ }
+
+ if fowner != owner {
+ return os.ErrPermission
+ }
+
+ return nil
+}
@@ -0,0 +1,483 @@
+package fsext
+
+import (
+ "errors"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/charmbracelet/crush/internal/home"
+ "github.com/stretchr/testify/require"
+)
+
+func TestLookupClosest(t *testing.T) {
+ tempDir := t.TempDir()
+
+ // Change to temp directory
+ oldWd, _ := os.Getwd()
+ err := os.Chdir(tempDir)
+ require.NoError(t, err)
+
+ t.Cleanup(func() {
+ os.Chdir(oldWd)
+ })
+
+ t.Run("target found in starting directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create target file in current directory
+ targetFile := filepath.Join(testDir, "target.txt")
+ err := os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ foundPath, found := LookupClosest(testDir, "target.txt")
+ require.True(t, found)
+ require.Equal(t, targetFile, foundPath)
+ })
+
+ t.Run("target found in parent directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create subdirectory
+ subDir := filepath.Join(testDir, "subdir")
+ err := os.Mkdir(subDir, 0o755)
+ require.NoError(t, err)
+
+ // Create target file in parent directory
+ targetFile := filepath.Join(testDir, "target.txt")
+ err = os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ foundPath, found := LookupClosest(subDir, "target.txt")
+ require.True(t, found)
+ require.Equal(t, targetFile, foundPath)
+ })
+
+ t.Run("target found in grandparent directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create nested subdirectories
+ subDir := filepath.Join(testDir, "subdir")
+ err := os.Mkdir(subDir, 0o755)
+ require.NoError(t, err)
+
+ subSubDir := filepath.Join(subDir, "subsubdir")
+ err = os.Mkdir(subSubDir, 0o755)
+ require.NoError(t, err)
+
+ // Create target file in grandparent directory
+ targetFile := filepath.Join(testDir, "target.txt")
+ err = os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ foundPath, found := LookupClosest(subSubDir, "target.txt")
+ require.True(t, found)
+ require.Equal(t, targetFile, foundPath)
+ })
+
+ t.Run("target not found", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ foundPath, found := LookupClosest(testDir, "nonexistent.txt")
+ require.False(t, found)
+ require.Empty(t, foundPath)
+ })
+
+ t.Run("target directory found", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create target directory in current directory
+ targetDir := filepath.Join(testDir, "targetdir")
+ err := os.Mkdir(targetDir, 0o755)
+ require.NoError(t, err)
+
+ foundPath, found := LookupClosest(testDir, "targetdir")
+ require.True(t, found)
+ require.Equal(t, targetDir, foundPath)
+ })
+
+ t.Run("stops at home directory", func(t *testing.T) {
+ // This test is limited as we can't easily create files above home directory
+ // but we can test the behavior by searching from home directory itself
+ homeDir := home.Dir()
+
+ // Search for a file that doesn't exist from home directory
+ foundPath, found := LookupClosest(homeDir, "nonexistent_file_12345.txt")
+ require.False(t, found)
+ require.Empty(t, foundPath)
+ })
+
+ t.Run("invalid starting directory", func(t *testing.T) {
+ foundPath, found := LookupClosest("/invalid/path/that/does/not/exist", "target.txt")
+ require.False(t, found)
+ require.Empty(t, foundPath)
+ })
+
+ t.Run("relative path handling", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Change to test directory
+ oldWd, _ := os.Getwd()
+ err := os.Chdir(testDir)
+ require.NoError(t, err)
+ defer os.Chdir(oldWd)
+
+ // Create target file in current directory
+ err = os.WriteFile("target.txt", []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ // Search using relative path
+ foundPath, found := LookupClosest(".", "target.txt")
+ require.True(t, found)
+
+ // Resolve symlinks to handle macOS /private/var vs /var discrepancy
+ expectedPath, err := filepath.EvalSymlinks(filepath.Join(testDir, "target.txt"))
+ require.NoError(t, err)
+ actualPath, err := filepath.EvalSymlinks(foundPath)
+ require.NoError(t, err)
+ require.Equal(t, expectedPath, actualPath)
+ })
+}
+
+func TestLookupClosestWithOwnership(t *testing.T) {
+ // Note: Testing ownership boundaries is difficult in a cross-platform way
+ // without creating complex directory structures with different owners.
+ // This test focuses on the basic functionality when ownership checks pass.
+
+ tempDir := t.TempDir()
+
+ // Change to temp directory
+ oldWd, _ := os.Getwd()
+ err := os.Chdir(tempDir)
+ require.NoError(t, err)
+
+ t.Cleanup(func() {
+ os.Chdir(oldWd)
+ })
+
+ t.Run("search respects same ownership", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create subdirectory structure
+ subDir := filepath.Join(testDir, "subdir")
+ err := os.Mkdir(subDir, 0o755)
+ require.NoError(t, err)
+
+ // Create target file in parent directory
+ targetFile := filepath.Join(testDir, "target.txt")
+ err = os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ // Search should find the target assuming same ownership
+ foundPath, found := LookupClosest(subDir, "target.txt")
+ require.True(t, found)
+ require.Equal(t, targetFile, foundPath)
+ })
+}
+
+func TestLookup(t *testing.T) {
+ tempDir := t.TempDir()
+
+ // Change to temp directory
+ oldWd, _ := os.Getwd()
+ err := os.Chdir(tempDir)
+ require.NoError(t, err)
+
+ t.Cleanup(func() {
+ os.Chdir(oldWd)
+ })
+
+ t.Run("no targets returns empty slice", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ found, err := Lookup(testDir)
+ require.NoError(t, err)
+ require.Empty(t, found)
+ })
+
+ t.Run("single target found in starting directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create target file in current directory
+ targetFile := filepath.Join(testDir, "target.txt")
+ err := os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ found, err := Lookup(testDir, "target.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 1)
+ require.Equal(t, targetFile, found[0])
+ })
+
+ t.Run("multiple targets found in starting directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create multiple target files in current directory
+ targetFile1 := filepath.Join(testDir, "target1.txt")
+ targetFile2 := filepath.Join(testDir, "target2.txt")
+ targetFile3 := filepath.Join(testDir, "target3.txt")
+
+ err := os.WriteFile(targetFile1, []byte("test1"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile2, []byte("test2"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile3, []byte("test3"), 0o644)
+ require.NoError(t, err)
+
+ found, err := Lookup(testDir, "target1.txt", "target2.txt", "target3.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 3)
+ require.Contains(t, found, targetFile1)
+ require.Contains(t, found, targetFile2)
+ require.Contains(t, found, targetFile3)
+ })
+
+ t.Run("targets found in parent directories", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create subdirectory
+ subDir := filepath.Join(testDir, "subdir")
+ err := os.Mkdir(subDir, 0o755)
+ require.NoError(t, err)
+
+ // Create target files in parent directory
+ targetFile1 := filepath.Join(testDir, "target1.txt")
+ targetFile2 := filepath.Join(testDir, "target2.txt")
+ err = os.WriteFile(targetFile1, []byte("test1"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile2, []byte("test2"), 0o644)
+ require.NoError(t, err)
+
+ found, err := Lookup(subDir, "target1.txt", "target2.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 2)
+ require.Contains(t, found, targetFile1)
+ require.Contains(t, found, targetFile2)
+ })
+
+ t.Run("targets found across multiple directory levels", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create nested subdirectories
+ subDir := filepath.Join(testDir, "subdir")
+ err := os.Mkdir(subDir, 0o755)
+ require.NoError(t, err)
+
+ subSubDir := filepath.Join(subDir, "subsubdir")
+ err = os.Mkdir(subSubDir, 0o755)
+ require.NoError(t, err)
+
+ // Create target files at different levels
+ targetFile1 := filepath.Join(testDir, "target1.txt")
+ targetFile2 := filepath.Join(subDir, "target2.txt")
+ targetFile3 := filepath.Join(subSubDir, "target3.txt")
+
+ err = os.WriteFile(targetFile1, []byte("test1"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile2, []byte("test2"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile3, []byte("test3"), 0o644)
+ require.NoError(t, err)
+
+ found, err := Lookup(subSubDir, "target1.txt", "target2.txt", "target3.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 3)
+ require.Contains(t, found, targetFile1)
+ require.Contains(t, found, targetFile2)
+ require.Contains(t, found, targetFile3)
+ })
+
+ t.Run("some targets not found", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create only some target files
+ targetFile1 := filepath.Join(testDir, "target1.txt")
+ targetFile2 := filepath.Join(testDir, "target2.txt")
+
+ err := os.WriteFile(targetFile1, []byte("test1"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile2, []byte("test2"), 0o644)
+ require.NoError(t, err)
+
+ // Search for existing and non-existing targets
+ found, err := Lookup(testDir, "target1.txt", "nonexistent.txt", "target2.txt", "another_nonexistent.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 2)
+ require.Contains(t, found, targetFile1)
+ require.Contains(t, found, targetFile2)
+ })
+
+ t.Run("no targets found", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ found, err := Lookup(testDir, "nonexistent1.txt", "nonexistent2.txt", "nonexistent3.txt")
+ require.NoError(t, err)
+ require.Empty(t, found)
+ })
+
+ t.Run("target directories found", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create target directories
+ targetDir1 := filepath.Join(testDir, "targetdir1")
+ targetDir2 := filepath.Join(testDir, "targetdir2")
+ err := os.Mkdir(targetDir1, 0o755)
+ require.NoError(t, err)
+ err = os.Mkdir(targetDir2, 0o755)
+ require.NoError(t, err)
+
+ found, err := Lookup(testDir, "targetdir1", "targetdir2")
+ require.NoError(t, err)
+ require.Len(t, found, 2)
+ require.Contains(t, found, targetDir1)
+ require.Contains(t, found, targetDir2)
+ })
+
+ t.Run("mixed files and directories", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create target files and directories
+ targetFile := filepath.Join(testDir, "target.txt")
+ targetDir := filepath.Join(testDir, "targetdir")
+ err := os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+ err = os.Mkdir(targetDir, 0o755)
+ require.NoError(t, err)
+
+ found, err := Lookup(testDir, "target.txt", "targetdir")
+ require.NoError(t, err)
+ require.Len(t, found, 2)
+ require.Contains(t, found, targetFile)
+ require.Contains(t, found, targetDir)
+ })
+
+ t.Run("invalid starting directory", func(t *testing.T) {
+ found, err := Lookup("/invalid/path/that/does/not/exist", "target.txt")
+ require.Error(t, err)
+ require.Empty(t, found)
+ })
+
+ t.Run("relative path handling", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Change to test directory
+ oldWd, _ := os.Getwd()
+ err := os.Chdir(testDir)
+ require.NoError(t, err)
+
+ t.Cleanup(func() {
+ os.Chdir(oldWd)
+ })
+
+ // Create target files in current directory
+ err = os.WriteFile("target1.txt", []byte("test1"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile("target2.txt", []byte("test2"), 0o644)
+ require.NoError(t, err)
+
+ // Search using relative path
+ found, err := Lookup(".", "target1.txt", "target2.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 2)
+
+ // Resolve symlinks to handle macOS /private/var vs /var discrepancy
+ expectedPath1, err := filepath.EvalSymlinks(filepath.Join(testDir, "target1.txt"))
+ require.NoError(t, err)
+ expectedPath2, err := filepath.EvalSymlinks(filepath.Join(testDir, "target2.txt"))
+ require.NoError(t, err)
+
+ // Check that found paths match expected paths (order may vary)
+ foundEvalSymlinks := make([]string, len(found))
+ for i, path := range found {
+ evalPath, err := filepath.EvalSymlinks(path)
+ require.NoError(t, err)
+ foundEvalSymlinks[i] = evalPath
+ }
+
+ require.Contains(t, foundEvalSymlinks, expectedPath1)
+ require.Contains(t, foundEvalSymlinks, expectedPath2)
+ })
+}
+
+func TestProbeEnt(t *testing.T) {
+ t.Run("existing file with correct owner", func(t *testing.T) {
+ tempDir := t.TempDir()
+
+ // Create test file
+ testFile := filepath.Join(tempDir, "test.txt")
+ err := os.WriteFile(testFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ // Get owner of temp directory
+ owner, err := Owner(tempDir)
+ require.NoError(t, err)
+
+ // Test probeEnt with correct owner
+ err = probeEnt(testFile, owner)
+ require.NoError(t, err)
+ })
+
+ t.Run("existing directory with correct owner", func(t *testing.T) {
+ tempDir := t.TempDir()
+
+ // Create test directory
+ testDir := filepath.Join(tempDir, "testdir")
+ err := os.Mkdir(testDir, 0o755)
+ require.NoError(t, err)
+
+ // Get owner of temp directory
+ owner, err := Owner(tempDir)
+ require.NoError(t, err)
+
+ // Test probeEnt with correct owner
+ err = probeEnt(testDir, owner)
+ require.NoError(t, err)
+ })
+
+ t.Run("nonexistent file", func(t *testing.T) {
+ tempDir := t.TempDir()
+
+ nonexistentFile := filepath.Join(tempDir, "nonexistent.txt")
+ owner, err := Owner(tempDir)
+ require.NoError(t, err)
+
+ err = probeEnt(nonexistentFile, owner)
+ require.Error(t, err)
+ require.True(t, errors.Is(err, os.ErrNotExist))
+ })
+
+ t.Run("nonexistent file in nonexistent directory", func(t *testing.T) {
+ nonexistentFile := "/this/directory/does/not/exists/nonexistent.txt"
+
+ err := probeEnt(nonexistentFile, -1)
+ require.Error(t, err)
+ require.True(t, errors.Is(err, os.ErrNotExist))
+ })
+
+ t.Run("ownership bypass with -1", func(t *testing.T) {
+ tempDir := t.TempDir()
+
+ // Create test file
+ testFile := filepath.Join(tempDir, "test.txt")
+ err := os.WriteFile(testFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ // Test probeEnt with -1 (bypass ownership check)
+ err = probeEnt(testFile, -1)
+ require.NoError(t, err)
+ })
+
+ t.Run("ownership mismatch returns permission error", func(t *testing.T) {
+ tempDir := t.TempDir()
+
+ // Create test file
+ testFile := filepath.Join(tempDir, "test.txt")
+ err := os.WriteFile(testFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ // Test probeEnt with different owner (use 9999 which is unlikely to be the actual owner)
+ err = probeEnt(testFile, 9999)
+ require.Error(t, err)
+ require.True(t, errors.Is(err, os.ErrPermission))
+ })
+}
@@ -1,140 +1,207 @@
package fsext
import (
+ "log/slog"
"os"
"path/filepath"
+ "slices"
+ "strings"
+ "sync"
"github.com/charlievieth/fastwalk"
+ "github.com/charmbracelet/crush/internal/csync"
+ "github.com/charmbracelet/crush/internal/home"
ignore "github.com/sabhiram/go-gitignore"
)
-// CommonIgnorePatterns contains commonly ignored files and directories
-var CommonIgnorePatterns = []string{
- // Version control
- ".git",
- ".svn",
- ".hg",
- ".bzr",
-
- // IDE and editor files
- ".vscode",
- ".idea",
- "*.swp",
- "*.swo",
- "*~",
- ".DS_Store",
- "Thumbs.db",
-
- // Build artifacts and dependencies
- "node_modules",
- "target",
- "build",
- "dist",
- "out",
- "bin",
- "obj",
- "*.o",
- "*.so",
- "*.dylib",
- "*.dll",
- "*.exe",
-
- // Logs and temporary files
- "*.log",
- "*.tmp",
- "*.temp",
- ".cache",
- ".tmp",
-
- // Language-specific
- "__pycache__",
- "*.pyc",
- "*.pyo",
- ".pytest_cache",
- "vendor",
- "Cargo.lock",
- "package-lock.json",
- "yarn.lock",
- "pnpm-lock.yaml",
-
- // OS generated files
- ".Trash",
- ".Spotlight-V100",
- ".fseventsd",
-
- // Crush
- ".crush",
-}
+// commonIgnorePatterns contains commonly ignored files and directories
+var commonIgnorePatterns = sync.OnceValue(func() ignore.IgnoreParser {
+ return ignore.CompileIgnoreLines(
+ // Version control
+ ".git",
+ ".svn",
+ ".hg",
+ ".bzr",
+
+ // IDE and editor files
+ ".vscode",
+ ".idea",
+ "*.swp",
+ "*.swo",
+ "*~",
+ ".DS_Store",
+ "Thumbs.db",
+
+ // Build artifacts and dependencies
+ "node_modules",
+ "target",
+ "build",
+ "dist",
+ "out",
+ "bin",
+ "obj",
+ "*.o",
+ "*.so",
+ "*.dylib",
+ "*.dll",
+ "*.exe",
+
+ // Logs and temporary files
+ "*.log",
+ "*.tmp",
+ "*.temp",
+ ".cache",
+ ".tmp",
+
+ // Language-specific
+ "__pycache__",
+ "*.pyc",
+ "*.pyo",
+ ".pytest_cache",
+ "vendor",
+ "Cargo.lock",
+ "package-lock.json",
+ "yarn.lock",
+ "pnpm-lock.yaml",
+
+ // OS generated files
+ ".Trash",
+ ".Spotlight-V100",
+ ".fseventsd",
+
+ // Crush
+ ".crush",
+ )
+})
+
+var homeIgnore = sync.OnceValue(func() ignore.IgnoreParser {
+ home := home.Dir()
+ var lines []string
+ for _, name := range []string{
+ filepath.Join(home, ".gitignore"),
+ filepath.Join(home, ".config", "git", "ignore"),
+ filepath.Join(home, ".config", "crush", "ignore"),
+ } {
+ if bts, err := os.ReadFile(name); err == nil {
+ lines = append(lines, strings.Split(string(bts), "\n")...)
+ }
+ }
+ return ignore.CompileIgnoreLines(lines...)
+})
-type DirectoryLister struct {
- gitignore *ignore.GitIgnore
- crushignore *ignore.GitIgnore
- commonIgnore *ignore.GitIgnore
- rootPath string
+type directoryLister struct {
+ ignores *csync.Map[string, ignore.IgnoreParser]
+ rootPath string
}
-func NewDirectoryLister(rootPath string) *DirectoryLister {
- dl := &DirectoryLister{
+func NewDirectoryLister(rootPath string) *directoryLister {
+ dl := &directoryLister{
rootPath: rootPath,
+ ignores: csync.NewMap[string, ignore.IgnoreParser](),
}
+ dl.getIgnore(rootPath)
+ return dl
+}
- // Load gitignore if it exists
- gitignorePath := filepath.Join(rootPath, ".gitignore")
- if _, err := os.Stat(gitignorePath); err == nil {
- if gi, err := ignore.CompileIgnoreFile(gitignorePath); err == nil {
- dl.gitignore = gi
+// git checks, in order:
+// - ./.gitignore, ../.gitignore, etc, until repo root
+// - ~/.config/git/ignore
+// - ~/.gitignore
+//
+// This will do the following:
+// - the given ignorePatterns
+// - [commonIgnorePatterns]
+// - ./.gitignore, ../.gitignore, etc, until dl.rootPath
+// - ./.crushignore, ../.crushignore, etc, until dl.rootPath
+// - ~/.config/git/ignore
+// - ~/.gitignore
+// - ~/.config/crush/ignore
+func (dl *directoryLister) shouldIgnore(path string, ignorePatterns []string) bool {
+ if len(ignorePatterns) > 0 {
+ base := filepath.Base(path)
+ for _, pattern := range ignorePatterns {
+ if matched, err := filepath.Match(pattern, base); err == nil && matched {
+ return true
+ }
}
}
- // Load crushignore if it exists
- crushignorePath := filepath.Join(rootPath, ".crushignore")
- if _, err := os.Stat(crushignorePath); err == nil {
- if ci, err := ignore.CompileIgnoreFile(crushignorePath); err == nil {
- dl.crushignore = ci
- }
+ // Don't apply gitignore rules to the root directory itself
+ // In gitignore semantics, patterns don't apply to the repo root
+ if path == dl.rootPath {
+ return false
}
- // Create common ignore patterns
- dl.commonIgnore = ignore.CompileIgnoreLines(CommonIgnorePatterns...)
-
- return dl
-}
-
-func (dl *DirectoryLister) shouldIgnore(path string, ignorePatterns []string) bool {
relPath, err := filepath.Rel(dl.rootPath, path)
if err != nil {
relPath = path
}
- // Check common ignore patterns
- if dl.commonIgnore.MatchesPath(relPath) {
+ if commonIgnorePatterns().MatchesPath(relPath) {
+ slog.Debug("ignoring common pattern", "path", relPath)
return true
}
- // Check gitignore patterns if available
- if dl.gitignore != nil && dl.gitignore.MatchesPath(relPath) {
+ parentDir := filepath.Dir(path)
+ ignoreParser := dl.getIgnore(parentDir)
+ if ignoreParser.MatchesPath(relPath) {
+ slog.Debug("ignoring dir pattern", "path", relPath, "dir", parentDir)
return true
}
- // Check crushignore patterns if available
- if dl.crushignore != nil && dl.crushignore.MatchesPath(relPath) {
+ // For directories, also check with trailing slash (gitignore convention)
+ if ignoreParser.MatchesPath(relPath + "/") {
+ slog.Debug("ignoring dir pattern with slash", "path", relPath+"/", "dir", parentDir)
return true
}
- base := filepath.Base(path)
+ if dl.checkParentIgnores(relPath) {
+ return true
+ }
- for _, pattern := range ignorePatterns {
- matched, err := filepath.Match(pattern, base)
- if err == nil && matched {
+ if homeIgnore().MatchesPath(relPath) {
+ slog.Debug("ignoring home dir pattern", "path", relPath)
+ return true
+ }
+
+ return false
+}
+
+func (dl *directoryLister) checkParentIgnores(path string) bool {
+ parent := filepath.Dir(filepath.Dir(path))
+ for parent != "." && path != "." {
+ if dl.getIgnore(parent).MatchesPath(path) {
+			slog.Debug("ignoring parent dir pattern", "path", path, "dir", parent)
return true
}
+ if parent == dl.rootPath {
+ break
+ }
+ parent = filepath.Dir(parent)
}
return false
}
+func (dl *directoryLister) getIgnore(path string) ignore.IgnoreParser {
+ return dl.ignores.GetOrSet(path, func() ignore.IgnoreParser {
+ var lines []string
+ for _, ign := range []string{".crushignore", ".gitignore"} {
+ name := filepath.Join(path, ign)
+ if content, err := os.ReadFile(name); err == nil {
+ lines = append(lines, strings.Split(string(content), "\n")...)
+ }
+ }
+ if len(lines) == 0 {
+ // Return a no-op parser to avoid nil checks
+ return ignore.CompileIgnoreLines()
+ }
+ return ignore.CompileIgnoreLines(lines...)
+ })
+}
+
// ListDirectory lists files and directories in the specified path,
func ListDirectory(initialPath string, ignorePatterns []string, limit int) ([]string, bool, error) {
- var results []string
+ results := csync.NewSlice[string]()
truncated := false
dl := NewDirectoryLister(initialPath)
@@ -144,6 +211,7 @@ func ListDirectory(initialPath string, ignorePatterns []string, limit int) ([]st
ToSlash: fastwalk.DefaultToSlash(),
Sort: fastwalk.SortDirsFirst,
}
+
err := fastwalk.Walk(&conf, initialPath, func(path string, d os.DirEntry, err error) error {
if err != nil {
return nil // Skip files we don't have permission to access
@@ -160,19 +228,19 @@ func ListDirectory(initialPath string, ignorePatterns []string, limit int) ([]st
if d.IsDir() {
path = path + string(filepath.Separator)
}
- results = append(results, path)
+ results.Append(path)
}
- if limit > 0 && len(results) >= limit {
+ if limit > 0 && results.Len() >= limit {
truncated = true
return filepath.SkipAll
}
return nil
})
- if err != nil && len(results) == 0 {
+ if err != nil && results.Len() == 0 {
return nil, truncated, err
}
- return results, truncated, nil
+ return slices.Collect(results.Seq()), truncated, nil
}
@@ -0,0 +1,66 @@
+package fsext
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func chdir(t *testing.T, dir string) {
+ original, err := os.Getwd()
+ require.NoError(t, err)
+
+ err = os.Chdir(dir)
+ require.NoError(t, err)
+
+ t.Cleanup(func() {
+ err := os.Chdir(original)
+ require.NoError(t, err)
+ })
+}
+
+func TestListDirectory(t *testing.T) {
+ tempDir := t.TempDir()
+ chdir(t, tempDir)
+
+ testFiles := map[string]string{
+ "regular.txt": "content",
+ ".hidden": "hidden content",
+ ".gitignore": ".*\n*.log\n",
+ "subdir/file.go": "package main",
+ "subdir/.another": "more hidden",
+ "build.log": "build output",
+ }
+
+ for filePath, content := range testFiles {
+ dir := filepath.Dir(filePath)
+ if dir != "." {
+ require.NoError(t, os.MkdirAll(dir, 0o755))
+ }
+
+ err := os.WriteFile(filePath, []byte(content), 0o644)
+ require.NoError(t, err)
+ }
+
+ files, truncated, err := ListDirectory(".", nil, 0)
+ require.NoError(t, err)
+ assert.False(t, truncated)
+ assert.Equal(t, len(files), 4)
+
+ fileSet := make(map[string]bool)
+ for _, file := range files {
+ fileSet[filepath.ToSlash(file)] = true
+ }
+
+ assert.True(t, fileSet["./regular.txt"])
+ assert.True(t, fileSet["./subdir/"])
+ assert.True(t, fileSet["./subdir/file.go"])
+ assert.True(t, fileSet["./regular.txt"])
+
+ assert.False(t, fileSet["./.hidden"])
+ assert.False(t, fileSet["./.gitignore"])
+ assert.False(t, fileSet["./build.log"])
+}
@@ -0,0 +1,24 @@
+//go:build !windows
+
+package fsext
+
+import (
+ "os"
+ "syscall"
+)
+
+// Owner retrieves the user ID of the owner of the file or directory at the
+// specified path.
+func Owner(path string) (int, error) {
+ info, err := os.Stat(path)
+ if err != nil {
+ return 0, err
+ }
+ var uid int
+ if stat, ok := info.Sys().(*syscall.Stat_t); ok {
+ uid = int(stat.Uid)
+ } else {
+ uid = os.Getuid()
+ }
+ return uid, nil
+}
@@ -0,0 +1,15 @@
+//go:build windows
+
+package fsext
+
+import "os"
+
+// Owner retrieves the user ID of the owner of the file or directory at the
+// specified path.
+func Owner(path string) (int, error) {
+ _, err := os.Stat(path)
+ if err != nil {
+ return 0, err
+ }
+ return -1, nil
+}
@@ -0,0 +1,43 @@
+// Package home provides utilities for dealing with the user's home directory.
+package home
+
+import (
+ "log/slog"
+ "os"
+ "path/filepath"
+ "strings"
+ "sync"
+)
+
+// Dir returns the user's home directory, or if it fails, tries to create a new
+// temporary directory and use that instead.
+var Dir = sync.OnceValue(func() string {
+ home, err := os.UserHomeDir()
+ if err == nil {
+ slog.Debug("user home directory", "home", home)
+ return home
+ }
+ tmp, err := os.MkdirTemp("crush", "")
+ if err != nil {
+ slog.Error("could not find the user home directory")
+ return ""
+ }
+ slog.Warn("could not find the user home directory, using a temporary one", "home", tmp)
+ return tmp
+})
+
+// Short replaces the actual home path from [Dir] with `~`.
+func Short(p string) string {
+ if !strings.HasPrefix(p, Dir()) || Dir() == "" {
+ return p
+ }
+ return filepath.Join("~", strings.TrimPrefix(p, Dir()))
+}
+
+// Long replaces the `~` with actual home path from [Dir].
+func Long(p string) string {
+ if !strings.HasPrefix(p, "~") || Dir() == "" {
+ return p
+ }
+ return strings.Replace(p, "~", Dir(), 1)
+}
@@ -0,0 +1,26 @@
+package home
+
+import (
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestDir(t *testing.T) {
+ require.NotEmpty(t, Dir())
+}
+
+func TestShort(t *testing.T) {
+ d := filepath.Join(Dir(), "documents", "file.txt")
+ require.Equal(t, filepath.FromSlash("~/documents/file.txt"), Short(d))
+ ad := filepath.FromSlash("/absolute/path/file.txt")
+ require.Equal(t, ad, Short(ad))
+}
+
+func TestLong(t *testing.T) {
+ d := filepath.FromSlash("~/documents/file.txt")
+ require.Equal(t, filepath.Join(Dir(), "documents", "file.txt"), Long(d))
+ ad := filepath.FromSlash("/absolute/path/file.txt")
+ require.Equal(t, ad, Long(ad))
+}
@@ -12,6 +12,7 @@ import (
"github.com/charmbracelet/catwalk/pkg/catwalk"
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/csync"
+ "github.com/charmbracelet/crush/internal/event"
"github.com/charmbracelet/crush/internal/history"
"github.com/charmbracelet/crush/internal/llm/prompt"
"github.com/charmbracelet/crush/internal/llm/provider"
@@ -25,12 +26,6 @@ import (
"github.com/charmbracelet/crush/internal/shell"
)
-// Common errors
-var (
- ErrRequestCancelled = errors.New("request canceled by user")
- ErrSessionBusy = errors.New("session is currently processing another request")
-)
-
type AgentEventType string
const (
@@ -60,16 +55,21 @@ type Service interface {
IsBusy() bool
Summarize(ctx context.Context, sessionID string) error
UpdateModel() error
+ QueuedPrompts(sessionID string) int
+ ClearQueue(sessionID string)
}
type agent struct {
*pubsub.Broker[AgentEvent]
- agentCfg config.Agent
- sessions session.Service
- messages message.Service
- mcpTools []McpTool
+ agentCfg config.Agent
+ sessions session.Service
+ messages message.Service
+ permissions permission.Service
+ mcpTools []McpTool
tools *csync.LazySlice[tools.BaseTool]
+ // We need this to be able to update it when model changes
+ agentToolFn func() (tools.BaseTool, error)
provider provider.Provider
providerID string
@@ -79,6 +79,7 @@ type agent struct {
summarizeProviderID string
activeRequests *csync.Map[string, context.CancelFunc]
+ promptQueue *csync.Map[string, []string]
}
var agentPromptMap = map[string]prompt.PromptID{
@@ -94,22 +95,23 @@ func NewAgent(
sessions session.Service,
messages message.Service,
history history.Service,
- lspClients map[string]*lsp.Client,
+ lspClients *csync.Map[string, *lsp.Client],
) (Service, error) {
cfg := config.Get()
- var agentTool tools.BaseTool
- if agentCfg.ID == "coder" {
- taskAgentCfg := config.Get().Agents["task"]
- if taskAgentCfg.ID == "" {
- return nil, fmt.Errorf("task agent not found in config")
- }
- taskAgent, err := NewAgent(ctx, taskAgentCfg, permissions, sessions, messages, history, lspClients)
- if err != nil {
- return nil, fmt.Errorf("failed to create task agent: %w", err)
+ var agentToolFn func() (tools.BaseTool, error)
+ if agentCfg.ID == "coder" && slices.Contains(agentCfg.AllowedTools, AgentToolName) {
+ agentToolFn = func() (tools.BaseTool, error) {
+ taskAgentCfg := config.Get().Agents["task"]
+ if taskAgentCfg.ID == "" {
+ return nil, fmt.Errorf("task agent not found in config")
+ }
+ taskAgent, err := NewAgent(ctx, taskAgentCfg, permissions, sessions, messages, history, lspClients)
+ if err != nil {
+ return nil, fmt.Errorf("failed to create task agent: %w", err)
+ }
+ return NewAgentTool(taskAgent, sessions, messages), nil
}
-
- agentTool = NewAgentTool(taskAgent, sessions, messages)
}
providerCfg := config.Get().GetProviderForModel(agentCfg.Model)
@@ -159,11 +161,12 @@ func NewAgent(
if err != nil {
return nil, err
}
+
summarizeOpts := []provider.ProviderClientOption{
- provider.WithModel(config.SelectedModelTypeSmall),
- provider.WithSystemMessage(prompt.GetPrompt(prompt.PromptSummarizer, smallModelProviderCfg.ID)),
+ provider.WithModel(config.SelectedModelTypeLarge),
+ provider.WithSystemMessage(prompt.GetPrompt(prompt.PromptSummarizer, providerCfg.ID)),
}
- summarizeProvider, err := provider.NewProvider(*smallModelProviderCfg, summarizeOpts...)
+ summarizeProvider, err := provider.NewProvider(*providerCfg, summarizeOpts...)
if err != nil {
return nil, err
}
@@ -176,7 +179,7 @@ func NewAgent(
cwd := cfg.WorkingDir()
allTools := []tools.BaseTool{
- tools.NewBashTool(permissions, cwd),
+ tools.NewBashTool(permissions, cwd, cfg.Options.Attribution),
tools.NewDownloadTool(permissions, cwd),
tools.NewEditTool(lspClients, permissions, history, cwd),
tools.NewMultiEditTool(lspClients, permissions, history, cwd),
@@ -192,18 +195,19 @@ func NewAgent(
mcpToolsOnce.Do(func() {
mcpTools = doGetMCPTools(ctx, permissions, cfg)
})
- allTools = append(allTools, mcpTools...)
- if len(lspClients) > 0 {
- allTools = append(allTools, tools.NewDiagnosticsTool(lspClients))
- }
-
- if agentTool != nil {
- allTools = append(allTools, agentTool)
+ withCoderTools := func(t []tools.BaseTool) []tools.BaseTool {
+ if agentCfg.ID == "coder" {
+ t = append(t, mcpTools...)
+ if lspClients.Len() > 0 {
+ t = append(t, tools.NewDiagnosticsTool(lspClients))
+ }
+ }
+ return t
}
if agentCfg.AllowedTools == nil {
- return allTools
+ return withCoderTools(allTools)
}
var filteredTools []tools.BaseTool
@@ -212,7 +216,7 @@ func NewAgent(
filteredTools = append(filteredTools, tool)
}
}
- return filteredTools
+ return withCoderTools(filteredTools)
}
return &agent{
@@ -224,9 +228,12 @@ func NewAgent(
sessions: sessions,
titleProvider: titleProvider,
summarizeProvider: summarizeProvider,
- summarizeProviderID: string(smallModelProviderCfg.ID),
+ summarizeProviderID: string(providerCfg.ID),
+ agentToolFn: agentToolFn,
activeRequests: csync.NewMap[string, context.CancelFunc](),
tools: csync.NewLazySlice(toolFn),
+ promptQueue: csync.NewMap[string, []string](),
+ permissions: permissions,
}, nil
}
@@ -246,6 +253,11 @@ func (a *agent) Cancel(sessionID string) {
slog.Info("Summarize cancellation initiated", "session_id", sessionID)
cancel()
}
+
+ if a.QueuedPrompts(sessionID) > 0 {
+ slog.Info("Clearing queued prompts", "session_id", sessionID)
+ a.promptQueue.Del(sessionID)
+ }
}
func (a *agent) IsBusy() bool {
@@ -264,6 +276,14 @@ func (a *agent) IsSessionBusy(sessionID string) bool {
return busy
}
+func (a *agent) QueuedPrompts(sessionID string) int {
+ l, ok := a.promptQueue.Get(sessionID)
+ if !ok {
+ return 0
+ }
+ return len(l)
+}
+
func (a *agent) generateTitle(ctx context.Context, sessionID string, content string) error {
if content == "" {
return nil
@@ -303,7 +323,13 @@ func (a *agent) generateTitle(ctx context.Context, sessionID string, content str
return fmt.Errorf("no response received from title provider")
}
- title := strings.TrimSpace(strings.ReplaceAll(finalResponse.Content, "\n", " "))
+ title := strings.ReplaceAll(finalResponse.Content, "\n", " ")
+
+ if idx := strings.Index(title, "</think>"); idx > 0 {
+ title = title[idx+len("</think>"):]
+ }
+
+ title = strings.TrimSpace(title)
if title == "" {
return nil
}
@@ -324,14 +350,21 @@ func (a *agent) Run(ctx context.Context, sessionID string, content string, attac
if !a.Model().SupportsImages && attachments != nil {
attachments = nil
}
- events := make(chan AgentEvent)
+ events := make(chan AgentEvent, 1)
if a.IsSessionBusy(sessionID) {
- return nil, ErrSessionBusy
+ existing, ok := a.promptQueue.Get(sessionID)
+ if !ok {
+ existing = []string{}
+ }
+ existing = append(existing, content)
+ a.promptQueue.Set(sessionID, existing)
+ return nil, nil
}
genCtx, cancel := context.WithCancel(ctx)
-
a.activeRequests.Set(sessionID, cancel)
+ startTime := time.Now()
+
go func() {
slog.Debug("Request started", "sessionID", sessionID)
defer log.RecoverPanic("agent.Run", func() {
@@ -342,16 +375,24 @@ func (a *agent) Run(ctx context.Context, sessionID string, content string, attac
attachmentParts = append(attachmentParts, message.BinaryContent{Path: attachment.FilePath, MIMEType: attachment.MimeType, Data: attachment.Content})
}
result := a.processGeneration(genCtx, sessionID, content, attachmentParts)
- if result.Error != nil && !errors.Is(result.Error, ErrRequestCancelled) && !errors.Is(result.Error, context.Canceled) {
- slog.Error(result.Error.Error())
+ if result.Error != nil {
+ if isCancelledErr(result.Error) {
+ slog.Error("Request canceled", "sessionID", sessionID)
+ } else {
+ slog.Error("Request errored", "sessionID", sessionID, "error", result.Error.Error())
+ event.Error(result.Error)
+ }
+ } else {
+ slog.Debug("Request completed", "sessionID", sessionID)
}
- slog.Debug("Request completed", "sessionID", sessionID)
+ a.eventPromptResponded(sessionID, time.Since(startTime).Truncate(time.Second))
a.activeRequests.Del(sessionID)
cancel()
a.Publish(pubsub.CreatedEvent, result)
events <- result
close(events)
}()
+ a.eventPromptSent(sessionID)
return events, nil
}
@@ -367,7 +408,7 @@ func (a *agent) processGeneration(ctx context.Context, sessionID, content string
defer log.RecoverPanic("agent.Run", func() {
slog.Error("panic while generating title")
})
- titleErr := a.generateTitle(context.Background(), sessionID, content)
+ titleErr := a.generateTitle(ctx, sessionID, content)
if titleErr != nil && !errors.Is(titleErr, context.Canceled) && !errors.Is(titleErr, context.DeadlineExceeded) {
slog.Error("failed to generate title", "error", titleErr)
}
@@ -421,7 +462,36 @@ func (a *agent) processGeneration(ctx context.Context, sessionID, content string
if (agentMessage.FinishReason() == message.FinishReasonToolUse) && toolResults != nil {
// We are not done, we need to respond with the tool response
msgHistory = append(msgHistory, agentMessage, *toolResults)
+			// If any prompts were queued while this session was busy, drain them all into the conversation history
+ nextPrompt, ok := a.promptQueue.Take(sessionID)
+ if ok {
+ for _, prompt := range nextPrompt {
+ // Create a new user message for the queued prompt
+ userMsg, err := a.createUserMessage(ctx, sessionID, prompt, nil)
+ if err != nil {
+ return a.err(fmt.Errorf("failed to create user message for queued prompt: %w", err))
+ }
+ // Append the new user message to the conversation history
+ msgHistory = append(msgHistory, userMsg)
+ }
+ }
+
continue
+ } else if agentMessage.FinishReason() == message.FinishReasonEndTurn {
+ queuePrompts, ok := a.promptQueue.Take(sessionID)
+ if ok {
+ for _, prompt := range queuePrompts {
+ if prompt == "" {
+ continue
+ }
+ userMsg, err := a.createUserMessage(ctx, sessionID, prompt, nil)
+ if err != nil {
+ return a.err(fmt.Errorf("failed to create user message for queued prompt: %w", err))
+ }
+ msgHistory = append(msgHistory, userMsg)
+ }
+ continue
+ }
}
if agentMessage.FinishReason() == "" {
// Kujtim: could not track down where this is happening but this means its cancelled
@@ -446,10 +516,22 @@ func (a *agent) createUserMessage(ctx context.Context, sessionID, content string
})
}
+func (a *agent) getAllTools() ([]tools.BaseTool, error) {
+ allTools := slices.Collect(a.tools.Seq())
+ if a.agentToolFn != nil {
+ agentTool, agentToolErr := a.agentToolFn()
+ if agentToolErr != nil {
+ return nil, agentToolErr
+ }
+ allTools = append(allTools, agentTool)
+ }
+ return allTools, nil
+}
+
func (a *agent) streamAndHandleEvents(ctx context.Context, sessionID string, msgHistory []message.Message) (message.Message, *message.Message, error) {
ctx = context.WithValue(ctx, tools.SessionIDContextKey, sessionID)
- eventChan := a.provider.StreamResponse(ctx, msgHistory, slices.Collect(a.tools.Seq()))
+ // Create the assistant message first so the spinner shows immediately
assistantMsg, err := a.messages.Create(ctx, sessionID, message.CreateMessageParams{
Role: message.Assistant,
Parts: []message.ContentPart{},
@@ -460,20 +542,32 @@ func (a *agent) streamAndHandleEvents(ctx context.Context, sessionID string, msg
return assistantMsg, nil, fmt.Errorf("failed to create assistant message: %w", err)
}
+ allTools, toolsErr := a.getAllTools()
+ if toolsErr != nil {
+ return assistantMsg, nil, toolsErr
+ }
+	// Start streaming the response with the collected tools (tool collection above may block on MCP initialization)
+ eventChan := a.provider.StreamResponse(ctx, msgHistory, allTools)
+
// Add the session and message ID into the context if needed by tools.
ctx = context.WithValue(ctx, tools.MessageIDContextKey, assistantMsg.ID)
- // Process each event in the stream.
- for event := range eventChan {
- if processErr := a.processEvent(ctx, sessionID, &assistantMsg, event); processErr != nil {
- if errors.Is(processErr, context.Canceled) {
- a.finishMessage(context.Background(), &assistantMsg, message.FinishReasonCanceled, "Request cancelled", "")
- } else {
- a.finishMessage(ctx, &assistantMsg, message.FinishReasonError, "API Error", processErr.Error())
+loop:
+ for {
+ select {
+ case event, ok := <-eventChan:
+ if !ok {
+ break loop
}
- return assistantMsg, nil, processErr
- }
- if ctx.Err() != nil {
+ if processErr := a.processEvent(ctx, sessionID, &assistantMsg, event); processErr != nil {
+ if errors.Is(processErr, context.Canceled) {
+ a.finishMessage(context.Background(), &assistantMsg, message.FinishReasonCanceled, "Request cancelled", "")
+ } else {
+ a.finishMessage(ctx, &assistantMsg, message.FinishReasonError, "API Error", processErr.Error())
+ }
+ return assistantMsg, nil, processErr
+ }
+ case <-ctx.Done():
a.finishMessage(context.Background(), &assistantMsg, message.FinishReasonCanceled, "Request cancelled", "")
return assistantMsg, nil, ctx.Err()
}
@@ -497,7 +591,8 @@ func (a *agent) streamAndHandleEvents(ctx context.Context, sessionID string, msg
default:
// Continue processing
var tool tools.BaseTool
- for availableTool := range a.tools.Seq() {
+ allTools, _ := a.getAllTools()
+ for _, availableTool := range allTools {
if availableTool.Info().Name == toolCall.Name {
tool = availableTool
break
@@ -642,13 +737,13 @@ func (a *agent) processEvent(ctx context.Context, sessionID string, assistantMsg
if err := a.messages.Update(ctx, *assistantMsg); err != nil {
return fmt.Errorf("failed to update message: %w", err)
}
- return a.TrackUsage(ctx, sessionID, a.Model(), event.Response.Usage)
+ return a.trackUsage(ctx, sessionID, a.Model(), event.Response.Usage)
}
return nil
}
-func (a *agent) TrackUsage(ctx context.Context, sessionID string, model catwalk.Model, usage provider.TokenUsage) error {
+func (a *agent) trackUsage(ctx context.Context, sessionID string, model catwalk.Model, usage provider.TokenUsage) error {
sess, err := a.sessions.Get(ctx, sessionID)
if err != nil {
return fmt.Errorf("failed to get session: %w", err)
@@ -659,6 +754,8 @@ func (a *agent) TrackUsage(ctx context.Context, sessionID string, model catwalk.
model.CostPer1MIn/1e6*float64(usage.InputTokens) +
model.CostPer1MOut/1e6*float64(usage.OutputTokens)
+ a.eventTokensUsed(sessionID, usage, cost)
+
sess.Cost += cost
sess.CompletionTokens = usage.OutputTokens + usage.CacheReadTokens
sess.PromptTokens = usage.InputTokens + usage.CacheCreationTokens
@@ -754,7 +851,7 @@ func (a *agent) Summarize(ctx context.Context, sessionID string) error {
if r.Error != nil {
event = AgentEvent{
Type: AgentEventTypeError,
- Error: fmt.Errorf("failed to summarize: %w", err),
+ Error: fmt.Errorf("failed to summarize: %w", r.Error),
Done: true,
}
a.Publish(pubsub.CreatedEvent, event)
@@ -848,6 +945,13 @@ func (a *agent) Summarize(ctx context.Context, sessionID string) error {
return nil
}
+func (a *agent) ClearQueue(sessionID string) {
+ if a.QueuedPrompts(sessionID) > 0 {
+ slog.Info("Clearing queued prompts", "session_id", sessionID)
+ a.promptQueue.Del(sessionID)
+ }
+}
+
func (a *agent) CancelAll() {
if !a.IsBusy() {
return
@@ -904,54 +1008,65 @@ func (a *agent) UpdateModel() error {
a.providerID = string(currentProviderCfg.ID)
}
- // Check if small model provider has changed (affects title and summarize providers)
+ // Check if providers have changed for title (small) and summarize (large)
smallModelCfg := cfg.Models[config.SelectedModelTypeSmall]
var smallModelProviderCfg config.ProviderConfig
-
for p := range cfg.Providers.Seq() {
if p.ID == smallModelCfg.Provider {
smallModelProviderCfg = p
break
}
}
-
if smallModelProviderCfg.ID == "" {
return fmt.Errorf("provider %s not found in config", smallModelCfg.Provider)
}
- // Check if summarize provider has changed
- if string(smallModelProviderCfg.ID) != a.summarizeProviderID {
- smallModel := cfg.GetModelByType(config.SelectedModelTypeSmall)
- if smallModel == nil {
- return fmt.Errorf("model %s not found in provider %s", smallModelCfg.Model, smallModelProviderCfg.ID)
+ largeModelCfg := cfg.Models[config.SelectedModelTypeLarge]
+ var largeModelProviderCfg config.ProviderConfig
+ for p := range cfg.Providers.Seq() {
+ if p.ID == largeModelCfg.Provider {
+ largeModelProviderCfg = p
+ break
}
+ }
+ if largeModelProviderCfg.ID == "" {
+ return fmt.Errorf("provider %s not found in config", largeModelCfg.Provider)
+ }
- // Recreate title provider
- titleOpts := []provider.ProviderClientOption{
- provider.WithModel(config.SelectedModelTypeSmall),
- provider.WithSystemMessage(prompt.GetPrompt(prompt.PromptTitle, smallModelProviderCfg.ID)),
- // We want the title to be short, so we limit the max tokens
- provider.WithMaxTokens(40),
- }
- newTitleProvider, err := provider.NewProvider(smallModelProviderCfg, titleOpts...)
- if err != nil {
- return fmt.Errorf("failed to create new title provider: %w", err)
- }
+ var maxTitleTokens int64 = 40
+
+ // if the max output is too low for the gemini provider it won't return anything
+ if smallModelCfg.Provider == "gemini" {
+ maxTitleTokens = 1000
+ }
+ // Recreate title provider
+ titleOpts := []provider.ProviderClientOption{
+ provider.WithModel(config.SelectedModelTypeSmall),
+ provider.WithSystemMessage(prompt.GetPrompt(prompt.PromptTitle, smallModelProviderCfg.ID)),
+ provider.WithMaxTokens(maxTitleTokens),
+ }
+ newTitleProvider, err := provider.NewProvider(smallModelProviderCfg, titleOpts...)
+ if err != nil {
+ return fmt.Errorf("failed to create new title provider: %w", err)
+ }
+ a.titleProvider = newTitleProvider
- // Recreate summarize provider
+ // Recreate summarize provider if provider changed (now large model)
+ if string(largeModelProviderCfg.ID) != a.summarizeProviderID {
+ largeModel := cfg.GetModelByType(config.SelectedModelTypeLarge)
+ if largeModel == nil {
+ return fmt.Errorf("model %s not found in provider %s", largeModelCfg.Model, largeModelProviderCfg.ID)
+ }
summarizeOpts := []provider.ProviderClientOption{
- provider.WithModel(config.SelectedModelTypeSmall),
- provider.WithSystemMessage(prompt.GetPrompt(prompt.PromptSummarizer, smallModelProviderCfg.ID)),
+ provider.WithModel(config.SelectedModelTypeLarge),
+ provider.WithSystemMessage(prompt.GetPrompt(prompt.PromptSummarizer, largeModelProviderCfg.ID)),
}
- newSummarizeProvider, err := provider.NewProvider(smallModelProviderCfg, summarizeOpts...)
+ newSummarizeProvider, err := provider.NewProvider(largeModelProviderCfg, summarizeOpts...)
if err != nil {
return fmt.Errorf("failed to create new summarize provider: %w", err)
}
-
- // Update the providers and provider ID
- a.titleProvider = newTitleProvider
a.summarizeProvider = newSummarizeProvider
- a.summarizeProviderID = string(smallModelProviderCfg.ID)
+ a.summarizeProviderID = string(largeModelProviderCfg.ID)
}
return nil
@@ -0,0 +1,15 @@
+package agent
+
+import (
+ "context"
+ "errors"
+)
+
+var (
+ ErrRequestCancelled = errors.New("request canceled by user")
+ ErrSessionBusy = errors.New("session is currently processing another request")
+)
+
+func isCancelledErr(err error) bool {
+ return errors.Is(err, context.Canceled) || errors.Is(err, ErrRequestCancelled)
+}
@@ -0,0 +1,53 @@
+package agent
+
+import (
+ "time"
+
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/event"
+ "github.com/charmbracelet/crush/internal/llm/provider"
+)
+
+func (a *agent) eventPromptSent(sessionID string) {
+ event.PromptSent(
+ a.eventCommon(sessionID)...,
+ )
+}
+
+func (a *agent) eventPromptResponded(sessionID string, duration time.Duration) {
+ event.PromptResponded(
+ append(
+ a.eventCommon(sessionID),
+ "prompt duration pretty", duration.String(),
+ "prompt duration in seconds", int64(duration.Seconds()),
+ )...,
+ )
+}
+
+func (a *agent) eventTokensUsed(sessionID string, usage provider.TokenUsage, cost float64) {
+ event.TokensUsed(
+ append(
+ a.eventCommon(sessionID),
+ "input tokens", usage.InputTokens,
+ "output tokens", usage.OutputTokens,
+ "cache read tokens", usage.CacheReadTokens,
+ "cache creation tokens", usage.CacheCreationTokens,
+ "total tokens", usage.InputTokens+usage.OutputTokens+usage.CacheReadTokens+usage.CacheCreationTokens,
+ "cost", cost,
+ )...,
+ )
+}
+
+func (a *agent) eventCommon(sessionID string) []any {
+ cfg := config.Get()
+ currentModel := cfg.Models[cfg.Agents["coder"].Model]
+
+ return []any{
+ "session id", sessionID,
+ "provider", currentModel.Provider,
+ "model", currentModel.Model,
+ "reasoning effort", currentModel.ReasoningEffort,
+ "thinking mode", currentModel.Think,
+ "yolo mode", a.permissions.SkipRequests(),
+ }
+}
@@ -1,27 +1,87 @@
package agent
import (
+ "cmp"
"context"
"encoding/json"
+ "errors"
"fmt"
"log/slog"
+ "maps"
"slices"
+ "strings"
"sync"
+ "time"
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/csync"
+ "github.com/charmbracelet/crush/internal/home"
"github.com/charmbracelet/crush/internal/llm/tools"
"github.com/charmbracelet/crush/internal/permission"
+ "github.com/charmbracelet/crush/internal/pubsub"
"github.com/charmbracelet/crush/internal/version"
"github.com/mark3labs/mcp-go/client"
"github.com/mark3labs/mcp-go/client/transport"
"github.com/mark3labs/mcp-go/mcp"
)
+// MCPState represents the current state of an MCP client
+type MCPState int
+
+const (
+ MCPStateDisabled MCPState = iota
+ MCPStateStarting
+ MCPStateConnected
+ MCPStateError
+)
+
+func (s MCPState) String() string {
+ switch s {
+ case MCPStateDisabled:
+ return "disabled"
+ case MCPStateStarting:
+ return "starting"
+ case MCPStateConnected:
+ return "connected"
+ case MCPStateError:
+ return "error"
+ default:
+ return "unknown"
+ }
+}
+
+// MCPEventType represents the type of MCP event
+type MCPEventType string
+
+const (
+ MCPEventStateChanged MCPEventType = "state_changed"
+)
+
+// MCPEvent represents an event in the MCP system
+type MCPEvent struct {
+ Type MCPEventType
+ Name string
+ State MCPState
+ Error error
+ ToolCount int
+}
+
+// MCPClientInfo holds information about an MCP client's state
+type MCPClientInfo struct {
+ Name string
+ State MCPState
+ Error error
+ Client *client.Client
+ ToolCount int
+ ConnectedAt time.Time
+}
+
var (
mcpToolsOnce sync.Once
mcpTools []tools.BaseTool
mcpClients = csync.NewMap[string, *client.Client]()
+ mcpStates = csync.NewMap[string, MCPClientInfo]()
+ mcpBroker = pubsub.NewBroker[MCPEvent]()
)
type McpTool struct {
@@ -40,10 +100,14 @@ func (b *McpTool) Info() tools.ToolInfo {
if required == nil {
required = make([]string, 0)
}
+ parameters := b.tool.InputSchema.Properties
+ if parameters == nil {
+ parameters = make(map[string]any)
+ }
return tools.ToolInfo{
Name: fmt.Sprintf("mcp_%s_%s", b.mcpName, b.tool.Name),
Description: b.tool.Description,
- Parameters: b.tool.InputSchema.Properties,
+ Parameters: parameters,
Required: required,
}
}
@@ -53,9 +117,10 @@ func runTool(ctx context.Context, name, toolName string, input string) (tools.To
if err := json.Unmarshal([]byte(input), &args); err != nil {
return tools.NewTextErrorResponse(fmt.Sprintf("error parsing parameters: %s", err)), nil
}
- c, ok := mcpClients.Get(name)
- if !ok {
- return tools.NewTextErrorResponse("mcp '" + name + "' not available"), nil
+
+ c, err := getOrRenewClient(ctx, name)
+ if err != nil {
+ return tools.NewTextErrorResponse(err.Error()), nil
}
result, err := c.CallTool(ctx, mcp.CallToolRequest{
Params: mcp.CallToolParams{
@@ -67,16 +132,44 @@ func runTool(ctx context.Context, name, toolName string, input string) (tools.To
return tools.NewTextErrorResponse(err.Error()), nil
}
- output := ""
+ output := make([]string, 0, len(result.Content))
for _, v := range result.Content {
if v, ok := v.(mcp.TextContent); ok {
- output = v.Text
+ output = append(output, v.Text)
} else {
- output = fmt.Sprintf("%v", v)
+ output = append(output, fmt.Sprintf("%v", v))
}
}
+ return tools.NewTextResponse(strings.Join(output, "\n")), nil
+}
+
+func getOrRenewClient(ctx context.Context, name string) (*client.Client, error) {
+ c, ok := mcpClients.Get(name)
+ if !ok {
+ return nil, fmt.Errorf("mcp '%s' not available", name)
+ }
+
+ cfg := config.Get()
+ m := cfg.MCP[name]
+ state, _ := mcpStates.Get(name)
- return tools.NewTextResponse(output), nil
+ timeout := mcpTimeout(m)
+ pingCtx, cancel := context.WithTimeout(ctx, timeout)
+ defer cancel()
+ err := c.Ping(pingCtx)
+ if err == nil {
+ return c, nil
+ }
+ updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, state.ToolCount)
+
+ c, err = createAndInitializeClient(ctx, name, m, cfg.Resolver())
+ if err != nil {
+ return nil, err
+ }
+
+ updateMCPState(name, MCPStateConnected, nil, c, state.ToolCount)
+ mcpClients.Set(name, c)
+ return c, nil
}
func (b *McpTool) Run(ctx context.Context, params tools.ToolCall) (tools.ToolResponse, error) {
@@ -84,7 +177,7 @@ func (b *McpTool) Run(ctx context.Context, params tools.ToolCall) (tools.ToolRes
if sessionID == "" || messageID == "" {
return tools.ToolResponse{}, fmt.Errorf("session ID and message ID are required for creating a new file")
}
- permissionDescription := fmt.Sprintf("execute %s with the following parameters: %s", b.Info().Name, params.Input)
+ permissionDescription := fmt.Sprintf("execute %s with the following parameters:", b.Info().Name)
p := b.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
@@ -103,13 +196,10 @@ func (b *McpTool) Run(ctx context.Context, params tools.ToolCall) (tools.ToolRes
return runTool(ctx, b.mcpName, b.tool.Name, params.Input)
}
-func getTools(ctx context.Context, name string, permissions permission.Service, c *client.Client, workingDir string) []tools.BaseTool {
+func getTools(ctx context.Context, name string, permissions permission.Service, c *client.Client, workingDir string) ([]tools.BaseTool, error) {
result, err := c.ListTools(ctx, mcp.ListToolsRequest{})
if err != nil {
- slog.Error("error listing tools", "error", err)
- c.Close()
- mcpClients.Del(name)
- return nil
+ return nil, err
}
mcpTools := make([]tools.BaseTool, 0, len(result.Tools))
for _, tool := range result.Tools {
@@ -120,14 +210,58 @@ func getTools(ctx context.Context, name string, permissions permission.Service,
workingDir: workingDir,
})
}
- return mcpTools
+ return mcpTools, nil
+}
+
+// SubscribeMCPEvents returns a channel for MCP events
+func SubscribeMCPEvents(ctx context.Context) <-chan pubsub.Event[MCPEvent] {
+ return mcpBroker.Subscribe(ctx)
+}
+
+// GetMCPStates returns the current state of all MCP clients
+func GetMCPStates() map[string]MCPClientInfo {
+ return maps.Collect(mcpStates.Seq2())
+}
+
+// GetMCPState returns the state of a specific MCP client
+func GetMCPState(name string) (MCPClientInfo, bool) {
+ return mcpStates.Get(name)
+}
+
+// updateMCPState updates the state of an MCP client and publishes an event
+func updateMCPState(name string, state MCPState, err error, client *client.Client, toolCount int) {
+ info := MCPClientInfo{
+ Name: name,
+ State: state,
+ Error: err,
+ Client: client,
+ ToolCount: toolCount,
+ }
+ if state == MCPStateConnected {
+ info.ConnectedAt = time.Now()
+ }
+ mcpStates.Set(name, info)
+
+ // Publish state change event
+ mcpBroker.Publish(pubsub.UpdatedEvent, MCPEvent{
+ Type: MCPEventStateChanged,
+ Name: name,
+ State: state,
+ Error: err,
+ ToolCount: toolCount,
+ })
}
// CloseMCPClients closes all MCP clients. This should be called during application shutdown.
-func CloseMCPClients() {
- for c := range mcpClients.Seq() {
- _ = c.Close()
+func CloseMCPClients() error {
+ var errs []error
+ for name, c := range mcpClients.Seq2() {
+ if err := c.Close(); err != nil {
+ errs = append(errs, fmt.Errorf("close mcp: %s: %w", name, err))
+ }
}
+ mcpBroker.Shutdown()
+ return errors.Join(errs...)
}
var mcpInitRequest = mcp.InitializeRequest{
@@ -143,66 +277,151 @@ var mcpInitRequest = mcp.InitializeRequest{
func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *config.Config) []tools.BaseTool {
var wg sync.WaitGroup
result := csync.NewSlice[tools.BaseTool]()
+
+ // Initialize states for all configured MCPs
for name, m := range cfg.MCP {
if m.Disabled {
+ updateMCPState(name, MCPStateDisabled, nil, nil, 0)
slog.Debug("skipping disabled mcp", "name", name)
continue
}
+
+ // Set initial starting state
+ updateMCPState(name, MCPStateStarting, nil, nil, 0)
+
wg.Add(1)
go func(name string, m config.MCPConfig) {
- defer wg.Done()
- c, err := createMcpClient(m)
+ defer func() {
+ wg.Done()
+ if r := recover(); r != nil {
+ var err error
+ switch v := r.(type) {
+ case error:
+ err = v
+ case string:
+ err = fmt.Errorf("panic: %s", v)
+ default:
+ err = fmt.Errorf("panic: %v", v)
+ }
+ updateMCPState(name, MCPStateError, err, nil, 0)
+ slog.Error("panic in mcp client initialization", "error", err, "name", name)
+ }
+ }()
+
+ ctx, cancel := context.WithTimeout(ctx, mcpTimeout(m))
+ defer cancel()
+
+ c, err := createAndInitializeClient(ctx, name, m, cfg.Resolver())
if err != nil {
- slog.Error("error creating mcp client", "error", err, "name", name)
return
}
- if err := c.Start(ctx); err != nil {
- slog.Error("error starting mcp client", "error", err, "name", name)
- _ = c.Close()
- return
- }
- if _, err := c.Initialize(ctx, mcpInitRequest); err != nil {
- slog.Error("error initializing mcp client", "error", err, "name", name)
- _ = c.Close()
+
+ tools, err := getTools(ctx, name, permissions, c, cfg.WorkingDir())
+ if err != nil {
+ slog.Error("error listing tools", "error", err)
+ updateMCPState(name, MCPStateError, err, nil, 0)
+ c.Close()
return
}
- slog.Info("Initialized mcp client", "name", name)
mcpClients.Set(name, c)
-
- result.Append(getTools(ctx, name, permissions, c, cfg.WorkingDir())...)
+ updateMCPState(name, MCPStateConnected, nil, c, len(tools))
+ result.Append(tools...)
}(name, m)
}
wg.Wait()
return slices.Collect(result.Seq())
}
-func createMcpClient(m config.MCPConfig) (*client.Client, error) {
+func createAndInitializeClient(ctx context.Context, name string, m config.MCPConfig, resolver config.VariableResolver) (*client.Client, error) {
+ c, err := createMcpClient(name, m, resolver)
+ if err != nil {
+ updateMCPState(name, MCPStateError, err, nil, 0)
+ slog.Error("error creating mcp client", "error", err, "name", name)
+ return nil, err
+ }
+
+ // XXX: ideally we should be able to use context.WithTimeout here, but,
+ // the SSE MCP client will start failing once that context is canceled.
+ timeout := mcpTimeout(m)
+ mcpCtx, cancel := context.WithCancel(ctx)
+ cancelTimer := time.AfterFunc(timeout, cancel)
+ if err := c.Start(mcpCtx); err != nil {
+ updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
+ slog.Error("error starting mcp client", "error", err, "name", name)
+ _ = c.Close()
+ cancel()
+ return nil, err
+ }
+ if _, err := c.Initialize(mcpCtx, mcpInitRequest); err != nil {
+ updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
+ slog.Error("error initializing mcp client", "error", err, "name", name)
+ _ = c.Close()
+ cancel()
+ return nil, err
+ }
+ cancelTimer.Stop()
+ slog.Info("Initialized mcp client", "name", name)
+ return c, nil
+}
+
+func maybeTimeoutErr(err error, timeout time.Duration) error {
+ if errors.Is(err, context.Canceled) {
+ return fmt.Errorf("timed out after %s", timeout)
+ }
+ return err
+}
+
+func createMcpClient(name string, m config.MCPConfig, resolver config.VariableResolver) (*client.Client, error) {
switch m.Type {
case config.MCPStdio:
- return client.NewStdioMCPClient(
- m.Command,
+ command, err := resolver.ResolveValue(m.Command)
+ if err != nil {
+ return nil, fmt.Errorf("invalid mcp command: %w", err)
+ }
+ if strings.TrimSpace(command) == "" {
+ return nil, fmt.Errorf("mcp stdio config requires a non-empty 'command' field")
+ }
+ return client.NewStdioMCPClientWithOptions(
+ home.Long(command),
m.ResolvedEnv(),
- m.Args...,
+ m.Args,
+ transport.WithCommandLogger(mcpLogger{name: name}),
)
case config.MCPHttp:
+ if strings.TrimSpace(m.URL) == "" {
+ return nil, fmt.Errorf("mcp http config requires a non-empty 'url' field")
+ }
return client.NewStreamableHttpClient(
m.URL,
transport.WithHTTPHeaders(m.ResolvedHeaders()),
- transport.WithLogger(mcpHTTPLogger{}),
+ transport.WithHTTPLogger(mcpLogger{name: name}),
)
case config.MCPSse:
+ if strings.TrimSpace(m.URL) == "" {
+ return nil, fmt.Errorf("mcp sse config requires a non-empty 'url' field")
+ }
return client.NewSSEMCPClient(
m.URL,
client.WithHeaders(m.ResolvedHeaders()),
+ transport.WithSSELogger(mcpLogger{name: name}),
)
default:
return nil, fmt.Errorf("unsupported mcp type: %s", m.Type)
}
}
-// for MCP's HTTP client.
-type mcpHTTPLogger struct{}
+// for MCP's clients.
+type mcpLogger struct{ name string }
-func (l mcpHTTPLogger) Errorf(format string, v ...any) { slog.Error(fmt.Sprintf(format, v...)) }
-func (l mcpHTTPLogger) Infof(format string, v ...any) { slog.Info(fmt.Sprintf(format, v...)) }
+func (l mcpLogger) Errorf(format string, v ...any) {
+ slog.Error(fmt.Sprintf(format, v...), "name", l.name)
+}
+
+func (l mcpLogger) Infof(format string, v ...any) {
+ slog.Info(fmt.Sprintf(format, v...), "name", l.name)
+}
+
+func mcpTimeout(m config.MCPConfig) time.Duration {
+ return time.Duration(cmp.Or(m.Timeout, 15)) * time.Second
+}
@@ -20,7 +20,8 @@ func CoderPrompt(p string, contextFiles ...string) string {
basePrompt = string(anthropicCoderPrompt)
switch p {
case string(catwalk.InferenceProviderOpenAI):
- basePrompt = string(openaiCoderPrompt)
+ // seems to behave better
+ basePrompt = string(coderV2Prompt)
case string(catwalk.InferenceProviderGemini):
basePrompt = string(geminiCoderPrompt)
}
@@ -44,9 +45,6 @@ var anthropicCoderPrompt []byte
//go:embed gemini.md
var geminiCoderPrompt []byte
-//go:embed openai.md
-var openaiCoderPrompt []byte
-
//go:embed v2.md
var coderV2Prompt []byte
@@ -7,4 +7,3 @@ The file you create will be given to agentic coding agents (such as yourself) th
If there's already a **CRUSH.md**, improve it.
If there are Cursor rules (in `.cursor/rules/` or `.cursorrules`) or Copilot rules (in `.github/copilot-instructions.md`), make sure to include them.
-Add the `.crush` directory to the `.gitignore` file if it's not already there.
@@ -1,96 +0,0 @@
-Please resolve the user's task by editing and testing the code files in your current code execution session.
-You are a deployed coding agent.
-Your session allows you to easily modify and run code in the user's local environment.
-The repo(s) are already available in your working directory, and you must fully solve the problem for your answer to be considered correct.
-
-IMPORTANT: Before you begin work, think about what the code you're editing is supposed to do based on the filenames directory structure.
-
-# Memory
-
-If the current working directory contains a file called CRUSH.md, it will be automatically added to your context. This file serves multiple purposes:
-
-1. Storing frequently used bash commands (build, test, lint, etc.) so you can use them without searching each time
-2. Recording the user's code style preferences (naming conventions, preferred libraries, etc.)
-3. Maintaining useful information about the codebase structure and organization
-
-When you spend time searching for commands to typecheck, lint, build, or test, you should ask the user if it's okay to add those commands to CRUSH.md. Similarly, when learning about code style preferences or important codebase information, ask if it's okay to add that to CRUSH.md so you can remember it for next time.
-
-You MUST adhere to the following criteria when executing the task:
-
-- Working on the repo(s) in the current environment is allowed, even if they are proprietary.
-- Analyzing code for vulnerabilities is allowed.
-- Showing user code and tool call details is allowed.
-- User instructions may overwrite the _CODING GUIDELINES_ section in this developer message.
-- Do not use `ls -R` `find`, or `grep` - these are slow in large repos. Use the Agent tool for searching instead.
-- Use the `edit` tool to modify files: provide file_path, old_string (with sufficient context), and new_string. The edit tool requires:
- - Absolute file paths (starting with /)
- - Unique old_string matches with 3-5 lines of context before and after
- - Exact whitespace and indentation matching
- - For new files: provide file_path and new_string, leave old_string empty
- - For deleting content: provide file_path and old_string, leave new_string empty
-
-# Following conventions
-
-When making changes to files, first understand the file's code conventions. Mimic code style, use existing libraries and utilities, and follow existing patterns.
-
-- NEVER assume that a given library is available, even if it is well known. Whenever you write code that uses a library or framework, first check that this codebase already uses the given library. For example, you might look at neighboring files, or check the package.json (or cargo.toml, and so on depending on the language).
-- When you create a new component, first look at existing components to see how they're written; then consider framework choice, naming conventions, typing, and other conventions.
-- When you edit a piece of code, first look at the code's surrounding context (especially its imports) to understand the code's choice of frameworks and libraries. Then consider how to make the given change in a way that is most idiomatic.
-- Always follow security best practices. Never introduce code that exposes or logs secrets and keys. Never commit secrets or keys to the repository.
-
-# Code style
-
-- IMPORTANT: DO NOT ADD **_ANY_** COMMENTS unless asked
-
-- If completing the user's task requires writing or modifying files:
- - Your code and final answer should follow these _CODING GUIDELINES_:
- - Fix the problem at the root cause rather than applying surface-level patches, when possible.
- - Avoid unneeded complexity in your solution.
- - Ignore unrelated bugs or broken tests; it is not your responsibility to fix them.
- - Update documentation as necessary.
- - Keep changes consistent with the style of the existing codebase. Changes should be minimal and focused on the task.
- - Use `git log` and `git blame` to search the history of the codebase if additional context is required.
- - NEVER add copyright or license headers unless specifically requested.
- - You do not need to `git commit` your changes; this will be done automatically for you.
- - If there is a .pre-commit-config.yaml, use `pre-commit run --files ...` to check that your changes pass the pre-commit checks. However, do not fix pre-existing errors on lines you didn't touch.
- - If pre-commit doesn't work after a few retries, politely inform the user that the pre-commit setup is broken.
- - Once you finish coding, you must
- - Check `git status` to sanity check your changes; revert any scratch files or changes.
- - Remove all inline comments you added as much as possible, even if they look normal. Check using `git diff`. Inline comments must be generally avoided, unless active maintainers of the repo, after long careful study of the code and the issue, will still misinterpret the code without the comments.
- - Check if you accidentally add copyright or license headers. If so, remove them.
- - Try to run pre-commit if it is available.
- - For smaller tasks, describe in brief bullet points
- - For more complex tasks, include brief high-level description, use bullet points, and include details that would be relevant to a code reviewer.
-
-# Doing tasks
-
-The user will primarily request you perform software engineering tasks. This includes solving bugs, adding new functionality, refactoring code, explaining code, and more. For these tasks the following steps are recommended:
-
-1. Use the available search tools to understand the codebase and the user's query.
-2. Implement the solution using all tools available to you
-3. Verify the solution if possible with tests. NEVER assume specific test framework or test script. Check the README or search codebase to determine the testing approach.
-4. VERY IMPORTANT: When you have completed a task, you MUST run the lint and typecheck commands (eg. npm run lint, npm run typecheck, ruff, etc.) if they were provided to you to ensure your code is correct. If you are unable to find the correct command, ask the user for the command to run and if they supply it, proactively suggest writing it to CRUSH.md so that you will know to run it next time.
-
-NEVER commit changes unless the user explicitly asks you to. It is VERY IMPORTANT to only commit when explicitly asked, otherwise the user will feel that you are being too proactive.
-
-# Tool usage policy
-
-- When doing file search, prefer to use the Agent tool in order to reduce context usage.
-- IMPORTANT: All tools are executed in parallel when multiple tool calls are sent in a single message. Only send multiple tool calls when they are safe to run in parallel (no dependencies between them).
-- IMPORTANT: The user does not see the full output of the tool responses, so if you need the output of the tool for the response make sure to summarize it for the user.
-
-# Proactiveness
-
-You are allowed to be proactive, but only when the user asks you to do something. You should strive to strike a balance between:
-
-1. Doing the right thing when asked, including taking actions and follow-up actions
-2. Not surprising the user with actions you take without asking
- For example, if the user asks you how to approach something, you should do your best to answer their question first, and not immediately jump into taking actions.
-3. Do not add additional code explanation summary unless requested by the user. After working on a file, just stop, rather than providing an explanation of what you did.
-
-- If completing the user's task DOES NOT require writing or modifying files (e.g., the user asks a question about the code base):
- - Respond in a friendly tone as a remote teammate, who is knowledgeable, capable and eager to help with coding.
-- When your task involves writing or modifying files:
- - Do NOT tell the user to "save the file" or "copy the code into a file" if you already created or modified the file using `edit`. Instead, reference the file as already saved.
- - Do NOT show the full contents of large files you have already written, unless the user explicitly asks for them.
-- NEVER use emojis in your responses
@@ -9,6 +9,7 @@ import (
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/env"
+ "github.com/charmbracelet/crush/internal/home"
)
type PromptID string
@@ -44,18 +45,7 @@ func getContextFromPaths(workingDir string, contextPaths []string) string {
// expandPath expands ~ and environment variables in file paths
func expandPath(path string) string {
- // Handle tilde expansion
- if strings.HasPrefix(path, "~/") {
- homeDir, err := os.UserHomeDir()
- if err == nil {
- path = filepath.Join(homeDir, path[2:])
- }
- } else if path == "~" {
- homeDir, err := os.UserHomeDir()
- if err == nil {
- path = homeDir
- }
- }
+ path = home.Long(path)
// Handle environment variable expansion using the same pattern as config
if strings.HasPrefix(path, "$") {
@@ -2,10 +2,10 @@ package prompt
import (
"os"
- "path/filepath"
- "runtime"
"strings"
"testing"
+
+ "github.com/charmbracelet/crush/internal/home"
)
func TestExpandPath(t *testing.T) {
@@ -25,16 +25,14 @@ func TestExpandPath(t *testing.T) {
name: "tilde expansion",
input: "~/documents",
expected: func() string {
- home, _ := os.UserHomeDir()
- return filepath.Join(home, "documents")
+ return home.Dir() + "/documents"
},
},
{
name: "tilde only",
input: "~",
expected: func() string {
- home, _ := os.UserHomeDir()
- return home
+ return home.Dir()
},
},
{
@@ -69,55 +67,3 @@ func TestExpandPath(t *testing.T) {
})
}
}
-
-func TestProcessContextPaths(t *testing.T) {
- // Create a temporary directory and file for testing
- tmpDir := t.TempDir()
- testFile := filepath.Join(tmpDir, "test.txt")
- testContent := "test content"
-
- err := os.WriteFile(testFile, []byte(testContent), 0o644)
- if err != nil {
- t.Fatalf("Failed to create test file: %v", err)
- }
-
- // Test with absolute path to file
- result := processContextPaths("", []string{testFile})
- expected := "# From:" + testFile + "\n" + testContent
-
- if result != expected {
- t.Errorf("processContextPaths with absolute path failed.\nGot: %q\nWant: %q", result, expected)
- }
-
- // Test with directory path (should process all files in directory)
- result = processContextPaths("", []string{tmpDir})
- if !strings.Contains(result, testContent) {
- t.Errorf("processContextPaths with directory path failed to include file content")
- }
-
- // Test with tilde expansion (if we can create a file in home directory)
- tmpDir = t.TempDir()
- setHomeEnv(t, tmpDir)
- homeTestFile := filepath.Join(tmpDir, "crush_test_file.txt")
- err = os.WriteFile(homeTestFile, []byte(testContent), 0o644)
- if err == nil {
- defer os.Remove(homeTestFile) // Clean up
-
- tildeFile := "~/crush_test_file.txt"
- result = processContextPaths("", []string{tildeFile})
- expected = "# From:" + homeTestFile + "\n" + testContent
-
- if result != expected {
- t.Errorf("processContextPaths with tilde expansion failed.\nGot: %q\nWant: %q", result, expected)
- }
- }
-}
-
-func setHomeEnv(tb testing.TB, path string) {
- tb.Helper()
- key := "HOME"
- if runtime.GOOS == "windows" {
- key = "USERPROFILE"
- }
- tb.Setenv(key, path)
-}
@@ -7,6 +7,7 @@ import (
"fmt"
"io"
"log/slog"
+ "net/http"
"regexp"
"strconv"
"strings"
@@ -79,6 +80,13 @@ func createAnthropicClient(opts providerClientOptions, tp AnthropicClientType) a
slog.Debug("Skipping X-Api-Key header because Authorization header is provided")
}
+ if opts.baseURL != "" {
+ resolvedBaseURL, err := config.Get().Resolve(opts.baseURL)
+ if err == nil && resolvedBaseURL != "" {
+ anthropicClientOptions = append(anthropicClientOptions, option.WithBaseURL(resolvedBaseURL))
+ }
+ }
+
if config.Get().Options.Debug {
httpClient := log.NewHTTPClient()
anthropicClientOptions = append(anthropicClientOptions, option.WithHTTPClient(httpClient))
@@ -144,6 +152,9 @@ func (a *anthropicClient) convertMessages(messages []message.Message) (anthropic
}
for _, toolCall := range msg.ToolCalls() {
+ if !toolCall.Finished {
+ continue
+ }
var inputMap map[string]any
err := json.Unmarshal([]byte(toolCall.Input), &inputMap)
if err != nil {
@@ -153,7 +164,6 @@ func (a *anthropicClient) convertMessages(messages []message.Message) (anthropic
}
if len(blocks) == 0 {
- slog.Warn("There is a message without content, investigate, this should not happen")
continue
}
anthropicMessages = append(anthropicMessages, anthropic.NewAssistantMessage(blocks...))
@@ -166,10 +176,13 @@ func (a *anthropicClient) convertMessages(messages []message.Message) (anthropic
anthropicMessages = append(anthropicMessages, anthropic.NewUserMessage(results...))
}
}
- return
+ return anthropicMessages
}
func (a *anthropicClient) convertTools(tools []tools.BaseTool) []anthropic.ToolUnionParam {
+ if len(tools) == 0 {
+ return nil
+ }
anthropicTools := make([]anthropic.ToolUnionParam, len(tools))
for i, tool := range tools {
@@ -179,7 +192,7 @@ func (a *anthropicClient) convertTools(tools []tools.BaseTool) []anthropic.ToolU
Description: anthropic.String(info.Description),
InputSchema: anthropic.ToolInputSchemaParam{
Properties: info.Parameters,
- // TODO: figure out how we can tell claude the required fields?
+ Required: info.Required,
},
}
@@ -253,9 +266,6 @@ func (a *anthropicClient) preparedMessages(messages []anthropic.MessageParam, to
if a.providerOptions.systemPromptPrefix != "" {
systemBlocks = append(systemBlocks, anthropic.TextBlockParam{
Text: a.providerOptions.systemPromptPrefix,
- CacheControl: anthropic.CacheControlEphemeralParam{
- Type: "ephemeral",
- },
})
}
@@ -295,13 +305,12 @@ func (a *anthropicClient) send(ctx context.Context, messages []message.Message,
)
// If there is an error we are going to see if we can retry the call
if err != nil {
- slog.Error("Anthropic API error", "error", err.Error(), "attempt", attempts, "max_retries", maxRetries)
retry, after, retryErr := a.shouldRetry(attempts, err)
if retryErr != nil {
return nil, retryErr
}
if retry {
- slog.Warn("Retrying due to rate limit", "attempt", attempts, "max_retries", maxRetries)
+ slog.Warn("Retrying due to rate limit", "attempt", attempts, "max_retries", maxRetries, "error", err)
select {
case <-ctx.Done():
return nil, ctx.Err()
@@ -450,7 +459,7 @@ func (a *anthropicClient) stream(ctx context.Context, messages []message.Message
return
}
if retry {
- slog.Warn("Retrying due to rate limit", "attempt", attempts, "max_retries", maxRetries)
+ slog.Warn("Retrying due to rate limit", "attempt", attempts, "max_retries", maxRetries, "error", err)
select {
case <-ctx.Done():
// context cancelled
@@ -484,17 +493,23 @@ func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, err
return false, 0, fmt.Errorf("maximum retry attempts reached for rate limit: %d retries", maxRetries)
}
- if apiErr.StatusCode == 401 {
+ if apiErr.StatusCode == http.StatusUnauthorized {
+ prev := a.providerOptions.apiKey
+ // in case the key comes from a script, we try to re-evaluate it.
a.providerOptions.apiKey, err = config.Get().Resolve(a.providerOptions.config.APIKey)
if err != nil {
return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
}
+ // if it didn't change, do not retry.
+ if prev == a.providerOptions.apiKey {
+ return false, 0, err
+ }
a.client = createAnthropicClient(a.providerOptions, a.tp)
return true, 0, nil
}
// Handle context limit exceeded error (400 Bad Request)
- if apiErr.StatusCode == 400 {
+ if apiErr.StatusCode == http.StatusBadRequest {
if adjusted, ok := a.handleContextLimitError(apiErr); ok {
a.adjustedMaxTokens = adjusted
slog.Debug("Adjusted max_tokens due to context limit", "new_max_tokens", adjusted)
@@ -503,7 +518,8 @@ func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, err
}
isOverloaded := strings.Contains(apiErr.Error(), "overloaded") || strings.Contains(apiErr.Error(), "rate limit exceeded")
- if apiErr.StatusCode != 429 && apiErr.StatusCode != 529 && !isOverloaded {
+ // 529 (unofficial): The service is overloaded
+ if apiErr.StatusCode != http.StatusTooManyRequests && apiErr.StatusCode != 529 && !isOverloaded {
return false, 0, err
}
@@ -44,6 +44,14 @@ func createGeminiClient(opts providerClientOptions) (*genai.Client, error) {
APIKey: opts.apiKey,
Backend: genai.BackendGeminiAPI,
}
+ if opts.baseURL != "" {
+ resolvedBaseURL, err := config.Get().Resolve(opts.baseURL)
+ if err == nil && resolvedBaseURL != "" {
+ cc.HTTPOptions = genai.HTTPOptions{
+ BaseURL: resolvedBaseURL,
+ }
+ }
+ }
if config.Get().Options.Debug {
cc.HTTPClient = log.NewHTTPClient()
}
@@ -62,15 +70,14 @@ func (g *geminiClient) convertMessages(messages []message.Message) []*genai.Cont
var parts []*genai.Part
parts = append(parts, &genai.Part{Text: msg.Content().String()})
for _, binaryContent := range msg.BinaryContent() {
- imageFormat := strings.Split(binaryContent.MIMEType, "/")
parts = append(parts, &genai.Part{InlineData: &genai.Blob{
- MIMEType: imageFormat[1],
+ MIMEType: binaryContent.MIMEType,
Data: binaryContent.Data,
}})
}
history = append(history, &genai.Content{
Parts: parts,
- Role: "user",
+ Role: genai.RoleUser,
})
case message.Assistant:
var assistantParts []*genai.Part
@@ -81,6 +88,9 @@ func (g *geminiClient) convertMessages(messages []message.Message) []*genai.Cont
if len(msg.ToolCalls()) > 0 {
for _, call := range msg.ToolCalls() {
+ if !call.Finished {
+ continue
+ }
args, _ := parseJSONToMap(call.Input)
assistantParts = append(assistantParts, &genai.Part{
FunctionCall: &genai.FunctionCall{
@@ -93,12 +103,13 @@ func (g *geminiClient) convertMessages(messages []message.Message) []*genai.Cont
if len(assistantParts) > 0 {
history = append(history, &genai.Content{
- Role: "model",
+ Role: genai.RoleModel,
Parts: assistantParts,
})
}
case message.Tool:
+ var toolParts []*genai.Part
for _, result := range msg.ToolResults() {
response := map[string]any{"result": result.Content}
parsed, err := parseJSONToMap(result.Content)
@@ -118,16 +129,17 @@ func (g *geminiClient) convertMessages(messages []message.Message) []*genai.Cont
}
}
- history = append(history, &genai.Content{
- Parts: []*genai.Part{
- {
- FunctionResponse: &genai.FunctionResponse{
- Name: toolCall.Name,
- Response: response,
- },
- },
+ toolParts = append(toolParts, &genai.Part{
+ FunctionResponse: &genai.FunctionResponse{
+ Name: toolCall.Name,
+ Response: response,
},
- Role: "function",
+ })
+ }
+ if len(toolParts) > 0 {
+ history = append(history, &genai.Content{
+ Parts: toolParts,
+ Role: genai.RoleUser,
})
}
}
@@ -216,7 +228,7 @@ func (g *geminiClient) send(ctx context.Context, messages []message.Message, too
return nil, retryErr
}
if retry {
- slog.Warn("Retrying due to rate limit", "attempt", attempts, "max_retries", maxRetries)
+ slog.Warn("Retrying due to rate limit", "attempt", attempts, "max_retries", maxRetries, "error", err)
select {
case <-ctx.Done():
return nil, ctx.Err()
@@ -319,6 +331,7 @@ func (g *geminiClient) stream(ctx context.Context, messages []message.Message, t
for _, part := range lastMsg.Parts {
lastMsgParts = append(lastMsgParts, *part)
}
+
for resp, err := range chat.SendMessageStream(ctx, lastMsgParts...) {
if err != nil {
retry, after, retryErr := g.shouldRetry(attempts, err)
@@ -327,7 +340,7 @@ func (g *geminiClient) stream(ctx context.Context, messages []message.Message, t
return
}
if retry {
- slog.Warn("Retrying due to rate limit", "attempt", attempts, "max_retries", maxRetries)
+ slog.Warn("Retrying due to rate limit", "attempt", attempts, "max_retries", maxRetries, "error", err)
select {
case <-ctx.Done():
if ctx.Err() != nil {
@@ -336,7 +349,7 @@ func (g *geminiClient) stream(ctx context.Context, messages []message.Message, t
return
case <-time.After(time.Duration(after) * time.Millisecond):
- break
+ continue
}
} else {
eventChan <- ProviderEvent{Type: EventError, Error: err}
@@ -369,19 +382,12 @@ func (g *geminiClient) stream(ctx context.Context, messages []message.Message, t
Finished: true,
}
- isNew := true
- for _, existing := range toolCalls {
- if existing.Name == newCall.Name && existing.Input == newCall.Input {
- isNew = false
- break
- }
- }
-
- if isNew {
- toolCalls = append(toolCalls, newCall)
- }
+ toolCalls = append(toolCalls, newCall)
}
}
+ } else {
+ // no content received
+ break
}
}
@@ -405,6 +411,11 @@ func (g *geminiClient) stream(ctx context.Context, messages []message.Message, t
},
}
return
+ } else {
+ eventChan <- ProviderEvent{
+ Type: EventError,
+ Error: errors.New("no content received"),
+ }
}
}
}()
@@ -429,10 +440,16 @@ func (g *geminiClient) shouldRetry(attempts int, err error) (bool, int64, error)
// Check for token expiration (401 Unauthorized)
if contains(errMsg, "unauthorized", "invalid api key", "api key expired") {
+ prev := g.providerOptions.apiKey
+ // in case the key comes from a script, we try to re-evaluate it.
g.providerOptions.apiKey, err = config.Get().Resolve(g.providerOptions.config.APIKey)
if err != nil {
return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
}
+ // if it didn't change, do not retry.
+ if prev == g.providerOptions.apiKey {
+ return false, 0, err
+ }
g.client, err = createGeminiClient(g.providerOptions)
if err != nil {
return false, 0, fmt.Errorf("failed to create Gemini client after API key refresh: %w", err)
@@ -2,10 +2,12 @@ package provider
import (
"context"
+ "encoding/json"
"errors"
"fmt"
"io"
"log/slog"
+ "net/http"
"strings"
"time"
@@ -14,6 +16,7 @@ import (
"github.com/charmbracelet/crush/internal/llm/tools"
"github.com/charmbracelet/crush/internal/log"
"github.com/charmbracelet/crush/internal/message"
+ "github.com/google/uuid"
"github.com/openai/openai-go"
"github.com/openai/openai-go/option"
"github.com/openai/openai-go/packages/param"
@@ -41,7 +44,7 @@ func createOpenAIClient(opts providerClientOptions) openai.Client {
}
if opts.baseURL != "" {
resolvedBaseURL, err := config.Get().Resolve(opts.baseURL)
- if err == nil {
+ if err == nil && resolvedBaseURL != "" {
openaiClientOptions = append(openaiClientOptions, option.WithBaseURL(resolvedBaseURL))
}
}
@@ -70,8 +73,9 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
systemMessage = o.providerOptions.systemPromptPrefix + "\n" + systemMessage
}
- systemTextBlock := openai.ChatCompletionContentPartTextParam{Text: systemMessage}
+ system := openai.SystemMessage(systemMessage)
if isAnthropicModel && !o.providerOptions.disableCache {
+ systemTextBlock := openai.ChatCompletionContentPartTextParam{Text: systemMessage}
systemTextBlock.SetExtraFields(
map[string]any{
"cache_control": map[string]string{
@@ -79,10 +83,10 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
},
},
)
+ var content []openai.ChatCompletionContentPartTextParam
+ content = append(content, systemTextBlock)
+ system = openai.SystemMessage(content)
}
- var content []openai.ChatCompletionContentPartTextParam
- content = append(content, systemTextBlock)
- system := openai.SystemMessage(content)
openaiMessages = append(openaiMessages, system)
for i, msg := range messages {
@@ -93,9 +97,12 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
switch msg.Role {
case message.User:
var content []openai.ChatCompletionContentPartUnionParam
+
textBlock := openai.ChatCompletionContentPartTextParam{Text: msg.Content().String()}
content = append(content, openai.ChatCompletionContentPartUnionParam{OfText: &textBlock})
+ hasBinaryContent := false
for _, binaryContent := range msg.BinaryContent() {
+ hasBinaryContent = true
imageURL := openai.ChatCompletionContentPartImageImageURLParam{URL: binaryContent.String(catwalk.InferenceProviderOpenAI)}
imageBlock := openai.ChatCompletionContentPartImageParam{ImageURL: imageURL}
@@ -108,53 +115,54 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
},
})
}
-
- openaiMessages = append(openaiMessages, openai.UserMessage(content))
+ if hasBinaryContent || (isAnthropicModel && !o.providerOptions.disableCache) {
+ openaiMessages = append(openaiMessages, openai.UserMessage(content))
+ } else {
+ openaiMessages = append(openaiMessages, openai.UserMessage(msg.Content().String()))
+ }
case message.Assistant:
assistantMsg := openai.ChatCompletionAssistantMessageParam{
Role: "assistant",
}
- hasContent := false
- if msg.Content().String() != "" {
- hasContent = true
- textBlock := openai.ChatCompletionContentPartTextParam{Text: msg.Content().String()}
- if cache && !o.providerOptions.disableCache && isAnthropicModel {
- textBlock.SetExtraFields(map[string]any{
- "cache_control": map[string]string{
- "type": "ephemeral",
- },
- })
+ // Only include finished tool calls; interrupted tool calls must not be resent.
+ if len(msg.ToolCalls()) > 0 {
+ finished := make([]message.ToolCall, 0, len(msg.ToolCalls()))
+ for _, call := range msg.ToolCalls() {
+ if call.Finished {
+ finished = append(finished, call)
+ }
}
- assistantMsg.Content = openai.ChatCompletionAssistantMessageParamContentUnion{
- OfArrayOfContentParts: []openai.ChatCompletionAssistantMessageParamContentArrayOfContentPartUnion{
- {
- OfText: &textBlock,
- },
- },
+ if len(finished) > 0 {
+ assistantMsg.ToolCalls = make([]openai.ChatCompletionMessageToolCallParam, len(finished))
+ for i, call := range finished {
+ assistantMsg.ToolCalls[i] = openai.ChatCompletionMessageToolCallParam{
+ ID: call.ID,
+ Type: "function",
+ Function: openai.ChatCompletionMessageToolCallFunctionParam{
+ Name: call.Name,
+ Arguments: call.Input,
+ },
+ }
+ }
}
}
-
- if len(msg.ToolCalls()) > 0 {
- hasContent = true
+ if msg.Content().String() != "" {
assistantMsg.Content = openai.ChatCompletionAssistantMessageParamContentUnion{
- OfString: param.NewOpt(msg.Content().String()),
- }
- assistantMsg.ToolCalls = make([]openai.ChatCompletionMessageToolCallParam, len(msg.ToolCalls()))
- for i, call := range msg.ToolCalls() {
- assistantMsg.ToolCalls[i] = openai.ChatCompletionMessageToolCallParam{
- ID: call.ID,
- Type: "function",
- Function: openai.ChatCompletionMessageToolCallFunctionParam{
- Name: call.Name,
- Arguments: call.Input,
- },
- }
+ OfString: param.NewOpt(msg.Content().Text),
}
}
- if !hasContent {
- slog.Warn("There is a message without content, investigate, this should not happen")
+
+ if cache && !o.providerOptions.disableCache && isAnthropicModel {
+ assistantMsg.SetExtraFields(map[string]any{
+ "cache_control": map[string]string{
+ "type": "ephemeral",
+ },
+ })
+ }
+ // Skip empty assistant messages (no content and no finished tool calls)
+ if msg.Content().String() == "" && len(assistantMsg.ToolCalls) == 0 {
continue
}
@@ -171,7 +179,7 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
}
}
- return
+ return openaiMessages
}
func (o *openaiClient) convertTools(tools []tools.BaseTool) []openai.ChatCompletionToolParam {
@@ -243,6 +251,8 @@ func (o *openaiClient) preparedParams(messages []openai.ChatCompletionMessagePar
params.ReasoningEffort = shared.ReasoningEffortMedium
case "high":
params.ReasoningEffort = shared.ReasoningEffortHigh
+ case "minimal":
+ params.ReasoningEffort = shared.ReasoningEffort("minimal")
default:
params.ReasoningEffort = shared.ReasoningEffort(reasoningEffort)
}
@@ -269,7 +279,7 @@ func (o *openaiClient) send(ctx context.Context, messages []message.Message, too
return nil, retryErr
}
if retry {
- slog.Warn("Retrying due to rate limit", "attempt", attempts, "max_retries", maxRetries)
+ slog.Warn("Retrying due to rate limit", "attempt", attempts, "max_retries", maxRetries, "error", err)
select {
case <-ctx.Done():
return nil, ctx.Err()
@@ -329,21 +339,28 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
acc := openai.ChatCompletionAccumulator{}
currentContent := ""
toolCalls := make([]message.ToolCall, 0)
-
- var currentToolCallID string
- var currentToolCall openai.ChatCompletionMessageToolCall
- var msgToolCalls []openai.ChatCompletionMessageToolCall
- currentToolIndex := 0
+ msgToolCalls := make(map[int64]openai.ChatCompletionMessageToolCall)
+ toolMap := make(map[string]openai.ChatCompletionMessageToolCall)
+ toolCallIDMap := make(map[string]string)
for openaiStream.Next() {
chunk := openaiStream.Current()
// Kujtim: this is an issue with openrouter qwen, its sending -1 for the tool index
- if len(chunk.Choices) > 0 && len(chunk.Choices[0].Delta.ToolCalls) > 0 && chunk.Choices[0].Delta.ToolCalls[0].Index == -1 {
- chunk.Choices[0].Delta.ToolCalls[0].Index = int64(currentToolIndex)
- currentToolIndex++
+ if len(chunk.Choices) != 0 && len(chunk.Choices[0].Delta.ToolCalls) > 0 && chunk.Choices[0].Delta.ToolCalls[0].Index == -1 {
+ chunk.Choices[0].Delta.ToolCalls[0].Index = 0
}
acc.AddChunk(chunk)
- // This fixes multiple tool calls for some providers
- for _, choice := range chunk.Choices {
+ for i, choice := range chunk.Choices {
+ reasoning, ok := choice.Delta.JSON.ExtraFields["reasoning"]
+ if ok && reasoning.Raw() != "" {
+ reasoningStr := ""
+ json.Unmarshal([]byte(reasoning.Raw()), &reasoningStr)
+ if reasoningStr != "" {
+ eventChan <- ProviderEvent{
+ Type: EventThinkingDelta,
+ Thinking: reasoningStr,
+ }
+ }
+ }
if choice.Delta.Content != "" {
eventChan <- ProviderEvent{
Type: EventContentDelta,
@@ -352,62 +369,67 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
currentContent += choice.Delta.Content
} else if len(choice.Delta.ToolCalls) > 0 {
toolCall := choice.Delta.ToolCalls[0]
- // Detect tool use start
- if currentToolCallID == "" {
- if toolCall.ID != "" {
- currentToolCallID = toolCall.ID
- eventChan <- ProviderEvent{
- Type: EventToolUseStart,
- ToolCall: &message.ToolCall{
- ID: toolCall.ID,
- Name: toolCall.Function.Name,
- Finished: false,
- },
+ if strings.HasPrefix(toolCall.ID, "functions.") {
+ exID, ok := toolCallIDMap[toolCall.ID]
+ if !ok {
+ newID := uuid.NewString()
+ toolCallIDMap[toolCall.ID] = newID
+ toolCall.ID = newID
+ } else {
+ toolCall.ID = exID
+ }
+ }
+ newToolCall := false
+ if existingToolCall, ok := msgToolCalls[toolCall.Index]; ok { // tool call exists
+ if toolCall.ID != "" && toolCall.ID != existingToolCall.ID {
+ found := false
+ // try to find the tool based on the ID
+ for _, tool := range msgToolCalls {
+ if tool.ID == toolCall.ID {
+ existingToolCall.Function.Arguments += toolCall.Function.Arguments
+ msgToolCalls[toolCall.Index] = existingToolCall
+ toolMap[existingToolCall.ID] = existingToolCall
+ found = true
+ }
}
- currentToolCall = openai.ChatCompletionMessageToolCall{
- ID: toolCall.ID,
- Type: "function",
- Function: openai.ChatCompletionMessageToolCallFunction{
- Name: toolCall.Function.Name,
- Arguments: toolCall.Function.Arguments,
- },
+ if !found {
+ newToolCall = true
}
+ } else {
+ existingToolCall.Function.Arguments += toolCall.Function.Arguments
+ msgToolCalls[toolCall.Index] = existingToolCall
+ toolMap[existingToolCall.ID] = existingToolCall
}
} else {
- // Delta tool use
- if toolCall.ID == "" || toolCall.ID == currentToolCallID {
- currentToolCall.Function.Arguments += toolCall.Function.Arguments
- } else {
- // Detect new tool use
- if toolCall.ID != currentToolCallID {
- msgToolCalls = append(msgToolCalls, currentToolCall)
- currentToolCallID = toolCall.ID
- eventChan <- ProviderEvent{
- Type: EventToolUseStart,
- ToolCall: &message.ToolCall{
- ID: toolCall.ID,
- Name: toolCall.Function.Name,
- Finished: false,
- },
- }
- currentToolCall = openai.ChatCompletionMessageToolCall{
- ID: toolCall.ID,
- Type: "function",
- Function: openai.ChatCompletionMessageToolCallFunction{
- Name: toolCall.Function.Name,
- Arguments: toolCall.Function.Arguments,
- },
- }
- }
+ newToolCall = true
+ }
+ if newToolCall { // new tool call
+ if toolCall.ID == "" {
+ toolCall.ID = uuid.NewString()
}
+ eventChan <- ProviderEvent{
+ Type: EventToolUseStart,
+ ToolCall: &message.ToolCall{
+ ID: toolCall.ID,
+ Name: toolCall.Function.Name,
+ Finished: false,
+ },
+ }
+ msgToolCalls[toolCall.Index] = openai.ChatCompletionMessageToolCall{
+ ID: toolCall.ID,
+ Type: "function",
+ Function: openai.ChatCompletionMessageToolCallFunction{
+ Name: toolCall.Function.Name,
+ Arguments: toolCall.Function.Arguments,
+ },
+ }
+ toolMap[toolCall.ID] = msgToolCalls[toolCall.Index]
}
- }
- // Kujtim: some models send finish stop even for tool calls
- if choice.FinishReason == "tool_calls" || (choice.FinishReason == "stop" && currentToolCallID != "") {
- msgToolCalls = append(msgToolCalls, currentToolCall)
- if len(acc.Choices) > 0 {
- acc.Choices[0].Message.ToolCalls = msgToolCalls
+ toolCalls := []openai.ChatCompletionMessageToolCall{}
+ for _, tc := range toolMap {
+ toolCalls = append(toolCalls, tc)
}
+ acc.Choices[i].Message.ToolCalls = toolCalls
}
}
}
@@ -458,11 +480,11 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
return
}
if retry {
- slog.Warn("Retrying due to rate limit", "attempt", attempts, "max_retries", maxRetries)
+ slog.Warn("Retrying due to rate limit", "attempt", attempts, "max_retries", maxRetries, "error", err)
select {
case <-ctx.Done():
// context cancelled
- if ctx.Err() == nil {
+ if ctx.Err() != nil {
eventChan <- ProviderEvent{Type: EventError, Error: ctx.Err()}
}
close(eventChan)
@@ -492,20 +514,34 @@ func (o *openaiClient) shouldRetry(attempts int, err error) (bool, int64, error)
retryAfterValues := []string{}
if errors.As(err, &apiErr) {
// Check for token expiration (401 Unauthorized)
- if apiErr.StatusCode == 401 {
+ if apiErr.StatusCode == http.StatusUnauthorized {
+ prev := o.providerOptions.apiKey
+ // in case the key comes from a script, we try to re-evaluate it.
o.providerOptions.apiKey, err = config.Get().Resolve(o.providerOptions.config.APIKey)
if err != nil {
return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
}
+ // if it didn't change, do not retry.
+ if prev == o.providerOptions.apiKey {
+ return false, 0, err
+ }
o.client = createOpenAIClient(o.providerOptions)
return true, 0, nil
}
- if apiErr.StatusCode != 429 && apiErr.StatusCode != 500 {
+ if apiErr.StatusCode == http.StatusTooManyRequests {
+ // Check if this is an insufficient quota error (permanent)
+ if apiErr.Type == "insufficient_quota" || apiErr.Code == "insufficient_quota" {
+ return false, 0, fmt.Errorf("OpenAI quota exceeded: %s. Please check your plan and billing details", apiErr.Message)
+ }
+ // Other 429 errors (rate limiting) can be retried
+ } else if apiErr.StatusCode != http.StatusInternalServerError {
return false, 0, err
}
- retryAfterValues = apiErr.Response.Header.Values("Retry-After")
+ if apiErr.Response != nil {
+ retryAfterValues = apiErr.Response.Header.Values("Retry-After")
+ }
}
if apiErr != nil {
@@ -533,6 +569,10 @@ func (o *openaiClient) toolCalls(completion openai.ChatCompletion) []message.Too
if len(completion.Choices) > 0 && len(completion.Choices[0].Message.ToolCalls) > 0 {
for _, call := range completion.Choices[0].Message.ToolCalls {
+			// The stream accumulator can emit empty tool-call entries; skip them.
+ if call.Function.Name == "" {
+ continue
+ }
toolCall := message.ToolCall{
ID: call.ID,
Name: call.Function.Name,
@@ -6,6 +6,7 @@ import (
"net/http"
"net/http/httptest"
"os"
+ "strings"
"testing"
"time"
@@ -17,7 +18,7 @@ import (
)
func TestMain(m *testing.M) {
- _, err := config.Init(".", true)
+ _, err := config.Init(".", "", true)
if err != nil {
panic("Failed to initialize config: " + err.Error())
}
@@ -75,7 +76,7 @@ func TestOpenAIClientStreamChoices(t *testing.T) {
},
}
- ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+ ctx, cancel := context.WithTimeout(t.Context(), 5*time.Second)
defer cancel()
eventsChan := client.stream(ctx, messages, nil)
@@ -88,3 +89,78 @@ func TestOpenAIClientStreamChoices(t *testing.T) {
}
}
}
+
+func TestOpenAIClient429InsufficientQuotaError(t *testing.T) {
+ client := &openaiClient{
+ providerOptions: providerClientOptions{
+ modelType: config.SelectedModelTypeLarge,
+ apiKey: "test-key",
+ systemMessage: "test",
+ config: config.ProviderConfig{
+ ID: "test-openai",
+ APIKey: "test-key",
+ },
+ model: func(config.SelectedModelType) catwalk.Model {
+ return catwalk.Model{
+ ID: "test-model",
+ Name: "test-model",
+ }
+ },
+ },
+ }
+
+ // Test insufficient_quota error should not retry
+ apiErr := &openai.Error{
+ StatusCode: 429,
+ Message: "You exceeded your current quota, please check your plan and billing details. For more information on this error, read the docs: https://platform.openai.com/docs/guides/error-codes/api-errors.",
+ Type: "insufficient_quota",
+ Code: "insufficient_quota",
+ }
+
+ retry, _, err := client.shouldRetry(1, apiErr)
+ if retry {
+ t.Error("Expected shouldRetry to return false for insufficient_quota error, but got true")
+ }
+ if err == nil {
+ t.Error("Expected shouldRetry to return an error for insufficient_quota, but got nil")
+ }
+ if err != nil && !strings.Contains(err.Error(), "quota") {
+ t.Errorf("Expected error message to mention quota, got: %v", err)
+ }
+}
+
+func TestOpenAIClient429RateLimitError(t *testing.T) {
+ client := &openaiClient{
+ providerOptions: providerClientOptions{
+ modelType: config.SelectedModelTypeLarge,
+ apiKey: "test-key",
+ systemMessage: "test",
+ config: config.ProviderConfig{
+ ID: "test-openai",
+ APIKey: "test-key",
+ },
+ model: func(config.SelectedModelType) catwalk.Model {
+ return catwalk.Model{
+ ID: "test-model",
+ Name: "test-model",
+ }
+ },
+ },
+ }
+
+ // Test regular rate limit error should retry
+ apiErr := &openai.Error{
+ StatusCode: 429,
+ Message: "Rate limit reached for requests",
+ Type: "rate_limit_exceeded",
+ Code: "rate_limit_exceeded",
+ }
+
+ retry, _, err := client.shouldRetry(1, apiErr)
+ if !retry {
+ t.Error("Expected shouldRetry to return true for rate_limit_exceeded error, but got false")
+ }
+ if err != nil {
+ t.Errorf("Expected shouldRetry to return nil error for rate_limit_exceeded, but got: %v", err)
+ }
+}
@@ -5,6 +5,7 @@ import (
"fmt"
"github.com/charmbracelet/catwalk/pkg/catwalk"
+
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/llm/tools"
"github.com/charmbracelet/crush/internal/message"
@@ -12,7 +13,7 @@ import (
type EventType string
-const maxRetries = 8
+const maxRetries = 3
const (
EventContentStart EventType = "content_start"
@@ -97,7 +98,7 @@ func (p *baseProvider[C]) cleanMessages(messages []message.Message) (cleaned []m
}
cleaned = append(cleaned, msg)
}
- return
+ return cleaned
}
func (p *baseProvider[C]) SendMessages(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (*ProviderResponse, error) {
@@ -139,6 +140,8 @@ func WithMaxTokens(maxTokens int64) ProviderClientOption {
}
func NewProvider(cfg config.ProviderConfig, opts ...ProviderClientOption) (Provider, error) {
+ restore := config.PushPopCrushEnv()
+ defer restore()
resolvedAPIKey, err := config.Get().Resolve(cfg.APIKey)
if err != nil {
return nil, fmt.Errorf("failed to resolve API key for provider %s: %w", cfg.ID, err)
@@ -1,12 +1,16 @@
package tools
import (
+ "bytes"
"context"
+ _ "embed"
"encoding/json"
"fmt"
+	"text/template"
"strings"
"time"
+ "github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/permission"
"github.com/charmbracelet/crush/internal/shell"
)
@@ -30,6 +34,7 @@ type BashResponseMetadata struct {
type bashTool struct {
permissions permission.Service
workingDir string
+ attribution *config.Attribution
}
const (
@@ -41,6 +46,22 @@ const (
BashNoOutput = "no output"
)
+//go:embed bash.md
+var bashDescription []byte
+
+var bashDescriptionTpl = template.Must(
+ template.New("bashDescription").
+ Parse(string(bashDescription)),
+)
+
+type bashDescriptionData struct {
+ BannedCommands string
+ MaxOutputLength int
+ AttributionStep string
+ AttributionExample string
+ PRAttribution string
+}
+
var bannedCommands = []string{
// Network/Download tools
"alias",
@@ -114,196 +135,100 @@ var bannedCommands = []string{
"ufw",
}
-func bashDescription() string {
+func (b *bashTool) bashDescription() string {
bannedCommandsStr := strings.Join(bannedCommands, ", ")
- return fmt.Sprintf(`Executes a given bash command in a persistent shell session with optional timeout, ensuring proper handling and security measures.
-
-CROSS-PLATFORM SHELL SUPPORT:
-* This tool uses a shell interpreter (mvdan/sh) that mimics the Bash language,
- so you should use Bash syntax in all platforms, including Windows.
- The most common shell builtins and core utils are available in Windows as
- well.
-* Make sure to use forward slashes (/) as path separators in commands, even on
- Windows. Example: "ls C:/foo/bar" instead of "ls C:\foo\bar".
-
-Before executing the command, please follow these steps:
-
-1. Directory Verification:
- - If the command will create new directories or files, first use the LS tool to verify the parent directory exists and is the correct location
- - For example, before running "mkdir foo/bar", first use LS to check that "foo" exists and is the intended parent directory
-
-2. Security Check:
- - For security and to limit the threat of a prompt injection attack, some commands are limited or banned. If you use a disallowed command, you will receive an error message explaining the restriction. Explain the error to the User.
- - Verify that the command is not one of the banned commands: %s.
-
-3. Command Execution:
- - After ensuring proper quoting, execute the command.
- - Capture the output of the command.
-
-4. Output Processing:
- - If the output exceeds %d characters, output will be truncated before being returned to you.
- - Prepare the output for display to the user.
-
-5. Return Result:
- - Provide the processed output of the command.
- - If any errors occurred during execution, include those in the output.
- - The result will also have metadata like the cwd (current working directory) at the end, included with <cwd></cwd> tags.
-
-Usage notes:
-- The command argument is required.
-- You can specify an optional timeout in milliseconds (up to 600000ms / 10 minutes). If not specified, commands will timeout after 30 minutes.
-- VERY IMPORTANT: You MUST avoid using search commands like 'find' and 'grep'. Instead use Grep, Glob, or Agent tools to search. You MUST avoid read tools like 'cat', 'head', 'tail', and 'ls', and use FileRead and LS tools to read files.
-- When issuing multiple commands, use the ';' or '&&' operator to separate them. DO NOT use newlines (newlines are ok in quoted strings).
-- IMPORTANT: All commands share the same shell session. Shell state (environment variables, virtual environments, current directory, etc.) persist between commands. For example, if you set an environment variable as part of a command, the environment variable will persist for subsequent commands.
-- Try to maintain your current working directory throughout the session by using absolute paths and avoiding usage of 'cd'. You may use 'cd' if the User explicitly requests it.
-<good-example>
-pytest /foo/bar/tests
-</good-example>
-<bad-example>
-cd /foo/bar && pytest tests
-</bad-example>
-
-# Committing changes with git
-
-When the user asks you to create a new git commit, follow these steps carefully:
-
-1. Start with a single message that contains exactly three tool_use blocks that do the following (it is VERY IMPORTANT that you send these tool_use blocks in a single message, otherwise it will feel slow to the user!):
- - Run a git status command to see all untracked files.
- - Run a git diff command to see both staged and unstaged changes that will be committed.
- - Run a git log command to see recent commit messages, so that you can follow this repository's commit message style.
-
-2. Use the git context at the start of this conversation to determine which files are relevant to your commit. Add relevant untracked files to the staging area. Do not commit files that were already modified at the start of this conversation, if they are not relevant to your commit.
-
-3. Analyze all staged changes (both previously staged and newly added) and draft a commit message. Wrap your analysis process in <commit_analysis> tags:
-
-<commit_analysis>
-- List the files that have been changed or added
-- Summarize the nature of the changes (eg. new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)
-- Brainstorm the purpose or motivation behind these changes
-- Do not use tools to explore code, beyond what is available in the git context
-- Assess the impact of these changes on the overall project
-- Check for any sensitive information that shouldn't be committed
-- Draft a concise (1-2 sentences) commit message that focuses on the "why" rather than the "what"
-- Ensure your language is clear, concise, and to the point
-- Ensure the message accurately reflects the changes and their purpose (i.e. "add" means a wholly new feature, "update" means an enhancement to an existing feature, "fix" means a bug fix, etc.)
-- Ensure the message is not generic (avoid words like "Update" or "Fix" without context)
-- Review the draft message to ensure it accurately reflects the changes and their purpose
-</commit_analysis>
-
-4. Create the commit with a message ending with:
-💘 Generated with Crush
-Co-Authored-By: Crush <crush@charm.land>
-
-- In order to ensure good formatting, ALWAYS pass the commit message via a HEREDOC, a la this example:
-<example>
+
+ // Build attribution text based on settings
+ var attributionStep, attributionExample, prAttribution string
+
+ // Default to true if attribution is nil (backward compatibility)
+ generatedWith := b.attribution == nil || b.attribution.GeneratedWith
+ coAuthoredBy := b.attribution == nil || b.attribution.CoAuthoredBy
+
+ // Build PR attribution
+ if generatedWith {
+ prAttribution = "💘 Generated with Crush"
+ }
+
+ if generatedWith || coAuthoredBy {
+ var attributionParts []string
+ if generatedWith {
+ attributionParts = append(attributionParts, "💘 Generated with Crush")
+ }
+ if coAuthoredBy {
+ attributionParts = append(attributionParts, "Co-Authored-By: Crush <crush@charm.land>")
+ }
+
+ if len(attributionParts) > 0 {
+ attributionStep = fmt.Sprintf("4. Create the commit with a message ending with:\n%s", strings.Join(attributionParts, "\n"))
+
+ attributionText := strings.Join(attributionParts, "\n ")
+ attributionExample = fmt.Sprintf(`<example>
git commit -m "$(cat <<'EOF'
Commit message here.
- 💘 Generated with Crush
- Co-Authored-By: 💘 Crush <crush@charm.land>
+ %s
+ EOF
+)"</example>`, attributionText)
+ }
+ }
+
+ if attributionStep == "" {
+ attributionStep = "4. Create the commit with your commit message."
+ attributionExample = `<example>
+git commit -m "$(cat <<'EOF'
+ Commit message here.
EOF
- )"
-</example>
-
-5. If the commit fails due to pre-commit hook changes, retry the commit ONCE to include these automated changes. If it fails again, it usually means a pre-commit hook is preventing the commit. If the commit succeeds but you notice that files were modified by the pre-commit hook, you MUST amend your commit to include them.
-
-6. Finally, run git status to make sure the commit succeeded.
-
-Important notes:
-- When possible, combine the "git add" and "git commit" commands into a single "git commit -am" command, to speed things up
-- However, be careful not to stage files (e.g. with 'git add .') for commits that aren't part of the change, they may have untracked files they want to keep around, but not commit.
-- NEVER update the git config
-- DO NOT push to the remote repository
-- IMPORTANT: Never use git commands with the -i flag (like git rebase -i or git add -i) since they require interactive input which is not supported.
-- If there are no changes to commit (i.e., no untracked files and no modifications), do not create an empty commit
-- Ensure your commit message is meaningful and concise. It should explain the purpose of the changes, not just describe them.
-- Return an empty response - the user will see the git output directly
-
-# Creating pull requests
-Use the gh command via the Bash tool for ALL GitHub-related tasks including working with issues, pull requests, checks, and releases. If given a Github URL use the gh command to get the information needed.
-
-IMPORTANT: When the user asks you to create a pull request, follow these steps carefully:
-
-1. Understand the current state of the branch. Remember to send a single message that contains multiple tool_use blocks (it is VERY IMPORTANT that you do this in a single message, otherwise it will feel slow to the user!):
- - Run a git status command to see all untracked files.
- - Run a git diff command to see both staged and unstaged changes that will be committed.
- - Check if the current branch tracks a remote branch and is up to date with the remote, so you know if you need to push to the remote
- - Run a git log command and 'git diff main...HEAD' to understand the full commit history for the current branch (from the time it diverged from the 'main' branch.)
-
-2. Create new branch if needed
-
-3. Commit changes if needed
-
-4. Push to remote with -u flag if needed
-
-5. Analyze all changes that will be included in the pull request, making sure to look at all relevant commits (not just the latest commit, but all commits that will be included in the pull request!), and draft a pull request summary. Wrap your analysis process in <pr_analysis> tags:
-
-<pr_analysis>
-- List the commits since diverging from the main branch
-- Summarize the nature of the changes (eg. new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)
-- Brainstorm the purpose or motivation behind these changes
-- Assess the impact of these changes on the overall project
-- Do not use tools to explore code, beyond what is available in the git context
-- Check for any sensitive information that shouldn't be committed
-- Draft a concise (1-2 bullet points) pull request summary that focuses on the "why" rather than the "what"
-- Ensure the summary accurately reflects all changes since diverging from the main branch
-- Ensure your language is clear, concise, and to the point
-- Ensure the summary accurately reflects the changes and their purpose (ie. "add" means a wholly new feature, "update" means an enhancement to an existing feature, "fix" means a bug fix, etc.)
-- Ensure the summary is not generic (avoid words like "Update" or "Fix" without context)
-- Review the draft summary to ensure it accurately reflects the changes and their purpose
-</pr_analysis>
-
-6. Create PR using gh pr create with the format below. Use a HEREDOC to pass the body to ensure correct formatting.
-<example>
-gh pr create --title "the pr title" --body "$(cat <<'EOF'
-## Summary
-<1-3 bullet points>
-
-## Test plan
-[Checklist of TODOs for testing the pull request...]
-
-💘 Generated with Crush
-EOF
-)"
-</example>
-
-Important:
-- Return an empty response - the user will see the gh output directly
-- Never update git config`, bannedCommandsStr, MaxOutputLength)
+)"</example>`
+ }
+
+ var out bytes.Buffer
+ if err := bashDescriptionTpl.Execute(&out, bashDescriptionData{
+ BannedCommands: bannedCommandsStr,
+ MaxOutputLength: MaxOutputLength,
+ AttributionStep: attributionStep,
+ AttributionExample: attributionExample,
+ PRAttribution: prAttribution,
+ }); err != nil {
+ // this should never happen.
+ panic("failed to execute bash description template: " + err.Error())
+ }
+ return out.String()
}
func blockFuncs() []shell.BlockFunc {
return []shell.BlockFunc{
shell.CommandsBlocker(bannedCommands),
- shell.ArgumentsBlocker([][]string{
- // System package managers
- {"apk", "add"},
- {"apt", "install"},
- {"apt-get", "install"},
- {"dnf", "install"},
- {"emerge"},
- {"pacman", "-S"},
- {"pkg", "install"},
- {"yum", "install"},
- {"zypper", "install"},
-
- // Language-specific package managers
- {"brew", "install"},
- {"cargo", "install"},
- {"gem", "install"},
- {"go", "install"},
- {"npm", "install", "-g"},
- {"npm", "install", "--global"},
- {"pip", "install", "--user"},
- {"pip3", "install", "--user"},
- {"pnpm", "add", "-g"},
- {"pnpm", "add", "--global"},
- {"yarn", "global", "add"},
- }),
+
+ // System package managers
+ shell.ArgumentsBlocker("apk", []string{"add"}, nil),
+ shell.ArgumentsBlocker("apt", []string{"install"}, nil),
+ shell.ArgumentsBlocker("apt-get", []string{"install"}, nil),
+ shell.ArgumentsBlocker("dnf", []string{"install"}, nil),
+ shell.ArgumentsBlocker("pacman", nil, []string{"-S"}),
+ shell.ArgumentsBlocker("pkg", []string{"install"}, nil),
+ shell.ArgumentsBlocker("yum", []string{"install"}, nil),
+ shell.ArgumentsBlocker("zypper", []string{"install"}, nil),
+
+ // Language-specific package managers
+ shell.ArgumentsBlocker("brew", []string{"install"}, nil),
+ shell.ArgumentsBlocker("cargo", []string{"install"}, nil),
+ shell.ArgumentsBlocker("gem", []string{"install"}, nil),
+ shell.ArgumentsBlocker("go", []string{"install"}, nil),
+ shell.ArgumentsBlocker("npm", []string{"install"}, []string{"--global"}),
+ shell.ArgumentsBlocker("npm", []string{"install"}, []string{"-g"}),
+ shell.ArgumentsBlocker("pip", []string{"install"}, []string{"--user"}),
+ shell.ArgumentsBlocker("pip3", []string{"install"}, []string{"--user"}),
+ shell.ArgumentsBlocker("pnpm", []string{"add"}, []string{"--global"}),
+ shell.ArgumentsBlocker("pnpm", []string{"add"}, []string{"-g"}),
+ shell.ArgumentsBlocker("yarn", []string{"global", "add"}, nil),
+
+ // `go test -exec` can run arbitrary commands
+ shell.ArgumentsBlocker("go", []string{"test"}, []string{"-exec"}),
}
}
-func NewBashTool(permission permission.Service, workingDir string) BaseTool {
+func NewBashTool(permission permission.Service, workingDir string, attribution *config.Attribution) BaseTool {
// Set up command blocking on the persistent shell
persistentShell := shell.GetPersistentShell(workingDir)
persistentShell.SetBlockFuncs(blockFuncs())
@@ -311,6 +236,7 @@ func NewBashTool(permission permission.Service, workingDir string) BaseTool {
return &bashTool{
permissions: permission,
workingDir: workingDir,
+ attribution: attribution,
}
}
@@ -321,7 +247,7 @@ func (b *bashTool) Name() string {
func (b *bashTool) Info() ToolInfo {
return ToolInfo{
Name: BashToolName,
- Description: bashDescription(),
+ Description: b.bashDescription(),
Parameters: map[string]any{
"command": map[string]any{
"type": "string",
@@ -366,13 +292,14 @@ func (b *bashTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error)
sessionID, messageID := GetContextValues(ctx)
if sessionID == "" || messageID == "" {
- return ToolResponse{}, fmt.Errorf("session ID and message ID are required for creating a new file")
+ return ToolResponse{}, fmt.Errorf("session ID and message ID are required for executing shell command")
}
if !isSafeReadOnly {
+ shell := shell.GetPersistentShell(b.workingDir)
p := b.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
- Path: b.workingDir,
+ Path: shell.GetWorkingDir(),
ToolCallID: call.ID,
ToolName: BashToolName,
Action: "execute",
@@ -0,0 +1,161 @@
+Executes a given bash command in a persistent shell session with optional timeout, ensuring proper handling and security measures.
+
+CROSS-PLATFORM SHELL SUPPORT:
+
+- This tool uses a shell interpreter (mvdan/sh) that mimics the Bash language,
+ so you should use Bash syntax in all platforms, including Windows.
+ The most common shell builtins and core utils are available in Windows as
+ well.
+- Make sure to use forward slashes (/) as path separators in commands, even on
+ Windows. Example: "ls C:/foo/bar" instead of "ls C:\foo\bar".
+
+Before executing the command, please follow these steps:
+
+1. Directory Verification:
+
+- If the command will create new directories or files, first use the LS tool to verify the parent directory exists and is the correct location
+- For example, before running "mkdir foo/bar", first use LS to check that "foo" exists and is the intended parent directory
+
+2. Security Check:
+
+- For security and to limit the threat of a prompt injection attack, some commands are limited or banned. If you use a disallowed command, you will receive an error message explaining the restriction. Explain the error to the User.
+- Verify that the command is not one of the banned commands: {{ .BannedCommands }}.
+
+3. Command Execution:
+
+- After ensuring proper quoting, execute the command.
+- Capture the output of the command.
+
+4. Output Processing:
+
+- If the output exceeds {{ .MaxOutputLength }} characters, output will be truncated before being returned to you.
+- Prepare the output for display to the user.
+
+5. Return Result:
+
+- Provide the processed output of the command.
+- If any errors occurred during execution, include those in the output.
+- The result will also have metadata like the cwd (current working directory) at the end, included with <cwd></cwd> tags.
+
+Usage notes:
+
+- The command argument is required.
+- You can specify an optional timeout in milliseconds (up to 600000ms / 10 minutes). If not specified, commands will timeout after 30 minutes.
+- VERY IMPORTANT: You MUST avoid using search commands like 'find' and 'grep'. Instead use Grep, Glob, or Agent tools to search. You MUST avoid read tools like 'cat', 'head', 'tail', and 'ls', and use FileRead and LS tools to read files.
+- When issuing multiple commands, use the ';' or '&&' operator to separate them. DO NOT use newlines (newlines are ok in quoted strings).
+- IMPORTANT: All commands share the same shell session. Shell state (environment variables, virtual environments, current directory, etc.) persist between commands. For example, if you set an environment variable as part of a command, the environment variable will persist for subsequent commands.
+- Try to maintain your current working directory throughout the session by using absolute paths and avoiding usage of 'cd'. You may use 'cd' if the User explicitly requests it.
+ <good-example>
+ pytest /foo/bar/tests
+ </good-example>
+ <bad-example>
+ cd /foo/bar && pytest tests
+ </bad-example>
+
+# Committing changes with git
+
+When the user asks you to create a new git commit, follow these steps carefully:
+
+1. Start with a single message that contains exactly three tool_use blocks that do the following (it is VERY IMPORTANT that you send these tool_use blocks in a single message, otherwise it will feel slow to the user!):
+
+- Run a git status command to see all untracked files.
+- Run a git diff command to see both staged and unstaged changes that will be committed.
+- Run a git log command to see recent commit messages, so that you can follow this repository's commit message style.
+
+2. Use the git context at the start of this conversation to determine which files are relevant to your commit. Add relevant untracked files to the staging area. Do not commit files that were already modified at the start of this conversation, if they are not relevant to your commit.
+
+3. Analyze all staged changes (both previously staged and newly added) and draft a commit message. Wrap your analysis process in <commit_analysis> tags:
+
+<commit_analysis>
+
+- List the files that have been changed or added
+- Summarize the nature of the changes (eg. new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)
+- Brainstorm the purpose or motivation behind these changes
+- Do not use tools to explore code, beyond what is available in the git context
+- Assess the impact of these changes on the overall project
+- Check for any sensitive information that shouldn't be committed
+- Draft a concise (1-2 sentences) commit message that focuses on the "why" rather than the "what"
+- Ensure your language is clear, concise, and to the point
+- Ensure the message accurately reflects the changes and their purpose (i.e. "add" means a wholly new feature, "update" means an enhancement to an existing feature, "fix" means a bug fix, etc.)
+- Ensure the message is not generic (avoid words like "Update" or "Fix" without context)
+- Review the draft message to ensure it accurately reflects the changes and their purpose
+ </commit_analysis>
+
+{{ .AttributionStep }}
+
+- In order to ensure good formatting, ALWAYS pass the commit message via a HEREDOC, a la this example:
+ {{ .AttributionExample }}
+
+5. If the commit fails due to pre-commit hook changes, retry the commit ONCE to include these automated changes. If it fails again, it usually means a pre-commit hook is preventing the commit. If the commit succeeds but you notice that files were modified by the pre-commit hook, you MUST amend your commit to include them.
+
+6. Finally, run git status to make sure the commit succeeded.
+
+Important notes:
+
+- When possible, combine the "git add" and "git commit" commands into a single "git commit -am" command, to speed things up
+- However, be careful not to stage files (e.g. with 'git add .') for commits that aren't part of the change, they may have untracked files they want to keep around, but not commit.
+- NEVER update the git config
+- DO NOT push to the remote repository
+- IMPORTANT: Never use git commands with the -i flag (like git rebase -i or git add -i) since they require interactive input which is not supported.
+- If there are no changes to commit (i.e., no untracked files and no modifications), do not create an empty commit
+- Ensure your commit message is meaningful and concise. It should explain the purpose of the changes, not just describe them.
+- Return an empty response - the user will see the git output directly
+
+# Creating pull requests
+
+Use the gh command via the Bash tool for ALL GitHub-related tasks including working with issues, pull requests, checks, and releases. If given a Github URL use the gh command to get the information needed.
+
+IMPORTANT: When the user asks you to create a pull request, follow these steps carefully:
+
+1. Understand the current state of the branch. Remember to send a single message that contains multiple tool_use blocks (it is VERY IMPORTANT that you do this in a single message, otherwise it will feel slow to the user!):
+
+- Run a git status command to see all untracked files.
+- Run a git diff command to see both staged and unstaged changes that will be committed.
+- Check if the current branch tracks a remote branch and is up to date with the remote, so you know if you need to push to the remote
+- Run a git log command and 'git diff main...HEAD' to understand the full commit history for the current branch (from the time it diverged from the 'main' branch.)
+
+2. Create new branch if needed
+
+3. Commit changes if needed
+
+4. Push to remote with -u flag if needed
+
+5. Analyze all changes that will be included in the pull request, making sure to look at all relevant commits (not just the latest commit, but all commits that will be included in the pull request!), and draft a pull request summary. Wrap your analysis process in <pr_analysis> tags:
+
+<pr_analysis>
+
+- List the commits since diverging from the main branch
+- Summarize the nature of the changes (eg. new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)
+- Brainstorm the purpose or motivation behind these changes
+- Assess the impact of these changes on the overall project
+- Do not use tools to explore code, beyond what is available in the git context
+- Check for any sensitive information that shouldn't be committed
+- Draft a concise (1-2 bullet points) pull request summary that focuses on the "why" rather than the "what"
+- Ensure the summary accurately reflects all changes since diverging from the main branch
+- Ensure your language is clear, concise, and to the point
+- Ensure the summary accurately reflects the changes and their purpose (ie. "add" means a wholly new feature, "update" means an enhancement to an existing feature, "fix" means a bug fix, etc.)
+- Ensure the summary is not generic (avoid words like "Update" or "Fix" without context)
+- Review the draft summary to ensure it accurately reflects the changes and their purpose
+ </pr_analysis>
+
+6. Create PR using gh pr create with the format below. Use a HEREDOC to pass the body to ensure correct formatting.
+ <example>
+ gh pr create --title "the pr title" --body "$(cat <<'EOF'
+
+## Summary
+
+<1-3 bullet points>
+
+## Test plan
+
+[Checklist of TODOs for testing the pull request...]
+
+{{ .PRAttribution }}
+EOF
+)"
+</example>
+
+Important:
+
+- Return an empty response - the user will see the gh output directly
+- Never update git config
@@ -2,51 +2,33 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"log/slog"
- "maps"
"sort"
"strings"
"time"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
type DiagnosticsParams struct {
FilePath string `json:"file_path"`
}
+
type diagnosticsTool struct {
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
}
-const (
- DiagnosticsToolName = "diagnostics"
- diagnosticsDescription = `Get diagnostics for a file and/or project.
-WHEN TO USE THIS TOOL:
-- Use when you need to check for errors or warnings in your code
-- Helpful for debugging and ensuring code quality
-- Good for getting a quick overview of issues in a file or project
-HOW TO USE:
-- Provide a path to a file to get diagnostics for that file
-- Leave the path empty to get diagnostics for the entire project
-- Results are displayed in a structured format with severity levels
-FEATURES:
-- Displays errors, warnings, and hints
-- Groups diagnostics by severity
-- Provides detailed information about each diagnostic
-LIMITATIONS:
-- Results are limited to the diagnostics provided by the LSP clients
-- May not cover all possible issues in the code
-- Does not provide suggestions for fixing issues
-TIPS:
-- Use in conjunction with other tools for a comprehensive code review
-- Combine with the LSP client for real-time diagnostics
-`
-)
+const DiagnosticsToolName = "diagnostics"
+
+//go:embed diagnostics.md
+var diagnosticsDescription []byte
-func NewDiagnosticsTool(lspClients map[string]*lsp.Client) BaseTool {
+func NewDiagnosticsTool(lspClients *csync.Map[string, *lsp.Client]) BaseTool {
return &diagnosticsTool{
lspClients,
}
@@ -59,7 +41,7 @@ func (b *diagnosticsTool) Name() string {
func (b *diagnosticsTool) Info() ToolInfo {
return ToolInfo{
Name: DiagnosticsToolName,
- Description: diagnosticsDescription,
+ Description: string(diagnosticsDescription),
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
@@ -76,228 +58,148 @@ func (b *diagnosticsTool) Run(ctx context.Context, call ToolCall) (ToolResponse,
return NewTextErrorResponse(fmt.Sprintf("error parsing parameters: %s", err)), nil
}
- lsps := b.lspClients
-
- if len(lsps) == 0 {
+ if b.lspClients.Len() == 0 {
return NewTextErrorResponse("no LSP clients available"), nil
}
-
- if params.FilePath != "" {
- notifyLspOpenFile(ctx, params.FilePath, lsps)
- waitForLspDiagnostics(ctx, params.FilePath, lsps)
- }
-
- output := getDiagnostics(params.FilePath, lsps)
-
+ notifyLSPs(ctx, b.lspClients, params.FilePath)
+ output := getDiagnostics(params.FilePath, b.lspClients)
return NewTextResponse(output), nil
}
-func notifyLspOpenFile(ctx context.Context, filePath string, lsps map[string]*lsp.Client) {
- for _, client := range lsps {
- err := client.OpenFile(ctx, filePath)
- if err != nil {
+func notifyLSPs(ctx context.Context, lsps *csync.Map[string, *lsp.Client], filepath string) {
+ if filepath == "" {
+ return
+ }
+ for client := range lsps.Seq() {
+ if !client.HandlesFile(filepath) {
continue
}
+ _ = client.OpenFileOnDemand(ctx, filepath)
+ _ = client.NotifyChange(ctx, filepath)
+ client.WaitForDiagnostics(ctx, 5*time.Second)
}
}
-func waitForLspDiagnostics(ctx context.Context, filePath string, lsps map[string]*lsp.Client) {
- if len(lsps) == 0 {
- return
- }
-
- diagChan := make(chan struct{}, 1)
-
- for _, client := range lsps {
- originalDiags := make(map[protocol.DocumentURI][]protocol.Diagnostic)
- maps.Copy(originalDiags, client.GetDiagnostics())
-
- handler := func(params json.RawMessage) {
- lsp.HandleDiagnostics(client, params)
- var diagParams protocol.PublishDiagnosticsParams
- if err := json.Unmarshal(params, &diagParams); err != nil {
- return
- }
+func getDiagnostics(filePath string, lsps *csync.Map[string, *lsp.Client]) string {
+ fileDiagnostics := []string{}
+ projectDiagnostics := []string{}
- path, err := diagParams.URI.Path()
+ for lspName, client := range lsps.Seq2() {
+ for location, diags := range client.GetDiagnostics() {
+ path, err := location.Path()
if err != nil {
- slog.Error("Failed to convert diagnostic URI to path", "uri", diagParams.URI, "error", err)
- return
+ slog.Error("Failed to convert diagnostic location URI to path", "uri", location, "error", err)
+ continue
}
-
- if path == filePath || hasDiagnosticsChanged(client.GetDiagnostics(), originalDiags) {
- select {
- case diagChan <- struct{}{}:
- default:
+ isCurrentFile := path == filePath
+ for _, diag := range diags {
+ formattedDiag := formatDiagnostic(path, diag, lspName)
+ if isCurrentFile {
+ fileDiagnostics = append(fileDiagnostics, formattedDiag)
+ } else {
+ projectDiagnostics = append(projectDiagnostics, formattedDiag)
}
}
}
+ }
- client.RegisterNotificationHandler("textDocument/publishDiagnostics", handler)
+ sortDiagnostics(fileDiagnostics)
+ sortDiagnostics(projectDiagnostics)
- if client.IsFileOpen(filePath) {
- err := client.NotifyChange(ctx, filePath)
- if err != nil {
- continue
- }
- } else {
- err := client.OpenFile(ctx, filePath)
- if err != nil {
- continue
- }
- }
- }
+ var output strings.Builder
+ writeDiagnostics(&output, "file_diagnostics", fileDiagnostics)
+ writeDiagnostics(&output, "project_diagnostics", projectDiagnostics)
- select {
- case <-diagChan:
- case <-time.After(5 * time.Second):
- case <-ctx.Done():
+ if len(fileDiagnostics) > 0 || len(projectDiagnostics) > 0 {
+ fileErrors := countSeverity(fileDiagnostics, "Error")
+ fileWarnings := countSeverity(fileDiagnostics, "Warn")
+ projectErrors := countSeverity(projectDiagnostics, "Error")
+ projectWarnings := countSeverity(projectDiagnostics, "Warn")
+ output.WriteString("\n<diagnostic_summary>\n")
+ fmt.Fprintf(&output, "Current file: %d errors, %d warnings\n", fileErrors, fileWarnings)
+ fmt.Fprintf(&output, "Project: %d errors, %d warnings\n", projectErrors, projectWarnings)
+ output.WriteString("</diagnostic_summary>\n")
}
-}
-func hasDiagnosticsChanged(current, original map[protocol.DocumentURI][]protocol.Diagnostic) bool {
- for uri, diags := range current {
- origDiags, exists := original[uri]
- if !exists || len(diags) != len(origDiags) {
- return true
- }
- }
- return false
+ out := output.String()
+ slog.Info("Diagnostics", "output", fmt.Sprintf("%q", out))
+ return out
}
-func getDiagnostics(filePath string, lsps map[string]*lsp.Client) string {
- fileDiagnostics := []string{}
- projectDiagnostics := []string{}
-
- formatDiagnostic := func(pth string, diagnostic protocol.Diagnostic, source string) string {
- severity := "Info"
- switch diagnostic.Severity {
- case protocol.SeverityError:
- severity = "Error"
- case protocol.SeverityWarning:
- severity = "Warn"
- case protocol.SeverityHint:
- severity = "Hint"
- }
-
- location := fmt.Sprintf("%s:%d:%d", pth, diagnostic.Range.Start.Line+1, diagnostic.Range.Start.Character+1)
-
- sourceInfo := ""
- if diagnostic.Source != "" {
- sourceInfo = diagnostic.Source
- } else if source != "" {
- sourceInfo = source
- }
-
- codeInfo := ""
- if diagnostic.Code != nil {
- codeInfo = fmt.Sprintf("[%v]", diagnostic.Code)
- }
-
- tagsInfo := ""
- if len(diagnostic.Tags) > 0 {
- tags := []string{}
- for _, tag := range diagnostic.Tags {
- switch tag {
- case protocol.Unnecessary:
- tags = append(tags, "unnecessary")
- case protocol.Deprecated:
- tags = append(tags, "deprecated")
- }
- }
- if len(tags) > 0 {
- tagsInfo = fmt.Sprintf(" (%s)", strings.Join(tags, ", "))
- }
- }
-
- return fmt.Sprintf("%s: %s [%s]%s%s %s",
- severity,
- location,
- sourceInfo,
- codeInfo,
- tagsInfo,
- diagnostic.Message)
+func writeDiagnostics(output *strings.Builder, tag string, in []string) {
+ if len(in) == 0 {
+ return
}
-
- for lspName, client := range lsps {
- diagnostics := client.GetDiagnostics()
- if len(diagnostics) > 0 {
- for location, diags := range diagnostics {
- path, err := location.Path()
- if err != nil {
- slog.Error("Failed to convert diagnostic location URI to path", "uri", location, "error", err)
- continue
- }
- isCurrentFile := path == filePath
-
- for _, diag := range diags {
- formattedDiag := formatDiagnostic(path, diag, lspName)
-
- if isCurrentFile {
- fileDiagnostics = append(fileDiagnostics, formattedDiag)
- } else {
- projectDiagnostics = append(projectDiagnostics, formattedDiag)
- }
- }
- }
- }
+ output.WriteString("\n<" + tag + ">\n")
+ if len(in) > 10 {
+ output.WriteString(strings.Join(in[:10], "\n"))
+ fmt.Fprintf(output, "\n... and %d more diagnostics", len(in)-10)
+ } else {
+ output.WriteString(strings.Join(in, "\n"))
}
+ output.WriteString("\n</" + tag + ">\n")
+}
- sort.Slice(fileDiagnostics, func(i, j int) bool {
- iIsError := strings.HasPrefix(fileDiagnostics[i], "Error")
- jIsError := strings.HasPrefix(fileDiagnostics[j], "Error")
+func sortDiagnostics(in []string) []string {
+ sort.Slice(in, func(i, j int) bool {
+ iIsError := strings.HasPrefix(in[i], "Error")
+ jIsError := strings.HasPrefix(in[j], "Error")
if iIsError != jIsError {
return iIsError // Errors come first
}
- return fileDiagnostics[i] < fileDiagnostics[j] // Then alphabetically
+ return in[i] < in[j] // Then alphabetically
})
+ return in
+}
- sort.Slice(projectDiagnostics, func(i, j int) bool {
- iIsError := strings.HasPrefix(projectDiagnostics[i], "Error")
- jIsError := strings.HasPrefix(projectDiagnostics[j], "Error")
- if iIsError != jIsError {
- return iIsError
- }
- return projectDiagnostics[i] < projectDiagnostics[j]
- })
+func formatDiagnostic(pth string, diagnostic protocol.Diagnostic, source string) string {
+ severity := "Info"
+ switch diagnostic.Severity {
+ case protocol.SeverityError:
+ severity = "Error"
+ case protocol.SeverityWarning:
+ severity = "Warn"
+ case protocol.SeverityHint:
+ severity = "Hint"
+ }
- var output strings.Builder
+ location := fmt.Sprintf("%s:%d:%d", pth, diagnostic.Range.Start.Line+1, diagnostic.Range.Start.Character+1)
- if len(fileDiagnostics) > 0 {
- output.WriteString("\n<file_diagnostics>\n")
- if len(fileDiagnostics) > 10 {
- output.WriteString(strings.Join(fileDiagnostics[:10], "\n"))
- fmt.Fprintf(&output, "\n... and %d more diagnostics", len(fileDiagnostics)-10)
- } else {
- output.WriteString(strings.Join(fileDiagnostics, "\n"))
- }
- output.WriteString("\n</file_diagnostics>\n")
+ sourceInfo := ""
+ if diagnostic.Source != "" {
+ sourceInfo = diagnostic.Source
+ } else if source != "" {
+ sourceInfo = source
}
- if len(projectDiagnostics) > 0 {
- output.WriteString("\n<project_diagnostics>\n")
- if len(projectDiagnostics) > 10 {
- output.WriteString(strings.Join(projectDiagnostics[:10], "\n"))
- fmt.Fprintf(&output, "\n... and %d more diagnostics", len(projectDiagnostics)-10)
- } else {
- output.WriteString(strings.Join(projectDiagnostics, "\n"))
- }
- output.WriteString("\n</project_diagnostics>\n")
+ codeInfo := ""
+ if diagnostic.Code != nil {
+ codeInfo = fmt.Sprintf("[%v]", diagnostic.Code)
}
- if len(fileDiagnostics) > 0 || len(projectDiagnostics) > 0 {
- fileErrors := countSeverity(fileDiagnostics, "Error")
- fileWarnings := countSeverity(fileDiagnostics, "Warn")
- projectErrors := countSeverity(projectDiagnostics, "Error")
- projectWarnings := countSeverity(projectDiagnostics, "Warn")
-
- output.WriteString("\n<diagnostic_summary>\n")
- fmt.Fprintf(&output, "Current file: %d errors, %d warnings\n", fileErrors, fileWarnings)
- fmt.Fprintf(&output, "Project: %d errors, %d warnings\n", projectErrors, projectWarnings)
- output.WriteString("</diagnostic_summary>\n")
+ tagsInfo := ""
+ if len(diagnostic.Tags) > 0 {
+ tags := []string{}
+ for _, tag := range diagnostic.Tags {
+ switch tag {
+ case protocol.Unnecessary:
+ tags = append(tags, "unnecessary")
+ case protocol.Deprecated:
+ tags = append(tags, "deprecated")
+ }
+ }
+ if len(tags) > 0 {
+ tagsInfo = fmt.Sprintf(" (%s)", strings.Join(tags, ", "))
+ }
}
- return output.String()
+ return fmt.Sprintf("%s: %s [%s]%s%s %s",
+ severity,
+ location,
+ sourceInfo,
+ codeInfo,
+ tagsInfo,
+ diagnostic.Message)
}
func countSeverity(diagnostics []string, severity string) int {
@@ -0,0 +1,21 @@
+Get diagnostics for a file and/or project.
+WHEN TO USE THIS TOOL:
+
+- Use when you need to check for errors or warnings in your code
+- Helpful for debugging and ensuring code quality
+- Good for getting a quick overview of issues in a file or project
+ HOW TO USE:
+- Provide a path to a file to get diagnostics for that file
+- Leave the path empty to get diagnostics for the entire project
+- Results are displayed in a structured format with severity levels
+ FEATURES:
+- Displays errors, warnings, and hints
+- Groups diagnostics by severity
+- Provides detailed information about each diagnostic
+ LIMITATIONS:
+- Results are limited to the diagnostics provided by the LSP clients
+- May not cover all possible issues in the code
+- Does not provide suggestions for fixing issues
+ TIPS:
+- Use in conjunction with other tools for a comprehensive code review
+- Combine with the LSP client for real-time diagnostics
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"io"
@@ -32,38 +33,10 @@ type downloadTool struct {
workingDir string
}
-const (
- DownloadToolName = "download"
- downloadToolDescription = `Downloads binary data from a URL and saves it to a local file.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to download files, images, or other binary data from URLs
-- Helpful for downloading assets, documents, or any file type
-- Useful for saving remote content locally for processing or storage
-
-HOW TO USE:
-- Provide the URL to download from
-- Specify the local file path where the content should be saved
-- Optionally set a timeout for the request
-
-FEATURES:
-- Downloads any file type (binary or text)
-- Automatically creates parent directories if they don't exist
-- Handles large files efficiently with streaming
-- Sets reasonable timeouts to prevent hanging
-- Validates input parameters before making requests
-
-LIMITATIONS:
-- Maximum file size is 100MB
-- Only supports HTTP and HTTPS protocols
-- Cannot handle authentication or cookies
-- Some websites may block automated requests
-- Will overwrite existing files without warning
-
-TIPS:
-- Use absolute paths or paths relative to the working directory
-- Set appropriate timeouts for large files or slow connections`
-)
+const DownloadToolName = "download"
+
+//go:embed download.md
+var downloadDescription []byte
func NewDownloadTool(permissions permission.Service, workingDir string) BaseTool {
return &downloadTool{
@@ -87,7 +60,7 @@ func (t *downloadTool) Name() string {
func (t *downloadTool) Info() ToolInfo {
return ToolInfo{
Name: DownloadToolName,
- Description: downloadToolDescription,
+ Description: string(downloadDescription),
Parameters: map[string]any{
"url": map[string]any{
"type": "string",
@@ -0,0 +1,34 @@
+Downloads binary data from a URL and saves it to a local file.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to download files, images, or other binary data from URLs
+- Helpful for downloading assets, documents, or any file type
+- Useful for saving remote content locally for processing or storage
+
+HOW TO USE:
+
+- Provide the URL to download from
+- Specify the local file path where the content should be saved
+- Optionally set a timeout for the request
+
+FEATURES:
+
+- Downloads any file type (binary or text)
+- Automatically creates parent directories if they don't exist
+- Handles large files efficiently with streaming
+- Sets reasonable timeouts to prevent hanging
+- Validates input parameters before making requests
+
+LIMITATIONS:
+
+- Maximum file size is 100MB
+- Only supports HTTP and HTTPS protocols
+- Cannot handle authentication or cookies
+- Some websites may block automated requests
+- Will overwrite existing files without warning
+
+TIPS:
+
+- Use absolute paths or paths relative to the working directory
+- Set appropriate timeouts for large files or slow connections
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"log/slog"
@@ -10,6 +11,7 @@ import (
"strings"
"time"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/diff"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/history"
@@ -39,71 +41,18 @@ type EditResponseMetadata struct {
}
type editTool struct {
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
permissions permission.Service
files history.Service
workingDir string
}
-const (
- EditToolName = "edit"
- editDescription = `Edits files by replacing text, creating new files, or deleting content. For moving or renaming files, use the Bash tool with the 'mv' command instead. For larger file edits, use the FileWrite tool to overwrite files.
+const EditToolName = "edit"
-Before using this tool:
+//go:embed edit.md
+var editDescription []byte
-1. Use the FileRead tool to understand the file's contents and context
-
-2. Verify the directory path is correct (only applicable when creating new files):
- - Use the LS tool to verify the parent directory exists and is the correct location
-
-To make a file edit, provide the following:
-1. file_path: The absolute path to the file to modify (must be absolute, not relative)
-2. old_string: The text to replace (must be unique within the file, and must match the file contents exactly, including all whitespace and indentation)
-3. new_string: The edited text to replace the old_string
-4. replace_all: Replace all occurrences of old_string (default false)
-
-Special cases:
-- To create a new file: provide file_path and new_string, leave old_string empty
-- To delete content: provide file_path and old_string, leave new_string empty
-
-The tool will replace ONE occurrence of old_string with new_string in the specified file by default. Set replace_all to true to replace all occurrences.
-
-CRITICAL REQUIREMENTS FOR USING THIS TOOL:
-
-1. UNIQUENESS: When replace_all is false (default), the old_string MUST uniquely identify the specific instance you want to change. This means:
- - Include AT LEAST 3-5 lines of context BEFORE the change point
- - Include AT LEAST 3-5 lines of context AFTER the change point
- - Include all whitespace, indentation, and surrounding code exactly as it appears in the file
-
-2. SINGLE INSTANCE: When replace_all is false, this tool can only change ONE instance at a time. If you need to change multiple instances:
- - Set replace_all to true to replace all occurrences at once
- - Or make separate calls to this tool for each instance
- - Each call must uniquely identify its specific instance using extensive context
-
-3. VERIFICATION: Before using this tool:
- - Check how many instances of the target text exist in the file
- - If multiple instances exist and replace_all is false, gather enough context to uniquely identify each one
- - Plan separate tool calls for each instance or use replace_all
-
-WARNING: If you do not follow these requirements:
- - The tool will fail if old_string matches multiple locations and replace_all is false
- - The tool will fail if old_string doesn't match exactly (including whitespace)
- - You may change the wrong instance if you don't include enough context
-
-When making edits:
- - Ensure the edit results in idiomatic, correct code
- - Do not leave the code in a broken state
- - Always use absolute file paths (starting with /)
-
-WINDOWS NOTES:
-- File paths should use forward slashes (/) for cross-platform compatibility
-- On Windows, absolute paths start with drive letters (C:/) but forward slashes work throughout
-- File permissions are handled automatically by the Go runtime
-
-Remember: when making multiple file edits in a row to the same file, you should prefer to send all edits in a single message with multiple calls to this tool, rather than multiple messages with a single call each.`
-)
-
-func NewEditTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service, workingDir string) BaseTool {
+func NewEditTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, files history.Service, workingDir string) BaseTool {
return &editTool{
lspClients: lspClients,
permissions: permissions,
@@ -119,7 +68,7 @@ func (e *editTool) Name() string {
func (e *editTool) Info() ToolInfo {
return ToolInfo{
Name: EditToolName,
- Description: editDescription,
+ Description: string(editDescription),
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
@@ -183,7 +132,8 @@ func (e *editTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error)
return response, nil
}
- waitForLspDiagnostics(ctx, params.FilePath, e.lspClients)
+ notifyLSPs(ctx, e.lspClients, params.FilePath)
+
text := fmt.Sprintf("<result>\n%s\n</result>\n", response.Content)
text += getDiagnostics(params.FilePath, e.lspClients)
response.Content = text
@@ -299,7 +249,7 @@ func (e *editTool) deleteContent(ctx context.Context, filePath, oldString string
return ToolResponse{}, fmt.Errorf("failed to read file: %w", err)
}
- oldContent := string(content)
+ oldContent, isCrlf := fsext.ToUnixLineEndings(string(content))
var newContent string
var deletionCount int
@@ -356,6 +306,10 @@ func (e *editTool) deleteContent(ctx context.Context, filePath, oldString string
return ToolResponse{}, permission.ErrorPermissionDenied
}
+ if isCrlf {
+ newContent, _ = fsext.ToWindowsLineEndings(newContent)
+ }
+
err = os.WriteFile(filePath, []byte(newContent), 0o644)
if err != nil {
return ToolResponse{}, fmt.Errorf("failed to write file: %w", err)
@@ -428,7 +382,7 @@ func (e *editTool) replaceContent(ctx context.Context, filePath, oldString, newS
return ToolResponse{}, fmt.Errorf("failed to read file: %w", err)
}
- oldContent := string(content)
+ oldContent, isCrlf := fsext.ToUnixLineEndings(string(content))
var newContent string
var replacementCount int
@@ -487,6 +441,10 @@ func (e *editTool) replaceContent(ctx context.Context, filePath, oldString, newS
return ToolResponse{}, permission.ErrorPermissionDenied
}
+ if isCrlf {
+ newContent, _ = fsext.ToWindowsLineEndings(newContent)
+ }
+
err = os.WriteFile(filePath, []byte(newContent), 0o644)
if err != nil {
return ToolResponse{}, fmt.Errorf("failed to write file: %w", err)
@@ -0,0 +1,60 @@
+Edits files by replacing text, creating new files, or deleting content. For moving or renaming files, use the Bash tool with the 'mv' command instead. For larger file edits, use the FileWrite tool to overwrite files.
+
+Before using this tool:
+
+1. Use the FileRead tool to understand the file's contents and context
+
+2. Verify the directory path is correct (only applicable when creating new files):
+ - Use the LS tool to verify the parent directory exists and is the correct location
+
+To make a file edit, provide the following:
+
+1. file_path: The absolute path to the file to modify (must be absolute, not relative)
+2. old_string: The text to replace (must be unique within the file, and must match the file contents exactly, including all whitespace and indentation)
+3. new_string: The edited text to replace the old_string
+4. replace_all: Replace all occurrences of old_string (default false)
+
+Special cases:
+
+- To create a new file: provide file_path and new_string, leave old_string empty
+- To delete content: provide file_path and old_string, leave new_string empty
+
+The tool will replace ONE occurrence of old_string with new_string in the specified file by default. Set replace_all to true to replace all occurrences.
+
+CRITICAL REQUIREMENTS FOR USING THIS TOOL:
+
+1. UNIQUENESS: When replace_all is false (default), the old_string MUST uniquely identify the specific instance you want to change. This means:
+ - Include AT LEAST 3-5 lines of context BEFORE the change point
+ - Include AT LEAST 3-5 lines of context AFTER the change point
+ - Include all whitespace, indentation, and surrounding code exactly as it appears in the file
+
+2. SINGLE INSTANCE: When replace_all is false, this tool can only change ONE instance at a time. If you need to change multiple instances:
+ - Set replace_all to true to replace all occurrences at once
+ - Or make separate calls to this tool for each instance
+ - Each call must uniquely identify its specific instance using extensive context
+
+3. VERIFICATION: Before using this tool:
+ - Check how many instances of the target text exist in the file
+ - If multiple instances exist and replace_all is false, gather enough context to uniquely identify each one
+ - Plan separate tool calls for each instance or use replace_all
+
+WARNING: If you do not follow these requirements:
+
+- The tool will fail if old_string matches multiple locations and replace_all is false
+- The tool will fail if old_string doesn't match exactly (including whitespace)
+- You may change the wrong instance if you don't include enough context
+
+When making edits:
+
+- Ensure the edit results in idiomatic, correct code
+- Do not leave the code in a broken state
+- Always use absolute file paths (starting with /)
+
+WINDOWS NOTES:
+
+- File paths should use forward slashes (/) for cross-platform compatibility
+- On Windows, absolute paths start with drive letters (C:/) but forward slashes work throughout
+- File permissions are handled automatically by the Go runtime
+- Always assumes \n for line endings. The tool will handle \r\n conversion automatically if needed.
+
+Remember: when making multiple file edits in a row to the same file, you should prefer to send all edits in a single message with multiple calls to this tool, rather than multiple messages with a single call each.
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"io"
@@ -33,38 +34,10 @@ type fetchTool struct {
workingDir string
}
-const (
- FetchToolName = "fetch"
- fetchToolDescription = `Fetches content from a URL and returns it in the specified format.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to download content from a URL
-- Helpful for retrieving documentation, API responses, or web content
-- Useful for getting external information to assist with tasks
-
-HOW TO USE:
-- Provide the URL to fetch content from
-- Specify the desired output format (text, markdown, or html)
-- Optionally set a timeout for the request
-
-FEATURES:
-- Supports three output formats: text, markdown, and html
-- Automatically handles HTTP redirects
-- Sets reasonable timeouts to prevent hanging
-- Validates input parameters before making requests
-
-LIMITATIONS:
-- Maximum response size is 5MB
-- Only supports HTTP and HTTPS protocols
-- Cannot handle authentication or cookies
-- Some websites may block automated requests
-
-TIPS:
-- Use text format for plain text content or simple API responses
-- Use markdown format for content that should be rendered with formatting
-- Use html format when you need the raw HTML structure
-- Set appropriate timeouts for potentially slow websites`
-)
+const FetchToolName = "fetch"
+
+//go:embed fetch.md
+var fetchDescription []byte
func NewFetchTool(permissions permission.Service, workingDir string) BaseTool {
return &fetchTool{
@@ -88,7 +61,7 @@ func (t *fetchTool) Name() string {
func (t *fetchTool) Info() ToolInfo {
return ToolInfo{
Name: FetchToolName,
- Description: fetchToolDescription,
+ Description: string(fetchDescription),
Parameters: map[string]any{
"url": map[string]any{
"type": "string",
@@ -0,0 +1,34 @@
+Fetches content from a URL and returns it in the specified format.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to download content from a URL
+- Helpful for retrieving documentation, API responses, or web content
+- Useful for getting external information to assist with tasks
+
+HOW TO USE:
+
+- Provide the URL to fetch content from
+- Specify the desired output format (text, markdown, or html)
+- Optionally set a timeout for the request
+
+FEATURES:
+
+- Supports three output formats: text, markdown, and html
+- Automatically handles HTTP redirects
+- Sets reasonable timeouts to prevent hanging
+- Validates input parameters before making requests
+
+LIMITATIONS:
+
+- Maximum response size is 5MB
+- Only supports HTTP and HTTPS protocols
+- Cannot handle authentication or cookies
+- Some websites may block automated requests
+
+TIPS:
+
+- Use text format for plain text content or simple API responses
+- Use markdown format for content that should be rendered with formatting
+- Use html format when you need the raw HTML structure
+- Set appropriate timeouts for potentially slow websites
@@ -3,6 +3,7 @@ package tools
import (
"bytes"
"context"
+ _ "embed"
"encoding/json"
"fmt"
"log/slog"
@@ -14,48 +15,10 @@ import (
"github.com/charmbracelet/crush/internal/fsext"
)
-const (
- GlobToolName = "glob"
- globDescription = `Fast file pattern matching tool that finds files by name and pattern, returning matching paths sorted by modification time (newest first).
-
-WHEN TO USE THIS TOOL:
-- Use when you need to find files by name patterns or extensions
-- Great for finding specific file types across a directory structure
-- Useful for discovering files that match certain naming conventions
-
-HOW TO USE:
-- Provide a glob pattern to match against file paths
-- Optionally specify a starting directory (defaults to current working directory)
-- Results are sorted with most recently modified files first
-
-GLOB PATTERN SYNTAX:
-- '*' matches any sequence of non-separator characters
-- '**' matches any sequence of characters, including separators
-- '?' matches any single non-separator character
-- '[...]' matches any character in the brackets
-- '[!...]' matches any character not in the brackets
-
-COMMON PATTERN EXAMPLES:
-- '*.js' - Find all JavaScript files in the current directory
-- '**/*.js' - Find all JavaScript files in any subdirectory
-- 'src/**/*.{ts,tsx}' - Find all TypeScript files in the src directory
-- '*.{html,css,js}' - Find all HTML, CSS, and JS files
-
-LIMITATIONS:
-- Results are limited to 100 files (newest first)
-- Does not search file contents (use Grep tool for that)
-- Hidden files (starting with '.') are skipped
-
-WINDOWS NOTES:
-- Path separators are handled automatically (both / and \ work)
-- Uses ripgrep (rg) command if available, otherwise falls back to built-in Go implementation
-
-TIPS:
-- Patterns should use forward slashes (/) for cross-platform compatibility
-- For the most useful results, combine with the Grep tool: first find files with Glob, then search their contents with Grep
-- When doing iterative exploration that may require multiple rounds of searching, consider using the Agent tool instead
-- Always check if results are truncated and refine your search pattern if needed`
-)
+const GlobToolName = "glob"
+
+//go:embed glob.md
+var globDescription []byte
type GlobParams struct {
Pattern string `json:"pattern"`
@@ -84,7 +47,7 @@ func (g *globTool) Name() string {
func (g *globTool) Info() ToolInfo {
return ToolInfo{
Name: GlobToolName,
- Description: globDescription,
+ Description: string(globDescription),
Parameters: map[string]any{
"pattern": map[string]any{
"type": "string",
@@ -0,0 +1,46 @@
+Fast file pattern matching tool that finds files by name and pattern, returning matching paths sorted by modification time (newest first).
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to find files by name patterns or extensions
+- Great for finding specific file types across a directory structure
+- Useful for discovering files that match certain naming conventions
+
+HOW TO USE:
+
+- Provide a glob pattern to match against file paths
+- Optionally specify a starting directory (defaults to current working directory)
+- Results are sorted with most recently modified files first
+
+GLOB PATTERN SYNTAX:
+
+- '\*' matches any sequence of non-separator characters
+- '\*\*' matches any sequence of characters, including separators
+- '?' matches any single non-separator character
+- '[...]' matches any character in the brackets
+- '[!...]' matches any character not in the brackets
+
+COMMON PATTERN EXAMPLES:
+
+- '\*.js' - Find all JavaScript files in the current directory
+- '\*\*/\*.js' - Find all JavaScript files in any subdirectory
+- 'src/\*\*/\*.{ts,tsx}' - Find all TypeScript files in the src directory
+- '\*.{html,css,js}' - Find all HTML, CSS, and JS files
+
+LIMITATIONS:
+
+- Results are limited to 100 files (newest first)
+- Does not search file contents (use Grep tool for that)
+- Hidden files (starting with '.') are skipped
+
+WINDOWS NOTES:
+
+- Path separators are handled automatically (both / and \ work)
+- Uses ripgrep (rg) command if available, otherwise falls back to built-in Go implementation
+
+TIPS:
+
+- Patterns should use forward slashes (/) for cross-platform compatibility
+- For the most useful results, combine with the Grep tool: first find files with Glob, then search their contents with Grep
+- When doing iterative exploration that may require multiple rounds of searching, consider using the Agent tool instead
+- Always check if results are truncated and refine your search pattern if needed
@@ -3,6 +3,7 @@ package tools
import (
"bufio"
"context"
+ _ "embed"
"encoding/json"
"fmt"
"io"
@@ -92,55 +93,10 @@ type grepTool struct {
workingDir string
}
-const (
- GrepToolName = "grep"
- grepDescription = `Fast content search tool that finds files containing specific text or patterns, returning matching file paths sorted by modification time (newest first).
-
-WHEN TO USE THIS TOOL:
-- Use when you need to find files containing specific text or patterns
-- Great for searching code bases for function names, variable declarations, or error messages
-- Useful for finding all files that use a particular API or pattern
-
-HOW TO USE:
-- Provide a regex pattern to search for within file contents
-- Set literal_text=true if you want to search for the exact text with special characters (recommended for non-regex users)
-- Optionally specify a starting directory (defaults to current working directory)
-- Optionally provide an include pattern to filter which files to search
-- Results are sorted with most recently modified files first
-
-REGEX PATTERN SYNTAX (when literal_text=false):
-- Supports standard regular expression syntax
-- 'function' searches for the literal text "function"
-- 'log\..*Error' finds text starting with "log." and ending with "Error"
-- 'import\s+.*\s+from' finds import statements in JavaScript/TypeScript
-
-COMMON INCLUDE PATTERN EXAMPLES:
-- '*.js' - Only search JavaScript files
-- '*.{ts,tsx}' - Only search TypeScript files
-- '*.go' - Only search Go files
-
-LIMITATIONS:
-- Results are limited to 100 files (newest first)
-- Performance depends on the number of files being searched
-- Very large binary files may be skipped
-- Hidden files (starting with '.') are skipped
-
-IGNORE FILE SUPPORT:
-- Respects .gitignore patterns to skip ignored files and directories
-- Respects .crushignore patterns for additional ignore rules
-- Both ignore files are automatically detected in the search root directory
-
-CROSS-PLATFORM NOTES:
-- Uses ripgrep (rg) command if available for better performance
-- Falls back to built-in Go implementation if ripgrep is not available
-- File paths are normalized automatically for cross-platform compatibility
-
-TIPS:
-- For faster, more targeted searches, first use Glob to find relevant files, then use Grep
-- When doing iterative exploration that may require multiple rounds of searching, consider using the Agent tool instead
-- Always check if results are truncated and refine your search pattern if needed
-- Use literal_text=true when searching for exact text containing special characters like dots, parentheses, etc.`
-)
+const GrepToolName = "grep"
+
+//go:embed grep.md
+var grepDescription []byte
func NewGrepTool(workingDir string) BaseTool {
return &grepTool{
@@ -155,7 +111,7 @@ func (g *grepTool) Name() string {
func (g *grepTool) Info() ToolInfo {
return ToolInfo{
Name: GrepToolName,
- Description: grepDescription,
+ Description: string(grepDescription),
Parameters: map[string]any{
"pattern": map[string]any{
"type": "string",
@@ -279,11 +235,13 @@ func searchWithRipgrep(ctx context.Context, pattern, path, include string) ([]gr
return nil, fmt.Errorf("ripgrep not found in $PATH")
}
- cmd.Args = append(
- cmd.Args,
- "--ignore-file", filepath.Join(path, ".gitignore"),
- "--ignore-file", filepath.Join(path, ".crushignore"),
- )
+ // Only add ignore files if they exist
+ for _, ignoreFile := range []string{".gitignore", ".crushignore"} {
+ ignorePath := filepath.Join(path, ignoreFile)
+ if _, err := os.Stat(ignorePath); err == nil {
+ cmd.Args = append(cmd.Args, "--ignore-file", ignorePath)
+ }
+ }
output, err := cmd.Output()
if err != nil {
@@ -301,18 +259,16 @@ func searchWithRipgrep(ctx context.Context, pattern, path, include string) ([]gr
continue
}
- // Parse ripgrep output format: file:line:content
- parts := strings.SplitN(line, ":", 3)
- if len(parts) < 3 {
+ // Parse ripgrep output using null separation
+ filePath, lineNumStr, lineText, ok := parseRipgrepLine(line)
+ if !ok {
continue
}
- filePath := parts[0]
- lineNum, err := strconv.Atoi(parts[1])
+ lineNum, err := strconv.Atoi(lineNumStr)
if err != nil {
continue
}
- lineText := parts[2]
fileInfo, err := os.Stat(filePath)
if err != nil {
@@ -330,6 +286,33 @@ func searchWithRipgrep(ctx context.Context, pattern, path, include string) ([]gr
return matches, nil
}
+// parseRipgrepLine parses ripgrep output with null separation to handle Windows paths
+func parseRipgrepLine(line string) (filePath, lineNum, lineText string, ok bool) {
+ // Split on null byte first to separate filename from rest
+ parts := strings.SplitN(line, "\x00", 2)
+ if len(parts) != 2 {
+ return "", "", "", false
+ }
+
+ filePath = parts[0]
+ remainder := parts[1]
+
+ // Now split the remainder on first colon: "linenum:content"
+ colonIndex := strings.Index(remainder, ":")
+ if colonIndex == -1 {
+ return "", "", "", false
+ }
+
+ lineNumStr := remainder[:colonIndex]
+ lineText = remainder[colonIndex+1:]
+
+ if _, err := strconv.Atoi(lineNumStr); err != nil {
+ return "", "", "", false
+ }
+
+ return filePath, lineNumStr, lineText, true
+}
+
func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error) {
matches := []grepMatch{}
@@ -357,14 +340,24 @@ func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error
}
if info.IsDir() {
- return nil // Skip directories
+ // Check if directory should be skipped
+ if walker.ShouldSkip(path) {
+ return filepath.SkipDir
+ }
+ return nil // Continue into directory
}
- // Use walker's shouldSkip method instead of just SkipHidden
+ // Use walker's shouldSkip method for files
if walker.ShouldSkip(path) {
return nil
}
+ // Skip hidden files (starting with a dot) to match ripgrep's default behavior
+ base := filepath.Base(path)
+ if base != "." && strings.HasPrefix(base, ".") {
+ return nil
+ }
+
if includePattern != nil && !includePattern.MatchString(path) {
return nil
}
@@ -0,0 +1,54 @@
+Fast content search tool that finds files containing specific text or patterns, returning matching file paths sorted by modification time (newest first).
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to find files containing specific text or patterns
+- Great for searching code bases for function names, variable declarations, or error messages
+- Useful for finding all files that use a particular API or pattern
+
+HOW TO USE:
+
+- Provide a regex pattern to search for within file contents
+- Set literal_text=true if you want to search for the exact text with special characters (recommended for non-regex users)
+- Optionally specify a starting directory (defaults to current working directory)
+- Optionally provide an include pattern to filter which files to search
+- Results are sorted with most recently modified files first
+
+REGEX PATTERN SYNTAX (when literal_text=false):
+
+- Supports standard regular expression syntax
+- 'function' searches for the literal text "function"
+- 'log\..\*Error' finds text starting with "log." and ending with "Error"
+- 'import\s+.\*\s+from' finds import statements in JavaScript/TypeScript
+
+COMMON INCLUDE PATTERN EXAMPLES:
+
+- '\*.js' - Only search JavaScript files
+- '\*.{ts,tsx}' - Only search TypeScript files
+- '\*.go' - Only search Go files
+
+LIMITATIONS:
+
+- Results are limited to 100 files (newest first)
+- Performance depends on the number of files being searched
+- Very large binary files may be skipped
+- Hidden files (starting with '.') are skipped
+
+IGNORE FILE SUPPORT:
+
+- Respects .gitignore patterns to skip ignored files and directories
+- Respects .crushignore patterns for additional ignore rules
+- Both ignore files are automatically detected in the search root directory
+
+CROSS-PLATFORM NOTES:
+
+- Uses ripgrep (rg) command if available for better performance
+- Falls back to built-in Go implementation if ripgrep is not available
+- File paths are normalized automatically for cross-platform compatibility
+
+TIPS:
+
+- For faster, more targeted searches, first use Glob to find relevant files, then use Grep
+- When doing iterative exploration that may require multiple rounds of searching, consider using the Agent tool instead
+- Always check if results are truncated and refine your search pattern if needed
+- Use literal_text=true when searching for exact text containing special characters like dots, parentheses, etc.
@@ -1,8 +1,6 @@
package tools
import (
- "context"
- "encoding/json"
"os"
"path/filepath"
"regexp"
@@ -59,6 +57,7 @@ func TestGlobToRegexCaching(t *testing.T) {
}
func TestGrepWithIgnoreFiles(t *testing.T) {
+ t.Parallel()
tempDir := t.TempDir()
// Create test files
@@ -84,32 +83,42 @@ func TestGrepWithIgnoreFiles(t *testing.T) {
crushignoreContent := "node_modules/\n"
require.NoError(t, os.WriteFile(filepath.Join(tempDir, ".crushignore"), []byte(crushignoreContent), 0o644))
- // Create grep tool
- grepTool := NewGrepTool(tempDir)
+ // Test both implementations
+ for name, fn := range map[string]func(pattern, path, include string) ([]grepMatch, error){
+ "regex": searchFilesWithRegex,
+ "rg": func(pattern, path, include string) ([]grepMatch, error) {
+ return searchWithRipgrep(t.Context(), pattern, path, include)
+ },
+ } {
+ t.Run(name, func(t *testing.T) {
+ t.Parallel()
+
+ if name == "rg" && getRg() == "" {
+ t.Skip("rg is not in $PATH")
+ }
+
+ matches, err := fn("hello world", tempDir, "")
+ require.NoError(t, err)
- // Create grep parameters
- params := GrepParams{
- Pattern: "hello world",
- Path: tempDir,
+ // Convert matches to a set of file paths for easier testing
+ foundFiles := make(map[string]bool)
+ for _, match := range matches {
+ foundFiles[filepath.Base(match.path)] = true
+ }
+
+ // Should find file1.txt and file2.txt
+ require.True(t, foundFiles["file1.txt"], "Should find file1.txt")
+ require.True(t, foundFiles["file2.txt"], "Should find file2.txt")
+
+ // Should NOT find ignored files
+ require.False(t, foundFiles["file3.txt"], "Should not find file3.txt (ignored by .gitignore)")
+ require.False(t, foundFiles["lib.js"], "Should not find lib.js (ignored by .crushignore)")
+ require.False(t, foundFiles["secret.key"], "Should not find secret.key (ignored by .gitignore)")
+
+ // Should find exactly 2 matches
+ require.Equal(t, 2, len(matches), "Should find exactly 2 matches")
+ })
}
- paramsJSON, err := json.Marshal(params)
- require.NoError(t, err)
-
- // Run grep
- call := ToolCall{Input: string(paramsJSON)}
- response, err := grepTool.Run(context.Background(), call)
- require.NoError(t, err)
-
- // Check results - should only find file1.txt and file2.txt
- // ignored/file3.txt should be ignored by .gitignore
- // node_modules/lib.js should be ignored by .crushignore
- // secret.key should be ignored by .gitignore
- result := response.Content
- require.Contains(t, result, "file1.txt")
- require.Contains(t, result, "file2.txt")
- require.NotContains(t, result, "file3.txt")
- require.NotContains(t, result, "lib.js")
- require.NotContains(t, result, "secret.key")
}
func TestSearchImplementations(t *testing.T) {
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"os"
@@ -40,44 +41,13 @@ type lsTool struct {
}
const (
- LSToolName = "ls"
- MaxLSFiles = 1000
- lsDescription = `Directory listing tool that shows files and subdirectories in a tree structure, helping you explore and understand the project organization.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to explore the structure of a directory
-- Helpful for understanding the organization of a project
-- Good first step when getting familiar with a new codebase
-
-HOW TO USE:
-- Provide a path to list (defaults to current working directory)
-- Optionally specify glob patterns to ignore
-- Results are displayed in a tree structure
-
-FEATURES:
-- Displays a hierarchical view of files and directories
-- Automatically skips hidden files/directories (starting with '.')
-- Skips common system directories like __pycache__
-- Can filter out files matching specific patterns
-
-LIMITATIONS:
-- Results are limited to 1000 files
-- Very large directories will be truncated
-- Does not show file sizes or permissions
-- Cannot recursively list all directories in a large project
-
-WINDOWS NOTES:
-- Hidden file detection uses Unix convention (files starting with '.')
-- Windows-specific hidden files (with hidden attribute) are not automatically skipped
-- Common Windows directories like System32, Program Files are not in default ignore list
-- Path separators are handled automatically (both / and \ work)
-
-TIPS:
-- Use Glob tool for finding files by name patterns instead of browsing
-- Use Grep tool for searching file contents
-- Combine with other tools for more effective exploration`
+ LSToolName = "ls"
+ MaxLSFiles = 1000
)
+//go:embed ls.md
+var lsDescription []byte
+
func NewLsTool(permissions permission.Service, workingDir string) BaseTool {
return &lsTool{
workingDir: workingDir,
@@ -92,7 +62,7 @@ func (l *lsTool) Name() string {
func (l *lsTool) Info() ToolInfo {
return ToolInfo{
Name: LSToolName,
- Description: lsDescription,
+ Description: string(lsDescription),
Parameters: map[string]any{
"path": map[string]any{
"type": "string",
@@ -0,0 +1,40 @@
+Directory listing tool that shows files and subdirectories in a tree structure, helping you explore and understand the project organization.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to explore the structure of a directory
+- Helpful for understanding the organization of a project
+- Good first step when getting familiar with a new codebase
+
+HOW TO USE:
+
+- Provide a path to list (defaults to current working directory)
+- Optionally specify glob patterns to ignore
+- Results are displayed in a tree structure
+
+FEATURES:
+
+- Displays a hierarchical view of files and directories
+- Automatically skips hidden files/directories (starting with '.')
+- Skips common system directories like \_\_pycache\_\_
+- Can filter out files matching specific patterns
+
+LIMITATIONS:
+
+- Results are limited to 1000 files
+- Very large directories will be truncated
+- Does not show file sizes or permissions
+- Cannot recursively list all directories in a large project
+
+WINDOWS NOTES:
+
+- Hidden file detection uses Unix convention (files starting with '.')
+- Windows-specific hidden files (with hidden attribute) are not automatically skipped
+- Common Windows directories like System32, Program Files are not in default ignore list
+- Path separators are handled automatically (both / and \ work)
+
+TIPS:
+
+- Use Glob tool for finding files by name patterns instead of browsing
+- Use Grep tool for searching file contents
+- Combine with other tools for more effective exploration
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"log/slog"
@@ -10,6 +11,7 @@ import (
"strings"
"time"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/diff"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/history"
@@ -43,59 +45,18 @@ type MultiEditResponseMetadata struct {
}
type multiEditTool struct {
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
permissions permission.Service
files history.Service
workingDir string
}
-const (
- MultiEditToolName = "multiedit"
- multiEditDescription = `This is a tool for making multiple edits to a single file in one operation. It is built on top of the Edit tool and allows you to perform multiple find-and-replace operations efficiently. Prefer this tool over the Edit tool when you need to make multiple edits to the same file.
-
-Before using this tool:
-
-1. Use the Read tool to understand the file's contents and context
-
-2. Verify the directory path is correct
-
-To make multiple file edits, provide the following:
-1. file_path: The absolute path to the file to modify (must be absolute, not relative)
-2. edits: An array of edit operations to perform, where each edit contains:
- - old_string: The text to replace (must match the file contents exactly, including all whitespace and indentation)
- - new_string: The edited text to replace the old_string
- - replace_all: Replace all occurrences of old_string. This parameter is optional and defaults to false.
-
-IMPORTANT:
-- All edits are applied in sequence, in the order they are provided
-- Each edit operates on the result of the previous edit
-- All edits must be valid for the operation to succeed - if any edit fails, none will be applied
-- This tool is ideal when you need to make several changes to different parts of the same file
-
-CRITICAL REQUIREMENTS:
-1. All edits follow the same requirements as the single Edit tool
-2. The edits are atomic - either all succeed or none are applied
-3. Plan your edits carefully to avoid conflicts between sequential operations
-
-WARNING:
-- The tool will fail if edits.old_string doesn't match the file contents exactly (including whitespace)
-- The tool will fail if edits.old_string and edits.new_string are the same
-- Since edits are applied in sequence, ensure that earlier edits don't affect the text that later edits are trying to find
-
-When making edits:
-- Ensure all edits result in idiomatic, correct code
-- Do not leave the code in a broken state
-- Always use absolute file paths (starting with /)
-- Only use emojis if the user explicitly requests it. Avoid adding emojis to files unless asked.
-- Use replace_all for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance.
-
-If you want to create a new file, use:
-- A new file path, including dir name if needed
-- First edit: empty old_string and the new file's contents as new_string
-- Subsequent edits: normal edit operations on the created content`
-)
+const MultiEditToolName = "multiedit"
+
+//go:embed multiedit.md
+var multieditDescription []byte
-func NewMultiEditTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service, workingDir string) BaseTool {
+func NewMultiEditTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, files history.Service, workingDir string) BaseTool {
return &multiEditTool{
lspClients: lspClients,
permissions: permissions,
@@ -111,7 +72,7 @@ func (m *multiEditTool) Name() string {
func (m *multiEditTool) Info() ToolInfo {
return ToolInfo{
Name: MultiEditToolName,
- Description: multiEditDescription,
+ Description: string(multieditDescription),
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
@@ -188,8 +149,10 @@ func (m *multiEditTool) Run(ctx context.Context, call ToolCall) (ToolResponse, e
return response, nil
}
+ // Notify LSP clients about the change
+ notifyLSPs(ctx, m.lspClients, params.FilePath)
+
// Wait for LSP diagnostics and add them to the response
- waitForLspDiagnostics(ctx, params.FilePath, m.lspClients)
text := fmt.Sprintf("<result>\n%s\n</result>\n", response.Content)
text += getDiagnostics(params.FilePath, m.lspClients)
response.Content = text
@@ -335,7 +298,7 @@ func (m *multiEditTool) processMultiEditExistingFile(ctx context.Context, params
return ToolResponse{}, fmt.Errorf("failed to read file: %w", err)
}
- oldContent := string(content)
+ oldContent, isCrlf := fsext.ToUnixLineEndings(string(content))
currentContent := oldContent
// Apply all edits sequentially
@@ -377,6 +340,10 @@ func (m *multiEditTool) processMultiEditExistingFile(ctx context.Context, params
return ToolResponse{}, permission.ErrorPermissionDenied
}
+ if isCrlf {
+ currentContent, _ = fsext.ToWindowsLineEndings(currentContent)
+ }
+
// Write the updated content
err = os.WriteFile(params.FilePath, []byte(currentContent), 0o644)
if err != nil {
@@ -0,0 +1,48 @@
+This is a tool for making multiple edits to a single file in one operation. It is built on top of the Edit tool and allows you to perform multiple find-and-replace operations efficiently. Prefer this tool over the Edit tool when you need to make multiple edits to the same file.
+
+Before using this tool:
+
+1. Use the Read tool to understand the file's contents and context
+
+2. Verify the directory path is correct
+
+To make multiple file edits, provide the following:
+
+1. file_path: The absolute path to the file to modify (must be absolute, not relative)
+2. edits: An array of edit operations to perform, where each edit contains:
+ - old_string: The text to replace (must match the file contents exactly, including all whitespace and indentation)
+ - new_string: The edited text to replace the old_string
+ - replace_all: Replace all occurrences of old_string. This parameter is optional and defaults to false.
+
+IMPORTANT:
+
+- All edits are applied in sequence, in the order they are provided
+- Each edit operates on the result of the previous edit
+- All edits must be valid for the operation to succeed - if any edit fails, none will be applied
+- This tool is ideal when you need to make several changes to different parts of the same file
+
+CRITICAL REQUIREMENTS:
+
+1. All edits follow the same requirements as the single Edit tool
+2. The edits are atomic - either all succeed or none are applied
+3. Plan your edits carefully to avoid conflicts between sequential operations
+
+WARNING:
+
+- The tool will fail if edits.old_string doesn't match the file contents exactly (including whitespace)
+- The tool will fail if edits.old_string and edits.new_string are the same
+- Since edits are applied in sequence, ensure that earlier edits don't affect the text that later edits are trying to find
+
+When making edits:
+
+- Ensure all edits result in idiomatic, correct code
+- Do not leave the code in a broken state
+- Always use absolute file paths (starting with /)
+- Only use emojis if the user explicitly requests it. Avoid adding emojis to files unless asked.
+- Use replace_all for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance.
+
+If you want to create a new file, use:
+
+- A new file path, including dir name if needed
+- First edit: empty old_string and the new file's contents as new_string
+- Subsequent edits: normal edit operations on the created content
@@ -42,8 +42,8 @@ func getRgSearchCmd(ctx context.Context, pattern, path, include string) *exec.Cm
if name == "" {
return nil
}
- // Use -n to show line numbers and include the matched line
- args := []string{"-H", "-n", pattern}
+ // Use -n to show line numbers, -0 for null separation to handle Windows paths
+ args := []string{"-H", "-n", "-0", pattern}
if include != "" {
args = append(args, "--glob", include)
}
@@ -52,21 +52,6 @@ var safeCommands = []string{
"git show",
"git status",
"git tag",
-
- // Go
- "go build",
- "go clean",
- "go doc",
- "go env",
- "go fmt",
- "go help",
- "go install",
- "go list",
- "go mod",
- "go run",
- "go test",
- "go version",
- "go vet",
}
func init() {
@@ -3,6 +3,7 @@ package tools
import (
"bytes"
"context"
+ _ "embed"
"encoding/json"
"fmt"
"io"
@@ -27,103 +28,10 @@ type sourcegraphTool struct {
client *http.Client
}
-const (
- SourcegraphToolName = "sourcegraph"
- sourcegraphToolDescription = `Search code across public repositories using Sourcegraph's GraphQL API.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to find code examples or implementations across public repositories
-- Helpful for researching how others have solved similar problems
-- Useful for discovering patterns and best practices in open source code
-
-HOW TO USE:
-- Provide a search query using Sourcegraph's query syntax
-- Optionally specify the number of results to return (default: 10)
-- Optionally set a timeout for the request
-
-QUERY SYNTAX:
-- Basic search: "fmt.Println" searches for exact matches
-- File filters: "file:.go fmt.Println" limits to Go files
-- Repository filters: "repo:^github\.com/golang/go$ fmt.Println" limits to specific repos
-- Language filters: "lang:go fmt.Println" limits to Go code
-- Boolean operators: "fmt.Println AND log.Fatal" for combined terms
-- Regular expressions: "fmt\.(Print|Printf|Println)" for pattern matching
-- Quoted strings: "\"exact phrase\"" for exact phrase matching
-- Exclude filters: "-file:test" or "-repo:forks" to exclude matches
-
-ADVANCED FILTERS:
-- Repository filters:
- * "repo:name" - Match repositories with name containing "name"
- * "repo:^github\.com/org/repo$" - Exact repository match
- * "repo:org/repo@branch" - Search specific branch
- * "repo:org/repo rev:branch" - Alternative branch syntax
- * "-repo:name" - Exclude repositories
- * "fork:yes" or "fork:only" - Include or only show forks
- * "archived:yes" or "archived:only" - Include or only show archived repos
- * "visibility:public" or "visibility:private" - Filter by visibility
-
-- File filters:
- * "file:\.js$" - Files with .js extension
- * "file:internal/" - Files in internal directory
- * "-file:test" - Exclude test files
- * "file:has.content(Copyright)" - Files containing "Copyright"
- * "file:has.contributor([email protected])" - Files with specific contributor
-
-- Content filters:
- * "content:\"exact string\"" - Search for exact string
- * "-content:\"unwanted\"" - Exclude files with unwanted content
- * "case:yes" - Case-sensitive search
-
-- Type filters:
- * "type:symbol" - Search for symbols (functions, classes, etc.)
- * "type:file" - Search file content only
- * "type:path" - Search filenames only
- * "type:diff" - Search code changes
- * "type:commit" - Search commit messages
-
-- Commit/diff search:
- * "after:\"1 month ago\"" - Commits after date
- * "before:\"2023-01-01\"" - Commits before date
- * "author:name" - Commits by author
- * "message:\"fix bug\"" - Commits with message
-
-- Result selection:
- * "select:repo" - Show only repository names
- * "select:file" - Show only file paths
- * "select:content" - Show only matching content
- * "select:symbol" - Show only matching symbols
-
-- Result control:
- * "count:100" - Return up to 100 results
- * "count:all" - Return all results
- * "timeout:30s" - Set search timeout
-
-EXAMPLES:
-- "file:.go context.WithTimeout" - Find Go code using context.WithTimeout
-- "lang:typescript useState type:symbol" - Find TypeScript React useState hooks
-- "repo:^github\.com/kubernetes/kubernetes$ pod list type:file" - Find Kubernetes files related to pod listing
-- "repo:sourcegraph/sourcegraph$ after:\"3 months ago\" type:diff database" - Recent changes to database code
-- "file:Dockerfile (alpine OR ubuntu) -content:alpine:latest" - Dockerfiles with specific base images
-- "repo:has.path(\.py) file:requirements.txt tensorflow" - Python projects using TensorFlow
-
-BOOLEAN OPERATORS:
-- "term1 AND term2" - Results containing both terms
-- "term1 OR term2" - Results containing either term
-- "term1 NOT term2" - Results with term1 but not term2
-- "term1 and (term2 or term3)" - Grouping with parentheses
-
-LIMITATIONS:
-- Only searches public repositories
-- Rate limits may apply
-- Complex queries may take longer to execute
-- Maximum of 20 results per query
-
-TIPS:
-- Use specific file extensions to narrow results
-- Add repo: filters for more targeted searches
-- Use type:symbol to find function/method definitions
-- Use type:file to find relevant files`
-)
+const SourcegraphToolName = "sourcegraph"
+
+//go:embed sourcegraph.md
+var sourcegraphDescription []byte
func NewSourcegraphTool() BaseTool {
return &sourcegraphTool{
@@ -145,7 +53,7 @@ func (t *sourcegraphTool) Name() string {
func (t *sourcegraphTool) Info() ToolInfo {
return ToolInfo{
Name: SourcegraphToolName,
- Description: sourcegraphToolDescription,
+ Description: string(sourcegraphDescription),
Parameters: map[string]any{
"query": map[string]any{
"type": "string",
@@ -0,0 +1,102 @@
+Search code across public repositories using Sourcegraph's GraphQL API.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to find code examples or implementations across public repositories
+- Helpful for researching how others have solved similar problems
+- Useful for discovering patterns and best practices in open source code
+
+HOW TO USE:
+
+- Provide a search query using Sourcegraph's query syntax
+- Optionally specify the number of results to return (default: 10)
+- Optionally set a timeout for the request
+
+QUERY SYNTAX:
+
+- Basic search: "fmt.Println" searches for exact matches
+- File filters: "file:.go fmt.Println" limits to Go files
+- Repository filters: "repo:^github\.com/golang/go$ fmt.Println" limits to specific repos
+- Language filters: "lang:go fmt.Println" limits to Go code
+- Boolean operators: "fmt.Println AND log.Fatal" for combined terms
+- Regular expressions: "fmt\.(Print|Printf|Println)" for pattern matching
+- Quoted strings: "\"exact phrase\"" for exact phrase matching
+- Exclude filters: "-file:test" or "-repo:forks" to exclude matches
+
+ADVANCED FILTERS:
+
+- Repository filters:
+ - "repo:name" - Match repositories with name containing "name"
+ - "repo:^github\.com/org/repo$" - Exact repository match
+ - "repo:org/repo@branch" - Search specific branch
+ - "repo:org/repo rev:branch" - Alternative branch syntax
+ - "-repo:name" - Exclude repositories
+ - "fork:yes" or "fork:only" - Include or only show forks
+ - "archived:yes" or "archived:only" - Include or only show archived repos
+ - "visibility:public" or "visibility:private" - Filter by visibility
+
+- File filters:
+ - "file:\.js$" - Files with .js extension
+ - "file:internal/" - Files in internal directory
+ - "-file:test" - Exclude test files
+ - "file:has.content(Copyright)" - Files containing "Copyright"
+ - "file:has.contributor([email protected])" - Files with specific contributor
+
+- Content filters:
+ - "content:\"exact string\"" - Search for exact string
+ - "-content:\"unwanted\"" - Exclude files with unwanted content
+ - "case:yes" - Case-sensitive search
+
+- Type filters:
+ - "type:symbol" - Search for symbols (functions, classes, etc.)
+ - "type:file" - Search file content only
+ - "type:path" - Search filenames only
+ - "type:diff" - Search code changes
+ - "type:commit" - Search commit messages
+
+- Commit/diff search:
+ - "after:\"1 month ago\"" - Commits after date
+ - "before:\"2023-01-01\"" - Commits before date
+ - "author:name" - Commits by author
+ - "message:\"fix bug\"" - Commits with message
+
+- Result selection:
+ - "select:repo" - Show only repository names
+ - "select:file" - Show only file paths
+ - "select:content" - Show only matching content
+ - "select:symbol" - Show only matching symbols
+
+- Result control:
+ - "count:100" - Return up to 100 results
+ - "count:all" - Return all results
+ - "timeout:30s" - Set search timeout
+
+EXAMPLES:
+
+- "file:.go context.WithTimeout" - Find Go code using context.WithTimeout
+- "lang:typescript useState type:symbol" - Find TypeScript React useState hooks
+- "repo:^github\.com/kubernetes/kubernetes$ pod list type:file" - Find Kubernetes files related to pod listing
+- "repo:sourcegraph/sourcegraph$ after:\"3 months ago\" type:diff database" - Recent changes to database code
+- "file:Dockerfile (alpine OR ubuntu) -content:alpine:latest" - Dockerfiles with specific base images
+- "repo:has.path(\.py) file:requirements.txt tensorflow" - Python projects using TensorFlow
+
+BOOLEAN OPERATORS:
+
+- "term1 AND term2" - Results containing both terms
+- "term1 OR term2" - Results containing either term
+- "term1 NOT term2" - Results with term1 but not term2
+- "term1 and (term2 or term3)" - Grouping with parentheses
+
+LIMITATIONS:
+
+- Only searches public repositories
+- Rate limits may apply
+- Complex queries may take longer to execute
+- Maximum of 20 results per query
+
+TIPS:
+
+- Use specific file extensions to narrow results
+- Add repo: filters for more targeted searches
+- Use type:symbol to find function/method definitions
+- Use type:file to find relevant files
@@ -3,6 +3,7 @@ package tools
import (
"bufio"
"context"
+ _ "embed"
"encoding/json"
"fmt"
"io"
@@ -11,10 +12,14 @@ import (
"strings"
"unicode/utf8"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/lsp"
"github.com/charmbracelet/crush/internal/permission"
)
+//go:embed view.md
+var viewDescription []byte
+
type ViewParams struct {
FilePath string `json:"file_path"`
Offset int `json:"offset"`
@@ -28,7 +33,7 @@ type ViewPermissionsParams struct {
}
type viewTool struct {
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
workingDir string
permissions permission.Service
}
@@ -43,45 +48,9 @@ const (
MaxReadSize = 250 * 1024
DefaultReadLimit = 2000
MaxLineLength = 2000
- viewDescription = `File viewing tool that reads and displays the contents of files with line numbers, allowing you to examine code, logs, or text data.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to read the contents of a specific file
-- Helpful for examining source code, configuration files, or log files
-- Perfect for looking at text-based file formats
-
-HOW TO USE:
-- Provide the path to the file you want to view
-- Optionally specify an offset to start reading from a specific line
-- Optionally specify a limit to control how many lines are read
-- Do not use this for directories use the ls tool instead
-
-FEATURES:
-- Displays file contents with line numbers for easy reference
-- Can read from any position in a file using the offset parameter
-- Handles large files by limiting the number of lines read
-- Automatically truncates very long lines for better display
-- Suggests similar file names when the requested file isn't found
-
-LIMITATIONS:
-- Maximum file size is 250KB
-- Default reading limit is 2000 lines
-- Lines longer than 2000 characters are truncated
-- Cannot display binary files or images
-- Images can be identified but not displayed
-
-WINDOWS NOTES:
-- Handles both Windows (CRLF) and Unix (LF) line endings automatically
-- File paths work with both forward slashes (/) and backslashes (\)
-- Text encoding is detected automatically for most common formats
-
-TIPS:
-- Use with Glob tool to first find files you want to view
-- For code exploration, first use Grep to find relevant files, then View to examine them
-- When viewing large files, use the offset parameter to read specific sections`
)
-func NewViewTool(lspClients map[string]*lsp.Client, permissions permission.Service, workingDir string) BaseTool {
+func NewViewTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, workingDir string) BaseTool {
return &viewTool{
lspClients: lspClients,
workingDir: workingDir,
@@ -96,7 +65,7 @@ func (v *viewTool) Name() string {
func (v *viewTool) Info() ToolInfo {
return ToolInfo{
Name: ViewToolName,
- Description: viewDescription,
+ Description: string(viewDescription),
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
@@ -233,7 +202,7 @@ func (v *viewTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error)
return ToolResponse{}, fmt.Errorf("error reading file: %w", err)
}
- notifyLspOpenFile(ctx, filePath, v.lspClients)
+ notifyLSPs(ctx, v.lspClients, filePath)
output := "<file>\n"
// Format the output with line numbers
output += addLineNumbers(content, params.Offset+1)
@@ -0,0 +1,42 @@
+File viewing tool that reads and displays the contents of files with line numbers, allowing you to examine code, logs, or text data.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to read the contents of a specific file
+- Helpful for examining source code, configuration files, or log files
+- Perfect for looking at text-based file formats
+
+HOW TO USE:
+
+- Provide the path to the file you want to view
+- Optionally specify an offset to start reading from a specific line
+- Optionally specify a limit to control how many lines are read
+- Do not use this for directories; use the ls tool instead
+
+FEATURES:
+
+- Displays file contents with line numbers for easy reference
+- Can read from any position in a file using the offset parameter
+- Handles large files by limiting the number of lines read
+- Automatically truncates very long lines for better display
+- Suggests similar file names when the requested file isn't found
+
+LIMITATIONS:
+
+- Maximum file size is 250KB
+- Default reading limit is 2000 lines
+- Lines longer than 2000 characters are truncated
+- Cannot display binary files or images
+- Images can be identified but not displayed
+
+WINDOWS NOTES:
+
+- Handles both Windows (CRLF) and Unix (LF) line endings automatically
+- File paths work with both forward slashes (/) and backslashes (\)
+- Text encoding is detected automatically for most common formats
+
+TIPS:
+
+- Use with Glob tool to first find files you want to view
+- For code exploration, first use Grep to find relevant files, then View to examine them
+- When viewing large files, use the offset parameter to read specific sections
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"log/slog"
@@ -10,6 +11,7 @@ import (
"strings"
"time"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/diff"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/history"
@@ -18,6 +20,9 @@ import (
"github.com/charmbracelet/crush/internal/permission"
)
+//go:embed write.md
+var writeDescription []byte
+
type WriteParams struct {
FilePath string `json:"file_path"`
Content string `json:"content"`
@@ -30,7 +35,7 @@ type WritePermissionsParams struct {
}
type writeTool struct {
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
permissions permission.Service
files history.Service
workingDir string
@@ -42,43 +47,9 @@ type WriteResponseMetadata struct {
Removals int `json:"removals"`
}
-const (
- WriteToolName = "write"
- writeDescription = `File writing tool that creates or updates files in the filesystem, allowing you to save or modify text content.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to create a new file
-- Helpful for updating existing files with modified content
-- Perfect for saving generated code, configurations, or text data
-
-HOW TO USE:
-- Provide the path to the file you want to write
-- Include the content to be written to the file
-- The tool will create any necessary parent directories
-
-FEATURES:
-- Can create new files or overwrite existing ones
-- Creates parent directories automatically if they don't exist
-- Checks if the file has been modified since last read for safety
-- Avoids unnecessary writes when content hasn't changed
-
-LIMITATIONS:
-- You should read a file before writing to it to avoid conflicts
-- Cannot append to files (rewrites the entire file)
-
-WINDOWS NOTES:
-- File permissions (0o755, 0o644) are Unix-style but work on Windows with appropriate translations
-- Use forward slashes (/) in paths for cross-platform compatibility
-- Windows file attributes and permissions are handled automatically by the Go runtime
-
-TIPS:
-- Use the View tool first to examine existing files before modifying them
-- Use the LS tool to verify the correct location when creating new files
-- Combine with Glob and Grep tools to find and modify multiple files
-- Always include descriptive comments when making changes to existing code`
-)
+const WriteToolName = "write"
-func NewWriteTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service, workingDir string) BaseTool {
+func NewWriteTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, files history.Service, workingDir string) BaseTool {
return &writeTool{
lspClients: lspClients,
permissions: permissions,
@@ -94,7 +65,7 @@ func (w *writeTool) Name() string {
func (w *writeTool) Info() ToolInfo {
return ToolInfo{
Name: WriteToolName,
- Description: writeDescription,
+ Description: string(writeDescription),
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
@@ -221,7 +192,8 @@ func (w *writeTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error
recordFileWrite(filePath)
recordFileRead(filePath)
- waitForLspDiagnostics(ctx, filePath, w.lspClients)
+
+ notifyLSPs(ctx, w.lspClients, params.FilePath)
result := fmt.Sprintf("File successfully written: %s", filePath)
result = fmt.Sprintf("<result>\n%s\n</result>", result)
@@ -0,0 +1,38 @@
+File writing tool that creates or updates files in the filesystem, allowing you to save or modify text content.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to create a new file
+- Helpful for updating existing files with modified content
+- Perfect for saving generated code, configurations, or text data
+
+HOW TO USE:
+
+- Provide the path to the file you want to write
+- Include the content to be written to the file
+- The tool will create any necessary parent directories
+
+FEATURES:
+
+- Can create new files or overwrite existing ones
+- Creates parent directories automatically if they don't exist
+- Checks if the file has been modified since last read for safety
+- Avoids unnecessary writes when content hasn't changed
+
+LIMITATIONS:
+
+- You should read a file before writing to it to avoid conflicts
+- Cannot append to files (rewrites the entire file)
+
+WINDOWS NOTES:
+
+- File permissions (0o755, 0o644) are Unix-style but work on Windows with appropriate translations
+- Use forward slashes (/) in paths for cross-platform compatibility
+- Windows file attributes and permissions are handled automatically by the Go runtime
+
+TIPS:
+
+- Use the View tool first to examine existing files before modifying them
+- Use the LS tool to verify the correct location when creating new files
+- Combine with Glob and Grep tools to find and modify multiple files
+- Always include descriptive comments when making changes to existing code
@@ -2,7 +2,6 @@ package log
import (
"bytes"
- "context"
"encoding/json"
"io"
"log/slog"
@@ -13,9 +12,6 @@ import (
// NewHTTPClient creates an HTTP client with debug logging enabled when debug mode is on.
func NewHTTPClient() *http.Client {
- if !slog.Default().Enabled(context.TODO(), slog.LevelDebug) {
- return http.DefaultClient
- }
return &http.Client{
Transport: &HTTPRoundTripLogger{
Transport: http.DefaultTransport,
@@ -9,6 +9,7 @@ import (
"sync/atomic"
"time"
+ "github.com/charmbracelet/crush/internal/event"
"gopkg.in/natefinch/lumberjack.v2"
)
@@ -48,6 +49,8 @@ func Initialized() bool {
func RecoverPanic(name string, cleanup func()) {
if r := recover(); r != nil {
+ event.Error(r, "panic", true, "name", name)
+
// Create a timestamped panic log file
timestamp := time.Now().Format("20060102-150405")
filename := fmt.Sprintf("crush-panic-%s-%s.log", name, timestamp)
@@ -1,274 +1,166 @@
package lsp
import (
- "bufio"
"context"
"encoding/json"
"fmt"
- "io"
"log/slog"
+ "maps"
"os"
- "os/exec"
"path/filepath"
"strings"
- "sync"
"sync/atomic"
"time"
"github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/log"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
+ "github.com/charmbracelet/crush/internal/csync"
+ "github.com/charmbracelet/crush/internal/fsext"
+ "github.com/charmbracelet/crush/internal/home"
+ powernap "github.com/charmbracelet/x/powernap/pkg/lsp"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
+ "github.com/charmbracelet/x/powernap/pkg/transport"
)
type Client struct {
- Cmd *exec.Cmd
- stdin io.WriteCloser
- stdout *bufio.Reader
- stderr io.ReadCloser
+ client *powernap.Client
+ name string
- // Request ID counter
- nextID atomic.Int32
+ // File types this LSP server handles (e.g., .go, .rs, .py)
+ fileTypes []string
- // Response handlers
- handlers map[int32]chan *Message
- handlersMu sync.RWMutex
+ // Configuration for this LSP client
+ config config.LSPConfig
- // Server request handlers
- serverRequestHandlers map[string]ServerRequestHandler
- serverHandlersMu sync.RWMutex
-
- // Notification handlers
- notificationHandlers map[string]NotificationHandler
- notificationMu sync.RWMutex
+ // Diagnostic change callback
+ onDiagnosticsChanged func(name string, count int)
// Diagnostic cache
- diagnostics map[protocol.DocumentURI][]protocol.Diagnostic
- diagnosticsMu sync.RWMutex
+ diagnostics *csync.VersionedMap[protocol.DocumentURI, []protocol.Diagnostic]
// Files are currently opened by the LSP
- openFiles map[string]*OpenFileInfo
- openFilesMu sync.RWMutex
+ openFiles *csync.Map[string, *OpenFileInfo]
// Server state
serverState atomic.Value
}
-func NewClient(ctx context.Context, command string, args ...string) (*Client, error) {
- cmd := exec.CommandContext(ctx, command, args...)
- // Copy env
- cmd.Env = os.Environ()
-
- stdin, err := cmd.StdinPipe()
+// New creates a new LSP client using the powernap implementation.
+func New(ctx context.Context, name string, config config.LSPConfig, resolver config.VariableResolver) (*Client, error) {
+ // Convert working directory to file URI
+ workDir, err := os.Getwd()
if err != nil {
- return nil, fmt.Errorf("failed to create stdin pipe: %w", err)
+ return nil, fmt.Errorf("failed to get working directory: %w", err)
}
- stdout, err := cmd.StdoutPipe()
+ rootURI := string(protocol.URIFromPath(workDir))
+
+ command, err := resolver.ResolveValue(config.Command)
if err != nil {
- return nil, fmt.Errorf("failed to create stdout pipe: %w", err)
+ return nil, fmt.Errorf("invalid lsp command: %w", err)
+ }
+
+ // Create powernap client config
+ clientConfig := powernap.ClientConfig{
+ Command: home.Long(command),
+ Args: config.Args,
+ RootURI: rootURI,
+ Environment: func() map[string]string {
+ env := make(map[string]string)
+ maps.Copy(env, config.Env)
+ return env
+ }(),
+ Settings: config.Options,
+ InitOptions: config.InitOptions,
+ WorkspaceFolders: []protocol.WorkspaceFolder{
+ {
+ URI: rootURI,
+ Name: filepath.Base(workDir),
+ },
+ },
}
- stderr, err := cmd.StderrPipe()
+ // Create the powernap client
+ powernapClient, err := powernap.NewClient(clientConfig)
if err != nil {
- return nil, fmt.Errorf("failed to create stderr pipe: %w", err)
+ return nil, fmt.Errorf("failed to create lsp client: %w", err)
}
client := &Client{
- Cmd: cmd,
- stdin: stdin,
- stdout: bufio.NewReader(stdout),
- stderr: stderr,
- handlers: make(map[int32]chan *Message),
- notificationHandlers: make(map[string]NotificationHandler),
- serverRequestHandlers: make(map[string]ServerRequestHandler),
- diagnostics: make(map[protocol.DocumentURI][]protocol.Diagnostic),
- openFiles: make(map[string]*OpenFileInfo),
+ client: powernapClient,
+ name: name,
+ fileTypes: config.FileTypes,
+ diagnostics: csync.NewVersionedMap[protocol.DocumentURI, []protocol.Diagnostic](),
+ openFiles: csync.NewMap[string, *OpenFileInfo](),
+ config: config,
}
// Initialize server state
client.serverState.Store(StateStarting)
- // Start the LSP server process
- if err := cmd.Start(); err != nil {
- return nil, fmt.Errorf("failed to start LSP server: %w", err)
- }
-
- // Handle stderr in a separate goroutine
- go func() {
- scanner := bufio.NewScanner(stderr)
- for scanner.Scan() {
- slog.Error("LSP Server", "err", scanner.Text())
- }
- if err := scanner.Err(); err != nil {
- slog.Error("Error reading", "err", err)
- }
- }()
-
- // Start message handling loop
- go func() {
- defer log.RecoverPanic("LSP-message-handler", func() {
- slog.Error("LSP message handler crashed, LSP functionality may be impaired")
- })
- client.handleMessages()
- }()
-
return client, nil
}
-func (c *Client) RegisterNotificationHandler(method string, handler NotificationHandler) {
- c.notificationMu.Lock()
- defer c.notificationMu.Unlock()
- c.notificationHandlers[method] = handler
-}
-
-func (c *Client) RegisterServerRequestHandler(method string, handler ServerRequestHandler) {
- c.serverHandlersMu.Lock()
- defer c.serverHandlersMu.Unlock()
- c.serverRequestHandlers[method] = handler
-}
-
-func (c *Client) InitializeLSPClient(ctx context.Context, workspaceDir string) (*protocol.InitializeResult, error) {
- initParams := &protocol.InitializeParams{
- WorkspaceFoldersInitializeParams: protocol.WorkspaceFoldersInitializeParams{
- WorkspaceFolders: []protocol.WorkspaceFolder{
- {
- URI: protocol.URI(protocol.URIFromPath(workspaceDir)),
- Name: workspaceDir,
- },
- },
- },
-
- XInitializeParams: protocol.XInitializeParams{
- ProcessID: int32(os.Getpid()),
- ClientInfo: &protocol.ClientInfo{
- Name: "mcp-language-server",
- Version: "0.1.0",
- },
- RootPath: workspaceDir,
- RootURI: protocol.URIFromPath(workspaceDir),
- Capabilities: protocol.ClientCapabilities{
- Workspace: protocol.WorkspaceClientCapabilities{
- Configuration: true,
- DidChangeConfiguration: protocol.DidChangeConfigurationClientCapabilities{
- DynamicRegistration: true,
- },
- DidChangeWatchedFiles: protocol.DidChangeWatchedFilesClientCapabilities{
- DynamicRegistration: true,
- RelativePatternSupport: true,
- },
- },
- TextDocument: protocol.TextDocumentClientCapabilities{
- Synchronization: &protocol.TextDocumentSyncClientCapabilities{
- DynamicRegistration: true,
- DidSave: true,
- },
- Completion: protocol.CompletionClientCapabilities{
- CompletionItem: protocol.ClientCompletionItemOptions{},
- },
- CodeLens: &protocol.CodeLensClientCapabilities{
- DynamicRegistration: true,
- },
- DocumentSymbol: protocol.DocumentSymbolClientCapabilities{},
- CodeAction: protocol.CodeActionClientCapabilities{
- CodeActionLiteralSupport: protocol.ClientCodeActionLiteralOptions{
- CodeActionKind: protocol.ClientCodeActionKindOptions{
- ValueSet: []protocol.CodeActionKind{},
- },
- },
- },
- PublishDiagnostics: protocol.PublishDiagnosticsClientCapabilities{
- VersionSupport: true,
- },
- SemanticTokens: protocol.SemanticTokensClientCapabilities{
- Requests: protocol.ClientSemanticTokensRequestOptions{
- Range: &protocol.Or_ClientSemanticTokensRequestOptions_range{},
- Full: &protocol.Or_ClientSemanticTokensRequestOptions_full{},
- },
- TokenTypes: []string{},
- TokenModifiers: []string{},
- Formats: []protocol.TokenFormat{},
- },
- },
- Window: protocol.WindowClientCapabilities{},
- },
- InitializationOptions: map[string]any{
- "codelenses": map[string]bool{
- "generate": true,
- "regenerate_cgo": true,
- "test": true,
- "tidy": true,
- "upgrade_dependency": true,
- "vendor": true,
- "vulncheck": false,
- },
- },
- },
- }
-
- var result protocol.InitializeResult
- if err := c.Call(ctx, "initialize", initParams, &result); err != nil {
- return nil, fmt.Errorf("initialize failed: %w", err)
+// Initialize initializes the LSP client and returns the server capabilities.
+func (c *Client) Initialize(ctx context.Context, workspaceDir string) (*protocol.InitializeResult, error) {
+ if err := c.client.Initialize(ctx, false); err != nil {
+ return nil, fmt.Errorf("failed to initialize the lsp client: %w", err)
+ }
+
+ // Convert powernap capabilities to protocol capabilities
+ caps := c.client.GetCapabilities()
+ protocolCaps := protocol.ServerCapabilities{
+ TextDocumentSync: caps.TextDocumentSync,
+ CompletionProvider: func() *protocol.CompletionOptions {
+ if caps.CompletionProvider != nil {
+ return &protocol.CompletionOptions{
+ TriggerCharacters: caps.CompletionProvider.TriggerCharacters,
+ AllCommitCharacters: caps.CompletionProvider.AllCommitCharacters,
+ ResolveProvider: caps.CompletionProvider.ResolveProvider,
+ }
+ }
+ return nil
+ }(),
}
- if err := c.Notify(ctx, "initialized", struct{}{}); err != nil {
- return nil, fmt.Errorf("initialized notification failed: %w", err)
+ result := &protocol.InitializeResult{
+ Capabilities: protocolCaps,
}
- // Register handlers
c.RegisterServerRequestHandler("workspace/applyEdit", HandleApplyEdit)
c.RegisterServerRequestHandler("workspace/configuration", HandleWorkspaceConfiguration)
c.RegisterServerRequestHandler("client/registerCapability", HandleRegisterCapability)
c.RegisterNotificationHandler("window/showMessage", HandleServerMessage)
- c.RegisterNotificationHandler("textDocument/publishDiagnostics",
- func(params json.RawMessage) { HandleDiagnostics(c, params) })
-
- // Notify the LSP server
- err := c.Initialized(ctx, protocol.InitializedParams{})
- if err != nil {
- return nil, fmt.Errorf("initialization failed: %w", err)
- }
+ c.RegisterNotificationHandler("textDocument/publishDiagnostics", func(_ context.Context, _ string, params json.RawMessage) {
+ HandleDiagnostics(c, params)
+ })
- return &result, nil
+ return result, nil
}
-func (c *Client) Close() error {
+// Close closes the LSP client.
+func (c *Client) Close(ctx context.Context) error {
// Try to close all open files first
- ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+ ctx, cancel := context.WithTimeout(ctx, 5*time.Second)
defer cancel()
- // Attempt to close files but continue shutdown regardless
c.CloseAllFiles(ctx)
- // Close stdin to signal the server
- if err := c.stdin.Close(); err != nil {
- return fmt.Errorf("failed to close stdin: %w", err)
+ // Shutdown and exit the client
+ if err := c.client.Shutdown(ctx); err != nil {
+ slog.Warn("Failed to shutdown LSP client", "error", err)
}
- // Use a channel to handle the Wait with timeout
- done := make(chan error, 1)
- go func() {
- done <- c.Cmd.Wait()
- }()
-
- // Wait for process to exit with timeout
- select {
- case err := <-done:
- return err
- case <-time.After(2 * time.Second):
- // If we timeout, try to kill the process
- if err := c.Cmd.Process.Kill(); err != nil {
- return fmt.Errorf("failed to kill process: %w", err)
- }
- return fmt.Errorf("process killed after timeout")
- }
+ return c.client.Exit()
}
+// ServerState represents the state of an LSP server
type ServerState int
const (
StateStarting ServerState = iota
StateReady
StateError
+ StateDisabled
)
// GetServerState returns the current state of the LSP server
@@ -284,8 +176,17 @@ func (c *Client) SetServerState(state ServerState) {
c.serverState.Store(state)
}
-// WaitForServerReady waits for the server to be ready by polling the server
-// with a simple request until it responds successfully or times out
+// GetName returns the name of the LSP client
+func (c *Client) GetName() string {
+ return c.name
+}
+
+// SetDiagnosticsCallback sets the callback function for diagnostic changes
+func (c *Client) SetDiagnosticsCallback(callback func(name string, count int)) {
+ c.onDiagnosticsChanged = callback
+}
+
+// WaitForServerReady waits for the server to be ready
func (c *Client) WaitForServerReady(ctx context.Context) error {
cfg := config.Get()
@@ -300,20 +201,11 @@ func (c *Client) WaitForServerReady(ctx context.Context) error {
ticker := time.NewTicker(500 * time.Millisecond)
defer ticker.Stop()
- if cfg.Options.DebugLSP {
+ if cfg != nil && cfg.Options.DebugLSP {
slog.Debug("Waiting for LSP server to be ready...")
}
- // Determine server type for specialized initialization
- serverType := c.detectServerType()
-
- // For TypeScript-like servers, we need to open some key files first
- if serverType == ServerTypeTypeScript {
- if cfg.Options.DebugLSP {
- slog.Debug("TypeScript-like server detected, opening key configuration files")
- }
- c.openKeyConfigFiles(ctx)
- }
+ c.openKeyConfigFiles(ctx)
for {
select {
@@ -321,285 +213,63 @@ func (c *Client) WaitForServerReady(ctx context.Context) error {
c.SetServerState(StateError)
return fmt.Errorf("timeout waiting for LSP server to be ready")
case <-ticker.C:
- // Try a ping method appropriate for this server type
- err := c.pingServerByType(ctx, serverType)
- if err == nil {
- // Server responded successfully
- c.SetServerState(StateReady)
- if cfg.Options.DebugLSP {
- slog.Debug("LSP server is ready")
+ // Check if client is running
+ if !c.client.IsRunning() {
+ if cfg != nil && cfg.Options.DebugLSP {
+ slog.Debug("LSP server not ready yet", "server", c.name)
}
- return nil
- } else {
- slog.Debug("LSP server not ready yet", "error", err, "serverType", serverType)
- }
-
- if cfg.Options.DebugLSP {
- slog.Debug("LSP server not ready yet", "error", err, "serverType", serverType)
+ continue
}
- }
- }
-}
-
-// ServerType represents the type of LSP server
-type ServerType int
-
-const (
- ServerTypeUnknown ServerType = iota
- ServerTypeGo
- ServerTypeTypeScript
- ServerTypeRust
- ServerTypePython
- ServerTypeGeneric
-)
-
-// detectServerType tries to determine what type of LSP server we're dealing with
-func (c *Client) detectServerType() ServerType {
- if c.Cmd == nil {
- return ServerTypeUnknown
- }
-
- cmdPath := strings.ToLower(c.Cmd.Path)
-
- switch {
- case strings.Contains(cmdPath, "gopls"):
- return ServerTypeGo
- case strings.Contains(cmdPath, "typescript") || strings.Contains(cmdPath, "vtsls") || strings.Contains(cmdPath, "tsserver"):
- return ServerTypeTypeScript
- case strings.Contains(cmdPath, "rust-analyzer"):
- return ServerTypeRust
- case strings.Contains(cmdPath, "pyright") || strings.Contains(cmdPath, "pylsp") || strings.Contains(cmdPath, "python"):
- return ServerTypePython
- default:
- return ServerTypeGeneric
- }
-}
-
-// openKeyConfigFiles opens important configuration files that help initialize the server
-func (c *Client) openKeyConfigFiles(ctx context.Context) {
- workDir := config.Get().WorkingDir()
- serverType := c.detectServerType()
-
- var filesToOpen []string
-
- switch serverType {
- case ServerTypeTypeScript:
- // TypeScript servers need these config files to properly initialize
- filesToOpen = []string{
- filepath.Join(workDir, "tsconfig.json"),
- filepath.Join(workDir, "package.json"),
- filepath.Join(workDir, "jsconfig.json"),
- }
-
- // Also find and open a few TypeScript files to help the server initialize
- c.openTypeScriptFiles(ctx, workDir)
- case ServerTypeGo:
- filesToOpen = []string{
- filepath.Join(workDir, "go.mod"),
- filepath.Join(workDir, "go.sum"),
- }
- case ServerTypeRust:
- filesToOpen = []string{
- filepath.Join(workDir, "Cargo.toml"),
- filepath.Join(workDir, "Cargo.lock"),
- }
- }
- // Try to open each file, ignoring errors if they don't exist
- for _, file := range filesToOpen {
- if _, err := os.Stat(file); err == nil {
- // File exists, try to open it
- if err := c.OpenFile(ctx, file); err != nil {
- slog.Debug("Failed to open key config file", "file", file, "error", err)
- } else {
- slog.Debug("Opened key config file for initialization", "file", file)
+ // Server is ready
+ c.SetServerState(StateReady)
+ if cfg != nil && cfg.Options.DebugLSP {
+ slog.Debug("LSP server is ready")
}
+ return nil
}
}
}
-// pingServerByType sends a ping request appropriate for the server type
-func (c *Client) pingServerByType(ctx context.Context, serverType ServerType) error {
- switch serverType {
- case ServerTypeTypeScript:
- // For TypeScript, try a document symbol request on an open file
- return c.pingTypeScriptServer(ctx)
- case ServerTypeGo:
- // For Go, workspace/symbol works well
- return c.pingWithWorkspaceSymbol(ctx)
- case ServerTypeRust:
- // For Rust, workspace/symbol works well
- return c.pingWithWorkspaceSymbol(ctx)
- default:
- // Default ping method
- return c.pingWithWorkspaceSymbol(ctx)
- }
+// OpenFileInfo contains information about an open file
+type OpenFileInfo struct {
+ Version int32
+ URI protocol.DocumentURI
}
-// pingTypeScriptServer tries to ping a TypeScript server with appropriate methods
-func (c *Client) pingTypeScriptServer(ctx context.Context) error {
- // First try workspace/symbol which works for many servers
- if err := c.pingWithWorkspaceSymbol(ctx); err == nil {
- return nil
+// HandlesFile checks if this LSP client handles the given file based on its extension.
+func (c *Client) HandlesFile(path string) bool {
+ // If no file types are specified, handle all files (backward compatibility)
+ if len(c.fileTypes) == 0 {
+ return true
}
- // If that fails, try to find an open file and request document symbols
- c.openFilesMu.RLock()
- defer c.openFilesMu.RUnlock()
-
- // If we have any open files, try to get document symbols for one
- for uri := range c.openFiles {
- filePath, err := protocol.DocumentURI(uri).Path()
- if err != nil {
- slog.Error("Failed to convert URI to path for TypeScript symbol collection", "uri", uri, "error", err)
- continue
+ name := strings.ToLower(filepath.Base(path))
+ for _, filetype := range c.fileTypes {
+ suffix := strings.ToLower(filetype)
+ if !strings.HasPrefix(suffix, ".") {
+ suffix = "." + suffix
}
-
- if strings.HasSuffix(filePath, ".ts") || strings.HasSuffix(filePath, ".js") ||
- strings.HasSuffix(filePath, ".tsx") || strings.HasSuffix(filePath, ".jsx") {
- var symbols []protocol.DocumentSymbol
- err := c.Call(ctx, "textDocument/documentSymbol", protocol.DocumentSymbolParams{
- TextDocument: protocol.TextDocumentIdentifier{
- URI: protocol.DocumentURI(uri),
- },
- }, &symbols)
- if err == nil {
- return nil
- }
+ if strings.HasSuffix(name, suffix) {
+ slog.Debug("handles file", "name", c.name, "file", name, "filetype", filetype)
+ return true
}
}
-
- // If we have no open TypeScript files, try to find and open one
- workDir := config.Get().WorkingDir()
- err := filepath.WalkDir(workDir, func(path string, d os.DirEntry, err error) error {
- if err != nil {
- return err
- }
-
- // Skip directories and non-TypeScript files
- if d.IsDir() {
- return nil
- }
-
- ext := filepath.Ext(path)
- if ext == ".ts" || ext == ".js" || ext == ".tsx" || ext == ".jsx" {
- // Found a TypeScript file, try to open it
- if err := c.OpenFile(ctx, path); err == nil {
- // Successfully opened, stop walking
- return filepath.SkipAll
- }
- }
-
- return nil
- })
- if err != nil {
- slog.Debug("Error walking directory for TypeScript files", "error", err)
- }
-
- // Final fallback - just try a generic capability
- return c.pingWithServerCapabilities(ctx)
+ slog.Debug("doesn't handle file", "name", c.name, "file", name)
+ return false
}
-// openTypeScriptFiles finds and opens TypeScript files to help initialize the server
-func (c *Client) openTypeScriptFiles(ctx context.Context, workDir string) {
- cfg := config.Get()
- filesOpened := 0
- maxFilesToOpen := 5 // Limit to a reasonable number of files
-
- // Find and open TypeScript files
- err := filepath.WalkDir(workDir, func(path string, d os.DirEntry, err error) error {
- if err != nil {
- return err
- }
-
- // Skip directories and non-TypeScript files
- if d.IsDir() {
- // Skip common directories to avoid wasting time
- if shouldSkipDir(path) {
- return filepath.SkipDir
- }
- return nil
- }
-
- // Check if we've opened enough files
- if filesOpened >= maxFilesToOpen {
- return filepath.SkipAll
- }
-
- // Check file extension
- ext := filepath.Ext(path)
- if ext == ".ts" || ext == ".tsx" || ext == ".js" || ext == ".jsx" {
- // Try to open the file
- if err := c.OpenFile(ctx, path); err == nil {
- filesOpened++
- if cfg.Options.DebugLSP {
- slog.Debug("Opened TypeScript file for initialization", "file", path)
- }
- }
- }
-
+// OpenFile opens a file in the LSP server.
+func (c *Client) OpenFile(ctx context.Context, filepath string) error {
+ if !c.HandlesFile(filepath) {
return nil
- })
-
- if err != nil && cfg.Options.DebugLSP {
- slog.Debug("Error walking directory for TypeScript files", "error", err)
- }
-
- if cfg.Options.DebugLSP {
- slog.Debug("Opened TypeScript files for initialization", "count", filesOpened)
}
-}
-// shouldSkipDir returns true if the directory should be skipped during file search
-func shouldSkipDir(path string) bool {
- dirName := filepath.Base(path)
-
- // Skip hidden directories
- if strings.HasPrefix(dirName, ".") {
- return true
- }
-
- // Skip common directories that won't contain relevant source files
- skipDirs := map[string]bool{
- "node_modules": true,
- "dist": true,
- "build": true,
- "coverage": true,
- "vendor": true,
- "target": true,
- }
-
- return skipDirs[dirName]
-}
-
-// pingWithWorkspaceSymbol tries a workspace/symbol request
-func (c *Client) pingWithWorkspaceSymbol(ctx context.Context) error {
- var result []protocol.SymbolInformation
- return c.Call(ctx, "workspace/symbol", protocol.WorkspaceSymbolParams{
- Query: "",
- }, &result)
-}
-
-// pingWithServerCapabilities tries to get server capabilities
-func (c *Client) pingWithServerCapabilities(ctx context.Context) error {
- // This is a very lightweight request that should work for most servers
- return c.Notify(ctx, "$/cancelRequest", struct{ ID int }{ID: -1})
-}
-
-type OpenFileInfo struct {
- Version int32
- URI protocol.DocumentURI
-}
-
-func (c *Client) OpenFile(ctx context.Context, filepath string) error {
uri := string(protocol.URIFromPath(filepath))
- c.openFilesMu.Lock()
- if _, exists := c.openFiles[uri]; exists {
- c.openFilesMu.Unlock()
+ if _, exists := c.openFiles.Get(uri); exists {
return nil // Already open
}
- c.openFilesMu.Unlock()
// Skip files that do not exist or cannot be read
content, err := os.ReadFile(filepath)
@@ -607,29 +277,20 @@ func (c *Client) OpenFile(ctx context.Context, filepath string) error {
return fmt.Errorf("error reading file: %w", err)
}
- params := protocol.DidOpenTextDocumentParams{
- TextDocument: protocol.TextDocumentItem{
- URI: protocol.DocumentURI(uri),
- LanguageID: DetectLanguageID(uri),
- Version: 1,
- Text: string(content),
- },
- }
-
- if err := c.Notify(ctx, "textDocument/didOpen", params); err != nil {
+ // Notify the server about the opened document
+ if err = c.client.NotifyDidOpenTextDocument(ctx, uri, string(DetectLanguageID(uri)), 1, string(content)); err != nil {
return err
}
- c.openFilesMu.Lock()
- c.openFiles[uri] = &OpenFileInfo{
+ c.openFiles.Set(uri, &OpenFileInfo{
Version: 1,
URI: protocol.DocumentURI(uri),
- }
- c.openFilesMu.Unlock()
+ })
return nil
}
+// NotifyChange notifies the server about a file change.
func (c *Client) NotifyChange(ctx context.Context, filepath string) error {
uri := string(protocol.URIFromPath(filepath))
@@ -638,121 +299,61 @@ func (c *Client) NotifyChange(ctx context.Context, filepath string) error {
return fmt.Errorf("error reading file: %w", err)
}
- c.openFilesMu.Lock()
- fileInfo, isOpen := c.openFiles[uri]
+ fileInfo, isOpen := c.openFiles.Get(uri)
if !isOpen {
- c.openFilesMu.Unlock()
return fmt.Errorf("cannot notify change for unopened file: %s", filepath)
}
// Increment version
fileInfo.Version++
- version := fileInfo.Version
- c.openFilesMu.Unlock()
- params := protocol.DidChangeTextDocumentParams{
- TextDocument: protocol.VersionedTextDocumentIdentifier{
- TextDocumentIdentifier: protocol.TextDocumentIdentifier{
- URI: protocol.DocumentURI(uri),
+ // Create change event
+ changes := []protocol.TextDocumentContentChangeEvent{
+ {
+ Value: protocol.TextDocumentContentChangeWholeDocument{
+ Text: string(content),
},
- Version: version,
- },
- ContentChanges: []protocol.TextDocumentContentChangeEvent{
- {
- Value: protocol.TextDocumentContentChangeWholeDocument{
- Text: string(content),
- },
- },
- },
- }
-
- return c.Notify(ctx, "textDocument/didChange", params)
-}
-
-func (c *Client) CloseFile(ctx context.Context, filepath string) error {
- cfg := config.Get()
- uri := string(protocol.URIFromPath(filepath))
-
- c.openFilesMu.Lock()
- if _, exists := c.openFiles[uri]; !exists {
- c.openFilesMu.Unlock()
- return nil // Already closed
- }
- c.openFilesMu.Unlock()
-
- params := protocol.DidCloseTextDocumentParams{
- TextDocument: protocol.TextDocumentIdentifier{
- URI: protocol.DocumentURI(uri),
},
}
- if cfg.Options.DebugLSP {
- slog.Debug("Closing file", "file", filepath)
- }
- if err := c.Notify(ctx, "textDocument/didClose", params); err != nil {
- return err
- }
-
- c.openFilesMu.Lock()
- delete(c.openFiles, uri)
- c.openFilesMu.Unlock()
-
- return nil
+ return c.client.NotifyDidChangeTextDocument(ctx, uri, int(fileInfo.Version), changes)
}
+// IsFileOpen checks if a file is currently open.
func (c *Client) IsFileOpen(filepath string) bool {
uri := string(protocol.URIFromPath(filepath))
- c.openFilesMu.RLock()
- defer c.openFilesMu.RUnlock()
- _, exists := c.openFiles[uri]
+ _, exists := c.openFiles.Get(uri)
return exists
}
-// CloseAllFiles closes all currently open files
+// CloseAllFiles closes all currently open files.
func (c *Client) CloseAllFiles(ctx context.Context) {
cfg := config.Get()
- c.openFilesMu.Lock()
- filesToClose := make([]string, 0, len(c.openFiles))
-
- // First collect all URIs that need to be closed
- for uri := range c.openFiles {
- // Convert URI back to file path using proper URI handling
- filePath, err := protocol.DocumentURI(uri).Path()
- if err != nil {
- slog.Error("Failed to convert URI to path for file closing", "uri", uri, "error", err)
- continue
+ debugLSP := cfg != nil && cfg.Options.DebugLSP
+ for uri := range c.openFiles.Seq2() {
+ if debugLSP {
+ slog.Debug("Closing file", "file", uri)
}
- filesToClose = append(filesToClose, filePath)
- }
- c.openFilesMu.Unlock()
-
- // Then close them all
- for _, filePath := range filesToClose {
- err := c.CloseFile(ctx, filePath)
- if err != nil && cfg.Options.DebugLSP {
- slog.Warn("Error closing file", "file", filePath, "error", err)
+ if err := c.client.NotifyDidCloseTextDocument(ctx, uri); err != nil {
+			slog.Warn("Error closing file", "uri", uri, "error", err)
+ continue
}
- }
-
- if cfg.Options.DebugLSP {
- slog.Debug("Closed all files", "files", filesToClose)
+ c.openFiles.Del(uri)
}
}
+// GetFileDiagnostics returns diagnostics for a specific file.
func (c *Client) GetFileDiagnostics(uri protocol.DocumentURI) []protocol.Diagnostic {
- c.diagnosticsMu.RLock()
- defer c.diagnosticsMu.RUnlock()
-
- return c.diagnostics[uri]
+ diags, _ := c.diagnostics.Get(uri)
+ return diags
}
-// GetDiagnostics returns all diagnostics for all files
+// GetDiagnostics returns all diagnostics for all files.
func (c *Client) GetDiagnostics() map[protocol.DocumentURI][]protocol.Diagnostic {
- return c.diagnostics
+ return maps.Collect(c.diagnostics.Seq2())
}
-// OpenFileOnDemand opens a file only if it's not already open
-// This is used for lazy-loading files when they're actually needed
+// OpenFileOnDemand opens a file only if it's not already open.
func (c *Client) OpenFileOnDemand(ctx context.Context, filepath string) error {
// Check if the file is already open
if c.IsFileOpen(filepath) {
@@ -763,8 +364,7 @@ func (c *Client) OpenFileOnDemand(ctx context.Context, filepath string) error {
return c.OpenFile(ctx, filepath)
}
-// GetDiagnosticsForFile ensures a file is open and returns its diagnostics
-// This is useful for on-demand diagnostics when using lazy loading
+// GetDiagnosticsForFile ensures a file is open and returns its diagnostics.
func (c *Client) GetDiagnosticsForFile(ctx context.Context, filepath string) ([]protocol.Diagnostic, error) {
documentURI := protocol.URIFromPath(filepath)
@@ -779,16 +379,84 @@ func (c *Client) GetDiagnosticsForFile(ctx context.Context, filepath string) ([]
}
// Get diagnostics
- c.diagnosticsMu.RLock()
- diagnostics := c.diagnostics[documentURI]
- c.diagnosticsMu.RUnlock()
+ diagnostics, _ := c.diagnostics.Get(documentURI)
return diagnostics, nil
}
-// ClearDiagnosticsForURI removes diagnostics for a specific URI from the cache
+// ClearDiagnosticsForURI removes diagnostics for a specific URI from the cache.
func (c *Client) ClearDiagnosticsForURI(uri protocol.DocumentURI) {
- c.diagnosticsMu.Lock()
- defer c.diagnosticsMu.Unlock()
- delete(c.diagnostics, uri)
+ c.diagnostics.Del(uri)
+}
+
+// RegisterNotificationHandler registers a notification handler.
+func (c *Client) RegisterNotificationHandler(method string, handler transport.NotificationHandler) {
+ c.client.RegisterNotificationHandler(method, handler)
+}
+
+// RegisterServerRequestHandler handles server requests.
+func (c *Client) RegisterServerRequestHandler(method string, handler transport.Handler) {
+ c.client.RegisterHandler(method, handler)
+}
+
+// DidChangeWatchedFiles sends a workspace/didChangeWatchedFiles notification to the server.
+func (c *Client) DidChangeWatchedFiles(ctx context.Context, params protocol.DidChangeWatchedFilesParams) error {
+ return c.client.NotifyDidChangeWatchedFiles(ctx, params.Changes)
+}
+
+// openKeyConfigFiles opens important configuration files that help initialize the server.
+func (c *Client) openKeyConfigFiles(ctx context.Context) {
+ wd, err := os.Getwd()
+ if err != nil {
+ return
+ }
+
+ // Try to open each file, ignoring errors if they don't exist
+ for _, file := range c.config.RootMarkers {
+ file = filepath.Join(wd, file)
+ if _, err := os.Stat(file); err == nil {
+ // File exists, try to open it
+ if err := c.OpenFile(ctx, file); err != nil {
+ slog.Debug("Failed to open key config file", "file", file, "error", err)
+ } else {
+ slog.Debug("Opened key config file for initialization", "file", file)
+ }
+ }
+ }
+}
+
+// WaitForDiagnostics waits until diagnostics change or the timeout is reached.
+func (c *Client) WaitForDiagnostics(ctx context.Context, d time.Duration) {
+ ticker := time.NewTicker(200 * time.Millisecond)
+ defer ticker.Stop()
+ timeout := time.After(d)
+ pv := c.diagnostics.Version()
+ for {
+ select {
+ case <-ctx.Done():
+ return
+ case <-timeout:
+ return
+ case <-ticker.C:
+ if pv != c.diagnostics.Version() {
+ return
+ }
+ }
+ }
+}
+
+// HasRootMarkers checks if any of the specified root marker patterns exist in the given directory.
+// Uses glob patterns to match files, allowing for more flexible matching.
+func HasRootMarkers(dir string, rootMarkers []string) bool {
+ if len(rootMarkers) == 0 {
+ return true
+ }
+ for _, pattern := range rootMarkers {
+ // Use fsext.GlobWithDoubleStar to find matches
+ matches, _, err := fsext.GlobWithDoubleStar(pattern, dir, 1)
+ if err == nil && len(matches) > 0 {
+ return true
+ }
+ }
+ return false
}
@@ -0,0 +1,57 @@
+package lsp
+
+import (
+ "context"
+ "testing"
+
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/env"
+)
+
+func TestClient(t *testing.T) {
+ ctx := context.Background()
+
+ // Create a simple config for testing
+ cfg := config.LSPConfig{
+		Command: "$THE_CMD", // Placeholder resolved to "echo" (a harmless dummy command) by the env resolver below
+ Args: []string{"hello"},
+ FileTypes: []string{"go"},
+ Env: map[string]string{},
+ }
+
+ // Test creating a powernap client - this will likely fail with echo
+ // but we can still test the basic structure
+ client, err := New(ctx, "test", cfg, config.NewEnvironmentVariableResolver(env.NewFromMap(map[string]string{
+ "THE_CMD": "echo",
+ })))
+ if err != nil {
+ // Expected to fail with echo command, skip the rest
+ t.Skipf("Powernap client creation failed as expected with dummy command: %v", err)
+ return
+ }
+
+ // If we get here, test basic interface methods
+ if client.GetName() != "test" {
+ t.Errorf("Expected name 'test', got '%s'", client.GetName())
+ }
+
+ if !client.HandlesFile("test.go") {
+ t.Error("Expected client to handle .go files")
+ }
+
+ if client.HandlesFile("test.py") {
+ t.Error("Expected client to not handle .py files")
+ }
+
+ // Test server state
+ client.SetServerState(StateReady)
+ if client.GetServerState() != StateReady {
+ t.Error("Expected server state to be StateReady")
+ }
+
+ // Clean up - expect this to fail with echo command
+ if err := client.Close(t.Context()); err != nil {
+ // Expected to fail with echo command
+ t.Logf("Close failed as expected with dummy command: %v", err)
+ }
+}
@@ -1,22 +1,22 @@
package lsp
import (
+ "context"
"encoding/json"
"log/slog"
"github.com/charmbracelet/crush/internal/config"
-
- "github.com/charmbracelet/crush/internal/lsp/protocol"
"github.com/charmbracelet/crush/internal/lsp/util"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
-// Requests
-
-func HandleWorkspaceConfiguration(params json.RawMessage) (any, error) {
+// HandleWorkspaceConfiguration handles workspace configuration requests
+func HandleWorkspaceConfiguration(_ context.Context, _ string, params json.RawMessage) (any, error) {
return []map[string]any{{}}, nil
}
-func HandleRegisterCapability(params json.RawMessage) (any, error) {
+// HandleRegisterCapability handles capability registration requests
+func HandleRegisterCapability(_ context.Context, _ string, params json.RawMessage) (any, error) {
var registerParams protocol.RegistrationParams
	if err := json.Unmarshal(params, &registerParams); err != nil {
slog.Error("Error unmarshaling registration params", "error", err)
@@ -32,22 +32,20 @@ func HandleRegisterCapability(params json.RawMessage) (any, error) {
slog.Error("Error marshaling registration options", "error", err)
continue
}
-
var options protocol.DidChangeWatchedFilesRegistrationOptions
if err := json.Unmarshal(optionsJSON, &options); err != nil {
slog.Error("Error unmarshaling registration options", "error", err)
continue
}
-
// Store the file watchers registrations
notifyFileWatchRegistration(reg.ID, options.Watchers)
}
}
-
return nil, nil
}
-func HandleApplyEdit(params json.RawMessage) (any, error) {
+// HandleApplyEdit handles workspace edit requests
+func HandleApplyEdit(_ context.Context, _ string, params json.RawMessage) (any, error) {
var edit protocol.ApplyWorkspaceEditParams
if err := json.Unmarshal(params, &edit); err != nil {
return nil, err
@@ -80,21 +78,32 @@ func notifyFileWatchRegistration(id string, watchers []protocol.FileSystemWatche
}
}
-// Notifications
-
-func HandleServerMessage(params json.RawMessage) {
+// HandleServerMessage handles server messages
+func HandleServerMessage(_ context.Context, method string, params json.RawMessage) {
cfg := config.Get()
- var msg struct {
- Type int `json:"type"`
- Message string `json:"message"`
+ if !cfg.Options.DebugLSP {
+ return
}
- if err := json.Unmarshal(params, &msg); err == nil {
- if cfg.Options.DebugLSP {
- slog.Debug("Server message", "type", msg.Type, "message", msg.Message)
- }
+
+ var msg protocol.ShowMessageParams
+ if err := json.Unmarshal(params, &msg); err != nil {
+		slog.Debug("Failed to unmarshal server message", "error", err)
+ return
+ }
+
+ switch msg.Type {
+ case protocol.Error:
+ slog.Error("LSP Server", "message", msg.Message)
+ case protocol.Warning:
+ slog.Warn("LSP Server", "message", msg.Message)
+ case protocol.Info:
+ slog.Info("LSP Server", "message", msg.Message)
+ case protocol.Log:
+ slog.Debug("LSP Server", "message", msg.Message)
}
}
+// HandleDiagnostics handles diagnostic notifications from the LSP server
func HandleDiagnostics(client *Client, params json.RawMessage) {
var diagParams protocol.PublishDiagnosticsParams
if err := json.Unmarshal(params, &diagParams); err != nil {
@@ -102,8 +111,16 @@ func HandleDiagnostics(client *Client, params json.RawMessage) {
return
}
- client.diagnosticsMu.Lock()
- defer client.diagnosticsMu.Unlock()
+ client.diagnostics.Set(diagParams.URI, diagParams.Diagnostics)
- client.diagnostics[diagParams.URI] = diagParams.Diagnostics
+ // Calculate total diagnostic count
+ totalCount := 0
+ for _, diagnostics := range client.diagnostics.Seq2() {
+ totalCount += len(diagnostics)
+ }
+
+ // Trigger callback if set
+ if client.onDiagnosticsChanged != nil {
+ client.onDiagnosticsChanged(client.name, totalCount)
+ }
}
@@ -4,7 +4,7 @@ import (
"path/filepath"
"strings"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
func DetectLanguageID(uri string) protocol.LanguageKind {
@@ -1,554 +0,0 @@
-// Generated code. Do not edit
-package lsp
-
-import (
- "context"
-
- "github.com/charmbracelet/crush/internal/lsp/protocol"
-)
-
-// Implementation sends a textDocument/implementation request to the LSP server.
-// A request to resolve the implementation locations of a symbol at a given text document position. The request's parameter is of type TextDocumentPositionParams the response is of type Definition or a Thenable that resolves to such.
-func (c *Client) Implementation(ctx context.Context, params protocol.ImplementationParams) (protocol.Or_Result_textDocument_implementation, error) {
- var result protocol.Or_Result_textDocument_implementation
- err := c.Call(ctx, "textDocument/implementation", params, &result)
- return result, err
-}
-
-// TypeDefinition sends a textDocument/typeDefinition request to the LSP server.
-// A request to resolve the type definition locations of a symbol at a given text document position. The request's parameter is of type TextDocumentPositionParams the response is of type Definition or a Thenable that resolves to such.
-func (c *Client) TypeDefinition(ctx context.Context, params protocol.TypeDefinitionParams) (protocol.Or_Result_textDocument_typeDefinition, error) {
- var result protocol.Or_Result_textDocument_typeDefinition
- err := c.Call(ctx, "textDocument/typeDefinition", params, &result)
- return result, err
-}
-
-// DocumentColor sends a textDocument/documentColor request to the LSP server.
-// A request to list all color symbols found in a given text document. The request's parameter is of type DocumentColorParams the response is of type ColorInformation ColorInformation[] or a Thenable that resolves to such.
-func (c *Client) DocumentColor(ctx context.Context, params protocol.DocumentColorParams) ([]protocol.ColorInformation, error) {
- var result []protocol.ColorInformation
- err := c.Call(ctx, "textDocument/documentColor", params, &result)
- return result, err
-}
-
-// ColorPresentation sends a textDocument/colorPresentation request to the LSP server.
-// A request to list all presentation for a color. The request's parameter is of type ColorPresentationParams the response is of type ColorInformation ColorInformation[] or a Thenable that resolves to such.
-func (c *Client) ColorPresentation(ctx context.Context, params protocol.ColorPresentationParams) ([]protocol.ColorPresentation, error) {
- var result []protocol.ColorPresentation
- err := c.Call(ctx, "textDocument/colorPresentation", params, &result)
- return result, err
-}
-
-// FoldingRange sends a textDocument/foldingRange request to the LSP server.
-// A request to provide folding ranges in a document. The request's parameter is of type FoldingRangeParams, the response is of type FoldingRangeList or a Thenable that resolves to such.
-func (c *Client) FoldingRange(ctx context.Context, params protocol.FoldingRangeParams) ([]protocol.FoldingRange, error) {
- var result []protocol.FoldingRange
- err := c.Call(ctx, "textDocument/foldingRange", params, &result)
- return result, err
-}
-
-// Declaration sends a textDocument/declaration request to the LSP server.
-// A request to resolve the type definition locations of a symbol at a given text document position. The request's parameter is of type TextDocumentPositionParams the response is of type Declaration or a typed array of DeclarationLink or a Thenable that resolves to such.
-func (c *Client) Declaration(ctx context.Context, params protocol.DeclarationParams) (protocol.Or_Result_textDocument_declaration, error) {
- var result protocol.Or_Result_textDocument_declaration
- err := c.Call(ctx, "textDocument/declaration", params, &result)
- return result, err
-}
-
-// SelectionRange sends a textDocument/selectionRange request to the LSP server.
-// A request to provide selection ranges in a document. The request's parameter is of type SelectionRangeParams, the response is of type SelectionRange SelectionRange[] or a Thenable that resolves to such.
-func (c *Client) SelectionRange(ctx context.Context, params protocol.SelectionRangeParams) ([]protocol.SelectionRange, error) {
- var result []protocol.SelectionRange
- err := c.Call(ctx, "textDocument/selectionRange", params, &result)
- return result, err
-}
-
-// PrepareCallHierarchy sends a textDocument/prepareCallHierarchy request to the LSP server.
-// A request to result a CallHierarchyItem in a document at a given position. Can be used as an input to an incoming or outgoing call hierarchy. Since 3.16.0
-func (c *Client) PrepareCallHierarchy(ctx context.Context, params protocol.CallHierarchyPrepareParams) ([]protocol.CallHierarchyItem, error) {
- var result []protocol.CallHierarchyItem
- err := c.Call(ctx, "textDocument/prepareCallHierarchy", params, &result)
- return result, err
-}
-
-// IncomingCalls sends a callHierarchy/incomingCalls request to the LSP server.
-// A request to resolve the incoming calls for a given CallHierarchyItem. Since 3.16.0
-func (c *Client) IncomingCalls(ctx context.Context, params protocol.CallHierarchyIncomingCallsParams) ([]protocol.CallHierarchyIncomingCall, error) {
- var result []protocol.CallHierarchyIncomingCall
- err := c.Call(ctx, "callHierarchy/incomingCalls", params, &result)
- return result, err
-}
-
-// OutgoingCalls sends a callHierarchy/outgoingCalls request to the LSP server.
-// A request to resolve the outgoing calls for a given CallHierarchyItem. Since 3.16.0
-func (c *Client) OutgoingCalls(ctx context.Context, params protocol.CallHierarchyOutgoingCallsParams) ([]protocol.CallHierarchyOutgoingCall, error) {
- var result []protocol.CallHierarchyOutgoingCall
- err := c.Call(ctx, "callHierarchy/outgoingCalls", params, &result)
- return result, err
-}
-
-// SemanticTokensFull sends a textDocument/semanticTokens/full request to the LSP server.
-// Since 3.16.0
-func (c *Client) SemanticTokensFull(ctx context.Context, params protocol.SemanticTokensParams) (protocol.SemanticTokens, error) {
- var result protocol.SemanticTokens
- err := c.Call(ctx, "textDocument/semanticTokens/full", params, &result)
- return result, err
-}
-
-// SemanticTokensFullDelta sends a textDocument/semanticTokens/full/delta request to the LSP server.
-// Since 3.16.0
-func (c *Client) SemanticTokensFullDelta(ctx context.Context, params protocol.SemanticTokensDeltaParams) (protocol.Or_Result_textDocument_semanticTokens_full_delta, error) {
- var result protocol.Or_Result_textDocument_semanticTokens_full_delta
- err := c.Call(ctx, "textDocument/semanticTokens/full/delta", params, &result)
- return result, err
-}
-
-// SemanticTokensRange sends a textDocument/semanticTokens/range request to the LSP server.
-// Since 3.16.0
-func (c *Client) SemanticTokensRange(ctx context.Context, params protocol.SemanticTokensRangeParams) (protocol.SemanticTokens, error) {
- var result protocol.SemanticTokens
- err := c.Call(ctx, "textDocument/semanticTokens/range", params, &result)
- return result, err
-}
-
-// LinkedEditingRange sends a textDocument/linkedEditingRange request to the LSP server.
-// A request to provide ranges that can be edited together. Since 3.16.0
-func (c *Client) LinkedEditingRange(ctx context.Context, params protocol.LinkedEditingRangeParams) (protocol.LinkedEditingRanges, error) {
- var result protocol.LinkedEditingRanges
- err := c.Call(ctx, "textDocument/linkedEditingRange", params, &result)
- return result, err
-}
-
-// WillCreateFiles sends a workspace/willCreateFiles request to the LSP server.
-// The will create files request is sent from the client to the server before files are actually created as long as the creation is triggered from within the client. The request can return a WorkspaceEdit which will be applied to workspace before the files are created. Hence the WorkspaceEdit can not manipulate the content of the file to be created. Since 3.16.0
-func (c *Client) WillCreateFiles(ctx context.Context, params protocol.CreateFilesParams) (protocol.WorkspaceEdit, error) {
- var result protocol.WorkspaceEdit
- err := c.Call(ctx, "workspace/willCreateFiles", params, &result)
- return result, err
-}
-
-// WillRenameFiles sends a workspace/willRenameFiles request to the LSP server.
-// The will rename files request is sent from the client to the server before files are actually renamed as long as the rename is triggered from within the client. Since 3.16.0
-func (c *Client) WillRenameFiles(ctx context.Context, params protocol.RenameFilesParams) (protocol.WorkspaceEdit, error) {
- var result protocol.WorkspaceEdit
- err := c.Call(ctx, "workspace/willRenameFiles", params, &result)
- return result, err
-}
-
-// WillDeleteFiles sends a workspace/willDeleteFiles request to the LSP server.
-// The did delete files notification is sent from the client to the server when files were deleted from within the client. Since 3.16.0
-func (c *Client) WillDeleteFiles(ctx context.Context, params protocol.DeleteFilesParams) (protocol.WorkspaceEdit, error) {
- var result protocol.WorkspaceEdit
- err := c.Call(ctx, "workspace/willDeleteFiles", params, &result)
- return result, err
-}
-
-// Moniker sends a textDocument/moniker request to the LSP server.
-// A request to get the moniker of a symbol at a given text document position. The request parameter is of type TextDocumentPositionParams. The response is of type Moniker Moniker[] or null.
-func (c *Client) Moniker(ctx context.Context, params protocol.MonikerParams) ([]protocol.Moniker, error) {
- var result []protocol.Moniker
- err := c.Call(ctx, "textDocument/moniker", params, &result)
- return result, err
-}
-
-// PrepareTypeHierarchy sends a textDocument/prepareTypeHierarchy request to the LSP server.
-// A request to result a TypeHierarchyItem in a document at a given position. Can be used as an input to a subtypes or supertypes type hierarchy. Since 3.17.0
-func (c *Client) PrepareTypeHierarchy(ctx context.Context, params protocol.TypeHierarchyPrepareParams) ([]protocol.TypeHierarchyItem, error) {
- var result []protocol.TypeHierarchyItem
- err := c.Call(ctx, "textDocument/prepareTypeHierarchy", params, &result)
- return result, err
-}
-
-// Supertypes sends a typeHierarchy/supertypes request to the LSP server.
-// A request to resolve the supertypes for a given TypeHierarchyItem. Since 3.17.0
-func (c *Client) Supertypes(ctx context.Context, params protocol.TypeHierarchySupertypesParams) ([]protocol.TypeHierarchyItem, error) {
- var result []protocol.TypeHierarchyItem
- err := c.Call(ctx, "typeHierarchy/supertypes", params, &result)
- return result, err
-}
-
-// Subtypes sends a typeHierarchy/subtypes request to the LSP server.
-// A request to resolve the subtypes for a given TypeHierarchyItem. Since 3.17.0
-func (c *Client) Subtypes(ctx context.Context, params protocol.TypeHierarchySubtypesParams) ([]protocol.TypeHierarchyItem, error) {
- var result []protocol.TypeHierarchyItem
- err := c.Call(ctx, "typeHierarchy/subtypes", params, &result)
- return result, err
-}
-
-// InlineValue sends a textDocument/inlineValue request to the LSP server.
-// A request to provide inline values in a document. The request's parameter is of type InlineValueParams, the response is of type InlineValue InlineValue[] or a Thenable that resolves to such. Since 3.17.0
-func (c *Client) InlineValue(ctx context.Context, params protocol.InlineValueParams) ([]protocol.InlineValue, error) {
- var result []protocol.InlineValue
- err := c.Call(ctx, "textDocument/inlineValue", params, &result)
- return result, err
-}
-
-// InlayHint sends a textDocument/inlayHint request to the LSP server.
-// A request to provide inlay hints in a document. The request's parameter is of type InlayHintsParams, the response is of type InlayHint InlayHint[] or a Thenable that resolves to such. Since 3.17.0
-func (c *Client) InlayHint(ctx context.Context, params protocol.InlayHintParams) ([]protocol.InlayHint, error) {
- var result []protocol.InlayHint
- err := c.Call(ctx, "textDocument/inlayHint", params, &result)
- return result, err
-}
-
-// Resolve sends a inlayHint/resolve request to the LSP server.
-// A request to resolve additional properties for an inlay hint. The request's parameter is of type InlayHint, the response is of type InlayHint or a Thenable that resolves to such. Since 3.17.0
-func (c *Client) Resolve(ctx context.Context, params protocol.InlayHint) (protocol.InlayHint, error) {
- var result protocol.InlayHint
- err := c.Call(ctx, "inlayHint/resolve", params, &result)
- return result, err
-}
-
-// Diagnostic sends a textDocument/diagnostic request to the LSP server.
-// The document diagnostic request definition. Since 3.17.0
-func (c *Client) Diagnostic(ctx context.Context, params protocol.DocumentDiagnosticParams) (protocol.DocumentDiagnosticReport, error) {
- var result protocol.DocumentDiagnosticReport
- err := c.Call(ctx, "textDocument/diagnostic", params, &result)
- return result, err
-}
-
-// DiagnosticWorkspace sends a workspace/diagnostic request to the LSP server.
-// The workspace diagnostic request definition. Since 3.17.0
-func (c *Client) DiagnosticWorkspace(ctx context.Context, params protocol.WorkspaceDiagnosticParams) (protocol.WorkspaceDiagnosticReport, error) {
- var result protocol.WorkspaceDiagnosticReport
- err := c.Call(ctx, "workspace/diagnostic", params, &result)
- return result, err
-}
-
-// InlineCompletion sends a textDocument/inlineCompletion request to the LSP server.
-// A request to provide inline completions in a document. The request's parameter is of type InlineCompletionParams, the response is of type InlineCompletion InlineCompletion[] or a Thenable that resolves to such. Since 3.18.0 PROPOSED
-func (c *Client) InlineCompletion(ctx context.Context, params protocol.InlineCompletionParams) (protocol.Or_Result_textDocument_inlineCompletion, error) {
- var result protocol.Or_Result_textDocument_inlineCompletion
- err := c.Call(ctx, "textDocument/inlineCompletion", params, &result)
- return result, err
-}
-
-// TextDocumentContent sends a workspace/textDocumentContent request to the LSP server.
-// The workspace/textDocumentContent request is sent from the client to the server to request the content of a text document. Since 3.18.0 PROPOSED
-func (c *Client) TextDocumentContent(ctx context.Context, params protocol.TextDocumentContentParams) (string, error) {
- var result string
- err := c.Call(ctx, "workspace/textDocumentContent", params, &result)
- return result, err
-}
-
-// Initialize sends a initialize request to the LSP server.
-// The initialize request is sent from the client to the server. It is sent once as the request after starting up the server. The requests parameter is of type InitializeParams the response if of type InitializeResult of a Thenable that resolves to such.
-func (c *Client) Initialize(ctx context.Context, params protocol.ParamInitialize) (protocol.InitializeResult, error) {
- var result protocol.InitializeResult
- err := c.Call(ctx, "initialize", params, &result)
- return result, err
-}
-
-// Shutdown sends a shutdown request to the LSP server.
-// A shutdown request is sent from the client to the server. It is sent once when the client decides to shutdown the server. The only notification that is sent after a shutdown request is the exit event.
-func (c *Client) Shutdown(ctx context.Context) error {
- return c.Call(ctx, "shutdown", nil, nil)
-}
-
-// WillSaveWaitUntil sends a textDocument/willSaveWaitUntil request to the LSP server.
-// A document will save request is sent from the client to the server before the document is actually saved. The request can return an array of TextEdits which will be applied to the text document before it is saved. Please note that clients might drop results if computing the text edits took too long or if a server constantly fails on this request. This is done to keep the save fast and reliable.
-func (c *Client) WillSaveWaitUntil(ctx context.Context, params protocol.WillSaveTextDocumentParams) ([]protocol.TextEdit, error) {
- var result []protocol.TextEdit
- err := c.Call(ctx, "textDocument/willSaveWaitUntil", params, &result)
- return result, err
-}
-
-// Completion sends a textDocument/completion request to the LSP server.
-// Request to request completion at a given text document position. The request's parameter is of type TextDocumentPosition the response is of type CompletionItem CompletionItem[] or CompletionList or a Thenable that resolves to such. The request can delay the computation of the CompletionItem.detail detail and CompletionItem.documentation documentation properties to the completionItem/resolve request. However, properties that are needed for the initial sorting and filtering, like sortText, filterText, insertText, and textEdit, must not be changed during resolve.
-func (c *Client) Completion(ctx context.Context, params protocol.CompletionParams) (protocol.Or_Result_textDocument_completion, error) {
- var result protocol.Or_Result_textDocument_completion
- err := c.Call(ctx, "textDocument/completion", params, &result)
- return result, err
-}
-
-// ResolveCompletionItem sends a completionItem/resolve request to the LSP server.
-// Request to resolve additional information for a given completion item.The request's parameter is of type CompletionItem the response is of type CompletionItem or a Thenable that resolves to such.
-func (c *Client) ResolveCompletionItem(ctx context.Context, params protocol.CompletionItem) (protocol.CompletionItem, error) {
- var result protocol.CompletionItem
- err := c.Call(ctx, "completionItem/resolve", params, &result)
- return result, err
-}
-
-// Hover sends a textDocument/hover request to the LSP server.
-// Request to request hover information at a given text document position. The request's parameter is of type TextDocumentPosition the response is of type Hover or a Thenable that resolves to such.
-func (c *Client) Hover(ctx context.Context, params protocol.HoverParams) (protocol.Hover, error) {
- var result protocol.Hover
- err := c.Call(ctx, "textDocument/hover", params, &result)
- return result, err
-}
-
-// SignatureHelp sends a textDocument/signatureHelp request to the LSP server.
-func (c *Client) SignatureHelp(ctx context.Context, params protocol.SignatureHelpParams) (protocol.SignatureHelp, error) {
- var result protocol.SignatureHelp
- err := c.Call(ctx, "textDocument/signatureHelp", params, &result)
- return result, err
-}
-
-// Definition sends a textDocument/definition request to the LSP server.
-// A request to resolve the definition location of a symbol at a given text document position. The request's parameter is of type TextDocumentPosition the response is of either type Definition or a typed array of DefinitionLink or a Thenable that resolves to such.
-func (c *Client) Definition(ctx context.Context, params protocol.DefinitionParams) (protocol.Or_Result_textDocument_definition, error) {
- var result protocol.Or_Result_textDocument_definition
- err := c.Call(ctx, "textDocument/definition", params, &result)
- return result, err
-}
-
-// References sends a textDocument/references request to the LSP server.
-// A request to resolve project-wide references for the symbol denoted by the given text document position. The request's parameter is of type ReferenceParams the response is of type Location Location[] or a Thenable that resolves to such.
-func (c *Client) References(ctx context.Context, params protocol.ReferenceParams) ([]protocol.Location, error) {
- var result []protocol.Location
- err := c.Call(ctx, "textDocument/references", params, &result)
- return result, err
-}
-
-// DocumentHighlight sends a textDocument/documentHighlight request to the LSP server.
-// Request to resolve a DocumentHighlight for a given text document position. The request's parameter is of type TextDocumentPosition the request response is an array of type DocumentHighlight or a Thenable that resolves to such.
-func (c *Client) DocumentHighlight(ctx context.Context, params protocol.DocumentHighlightParams) ([]protocol.DocumentHighlight, error) {
- var result []protocol.DocumentHighlight
- err := c.Call(ctx, "textDocument/documentHighlight", params, &result)
- return result, err
-}
-
-// DocumentSymbol sends a textDocument/documentSymbol request to the LSP server.
-// A request to list all symbols found in a given text document. The request's parameter is of type TextDocumentIdentifier the response is of type SymbolInformation SymbolInformation[] or a Thenable that resolves to such.
-func (c *Client) DocumentSymbol(ctx context.Context, params protocol.DocumentSymbolParams) (protocol.Or_Result_textDocument_documentSymbol, error) {
- var result protocol.Or_Result_textDocument_documentSymbol
- err := c.Call(ctx, "textDocument/documentSymbol", params, &result)
- return result, err
-}
-
-// CodeAction sends a textDocument/codeAction request to the LSP server.
-// A request to provide commands for the given text document and range.
-func (c *Client) CodeAction(ctx context.Context, params protocol.CodeActionParams) ([]protocol.Or_Result_textDocument_codeAction_Item0_Elem, error) {
- var result []protocol.Or_Result_textDocument_codeAction_Item0_Elem
- err := c.Call(ctx, "textDocument/codeAction", params, &result)
- return result, err
-}
-
-// ResolveCodeAction sends a codeAction/resolve request to the LSP server.
-// Request to resolve additional information for a given code action.The request's parameter is of type CodeAction the response is of type CodeAction or a Thenable that resolves to such.
-func (c *Client) ResolveCodeAction(ctx context.Context, params protocol.CodeAction) (protocol.CodeAction, error) {
- var result protocol.CodeAction
- err := c.Call(ctx, "codeAction/resolve", params, &result)
- return result, err
-}
-
-// Symbol sends a workspace/symbol request to the LSP server.
-// A request to list project-wide symbols matching the query string given by the WorkspaceSymbolParams. The response is of type SymbolInformation SymbolInformation[] or a Thenable that resolves to such. Since 3.17.0 - support for WorkspaceSymbol in the returned data. Clients need to advertise support for WorkspaceSymbols via the client capability workspace.symbol.resolveSupport.
-func (c *Client) Symbol(ctx context.Context, params protocol.WorkspaceSymbolParams) (protocol.Or_Result_workspace_symbol, error) {
- var result protocol.Or_Result_workspace_symbol
- err := c.Call(ctx, "workspace/symbol", params, &result)
- return result, err
-}
-
-// ResolveWorkspaceSymbol sends a workspaceSymbol/resolve request to the LSP server.
-// A request to resolve the range inside the workspace symbol's location. Since 3.17.0
-func (c *Client) ResolveWorkspaceSymbol(ctx context.Context, params protocol.WorkspaceSymbol) (protocol.WorkspaceSymbol, error) {
- var result protocol.WorkspaceSymbol
- err := c.Call(ctx, "workspaceSymbol/resolve", params, &result)
- return result, err
-}
-
-// CodeLens sends a textDocument/codeLens request to the LSP server.
-// A request to provide code lens for the given text document.
-func (c *Client) CodeLens(ctx context.Context, params protocol.CodeLensParams) ([]protocol.CodeLens, error) {
- var result []protocol.CodeLens
- err := c.Call(ctx, "textDocument/codeLens", params, &result)
- return result, err
-}
-
-// ResolveCodeLens sends a codeLens/resolve request to the LSP server.
-// A request to resolve a command for a given code lens.
-func (c *Client) ResolveCodeLens(ctx context.Context, params protocol.CodeLens) (protocol.CodeLens, error) {
- var result protocol.CodeLens
- err := c.Call(ctx, "codeLens/resolve", params, &result)
- return result, err
-}
-
-// DocumentLink sends a textDocument/documentLink request to the LSP server.
-// A request to provide document links
-func (c *Client) DocumentLink(ctx context.Context, params protocol.DocumentLinkParams) ([]protocol.DocumentLink, error) {
- var result []protocol.DocumentLink
- err := c.Call(ctx, "textDocument/documentLink", params, &result)
- return result, err
-}
-
-// ResolveDocumentLink sends a documentLink/resolve request to the LSP server.
-// Request to resolve additional information for a given document link. The request's parameter is of type DocumentLink the response is of type DocumentLink or a Thenable that resolves to such.
-func (c *Client) ResolveDocumentLink(ctx context.Context, params protocol.DocumentLink) (protocol.DocumentLink, error) {
- var result protocol.DocumentLink
- err := c.Call(ctx, "documentLink/resolve", params, &result)
- return result, err
-}
-
-// Formatting sends a textDocument/formatting request to the LSP server.
-// A request to format a whole document.
-func (c *Client) Formatting(ctx context.Context, params protocol.DocumentFormattingParams) ([]protocol.TextEdit, error) {
- var result []protocol.TextEdit
- err := c.Call(ctx, "textDocument/formatting", params, &result)
- return result, err
-}
-
-// RangeFormatting sends a textDocument/rangeFormatting request to the LSP server.
-// A request to format a range in a document.
-func (c *Client) RangeFormatting(ctx context.Context, params protocol.DocumentRangeFormattingParams) ([]protocol.TextEdit, error) {
- var result []protocol.TextEdit
- err := c.Call(ctx, "textDocument/rangeFormatting", params, &result)
- return result, err
-}
-
-// RangesFormatting sends a textDocument/rangesFormatting request to the LSP server.
-// A request to format ranges in a document. Since 3.18.0 PROPOSED
-func (c *Client) RangesFormatting(ctx context.Context, params protocol.DocumentRangesFormattingParams) ([]protocol.TextEdit, error) {
- var result []protocol.TextEdit
- err := c.Call(ctx, "textDocument/rangesFormatting", params, &result)
- return result, err
-}
-
-// OnTypeFormatting sends a textDocument/onTypeFormatting request to the LSP server.
-// A request to format a document on type.
-func (c *Client) OnTypeFormatting(ctx context.Context, params protocol.DocumentOnTypeFormattingParams) ([]protocol.TextEdit, error) {
- var result []protocol.TextEdit
- err := c.Call(ctx, "textDocument/onTypeFormatting", params, &result)
- return result, err
-}
-
-// Rename sends a textDocument/rename request to the LSP server.
-// A request to rename a symbol.
-func (c *Client) Rename(ctx context.Context, params protocol.RenameParams) (protocol.WorkspaceEdit, error) {
- var result protocol.WorkspaceEdit
- err := c.Call(ctx, "textDocument/rename", params, &result)
- return result, err
-}
-
-// PrepareRename sends a textDocument/prepareRename request to the LSP server.
-// A request to test and perform the setup necessary for a rename. Since 3.16 - support for default behavior
-func (c *Client) PrepareRename(ctx context.Context, params protocol.PrepareRenameParams) (protocol.PrepareRenameResult, error) {
- var result protocol.PrepareRenameResult
- err := c.Call(ctx, "textDocument/prepareRename", params, &result)
- return result, err
-}
-
-// ExecuteCommand sends a workspace/executeCommand request to the LSP server.
-// A request send from the client to the server to execute a command. The request might return a workspace edit which the client will apply to the workspace.
-func (c *Client) ExecuteCommand(ctx context.Context, params protocol.ExecuteCommandParams) (any, error) {
- var result any
- err := c.Call(ctx, "workspace/executeCommand", params, &result)
- return result, err
-}
-
-// DidChangeWorkspaceFolders sends a workspace/didChangeWorkspaceFolders notification to the LSP server.
-// The workspace/didChangeWorkspaceFolders notification is sent from the client to the server when the workspace folder configuration changes.
-func (c *Client) DidChangeWorkspaceFolders(ctx context.Context, params protocol.DidChangeWorkspaceFoldersParams) error {
- return c.Notify(ctx, "workspace/didChangeWorkspaceFolders", params)
-}
-
-// WorkDoneProgressCancel sends a window/workDoneProgress/cancel notification to the LSP server.
-// The window/workDoneProgress/cancel notification is sent from the client to the server to cancel a progress initiated on the server side.
-func (c *Client) WorkDoneProgressCancel(ctx context.Context, params protocol.WorkDoneProgressCancelParams) error {
- return c.Notify(ctx, "window/workDoneProgress/cancel", params)
-}
-
-// DidCreateFiles sends a workspace/didCreateFiles notification to the LSP server.
-// The did create files notification is sent from the client to the server when files were created from within the client. Since 3.16.0
-func (c *Client) DidCreateFiles(ctx context.Context, params protocol.CreateFilesParams) error {
- return c.Notify(ctx, "workspace/didCreateFiles", params)
-}
-
-// DidRenameFiles sends a workspace/didRenameFiles notification to the LSP server.
-// The did rename files notification is sent from the client to the server when files were renamed from within the client. Since 3.16.0
-func (c *Client) DidRenameFiles(ctx context.Context, params protocol.RenameFilesParams) error {
- return c.Notify(ctx, "workspace/didRenameFiles", params)
-}
-
-// DidDeleteFiles sends a workspace/didDeleteFiles notification to the LSP server.
-// The will delete files request is sent from the client to the server before files are actually deleted as long as the deletion is triggered from within the client. Since 3.16.0
-func (c *Client) DidDeleteFiles(ctx context.Context, params protocol.DeleteFilesParams) error {
- return c.Notify(ctx, "workspace/didDeleteFiles", params)
-}
-
-// DidOpenNotebookDocument sends a notebookDocument/didOpen notification to the LSP server.
-// A notification sent when a notebook opens. Since 3.17.0
-func (c *Client) DidOpenNotebookDocument(ctx context.Context, params protocol.DidOpenNotebookDocumentParams) error {
- return c.Notify(ctx, "notebookDocument/didOpen", params)
-}
-
-// DidChangeNotebookDocument sends a notebookDocument/didChange notification to the LSP server.
-func (c *Client) DidChangeNotebookDocument(ctx context.Context, params protocol.DidChangeNotebookDocumentParams) error {
- return c.Notify(ctx, "notebookDocument/didChange", params)
-}
-
-// DidSaveNotebookDocument sends a notebookDocument/didSave notification to the LSP server.
-// A notification sent when a notebook document is saved. Since 3.17.0
-func (c *Client) DidSaveNotebookDocument(ctx context.Context, params protocol.DidSaveNotebookDocumentParams) error {
- return c.Notify(ctx, "notebookDocument/didSave", params)
-}
-
-// DidCloseNotebookDocument sends a notebookDocument/didClose notification to the LSP server.
-// A notification sent when a notebook closes. Since 3.17.0
-func (c *Client) DidCloseNotebookDocument(ctx context.Context, params protocol.DidCloseNotebookDocumentParams) error {
- return c.Notify(ctx, "notebookDocument/didClose", params)
-}
-
-// Initialized sends a initialized notification to the LSP server.
-// The initialized notification is sent from the client to the server after the client is fully initialized and the server is allowed to send requests from the server to the client.
-func (c *Client) Initialized(ctx context.Context, params protocol.InitializedParams) error {
- return c.Notify(ctx, "initialized", params)
-}
-
-// Exit sends a exit notification to the LSP server.
-// The exit event is sent from the client to the server to ask the server to exit its process.
-func (c *Client) Exit(ctx context.Context) error {
- return c.Notify(ctx, "exit", nil)
-}
-
-// DidChangeConfiguration sends a workspace/didChangeConfiguration notification to the LSP server.
-// The configuration change notification is sent from the client to the server when the client's configuration has changed. The notification contains the changed configuration as defined by the language client.
-func (c *Client) DidChangeConfiguration(ctx context.Context, params protocol.DidChangeConfigurationParams) error {
- return c.Notify(ctx, "workspace/didChangeConfiguration", params)
-}
-
-// DidOpen sends a textDocument/didOpen notification to the LSP server.
-// The document open notification is sent from the client to the server to signal newly opened text documents. The document's truth is now managed by the client and the server must not try to read the document's truth using the document's uri. Open in this sense means it is managed by the client. It doesn't necessarily mean that its content is presented in an editor. An open notification must not be sent more than once without a corresponding close notification send before. This means open and close notification must be balanced and the max open count is one.
-func (c *Client) DidOpen(ctx context.Context, params protocol.DidOpenTextDocumentParams) error {
- return c.Notify(ctx, "textDocument/didOpen", params)
-}
-
-// DidChange sends a textDocument/didChange notification to the LSP server.
-// The document change notification is sent from the client to the server to signal changes to a text document.
-func (c *Client) DidChange(ctx context.Context, params protocol.DidChangeTextDocumentParams) error {
- return c.Notify(ctx, "textDocument/didChange", params)
-}
-
-// DidClose sends a textDocument/didClose notification to the LSP server.
-// The document close notification is sent from the client to the server when the document got closed in the client. The document's truth now exists where the document's uri points to (e.g. if the document's uri is a file uri the truth now exists on disk). As with the open notification the close notification is about managing the document's content. Receiving a close notification doesn't mean that the document was open in an editor before. A close notification requires a previous open notification to be sent.
-func (c *Client) DidClose(ctx context.Context, params protocol.DidCloseTextDocumentParams) error {
- return c.Notify(ctx, "textDocument/didClose", params)
-}
-
-// DidSave sends a textDocument/didSave notification to the LSP server.
-// The document save notification is sent from the client to the server when the document got saved in the client.
-func (c *Client) DidSave(ctx context.Context, params protocol.DidSaveTextDocumentParams) error {
- return c.Notify(ctx, "textDocument/didSave", params)
-}
-
-// WillSave sends a textDocument/willSave notification to the LSP server.
-// A document will save notification is sent from the client to the server before the document is actually saved.
-func (c *Client) WillSave(ctx context.Context, params protocol.WillSaveTextDocumentParams) error {
- return c.Notify(ctx, "textDocument/willSave", params)
-}
-
-// DidChangeWatchedFiles sends a workspace/didChangeWatchedFiles notification to the LSP server.
-// The watched files notification is sent from the client to the server when the client detects changes to file watched by the language client.
-func (c *Client) DidChangeWatchedFiles(ctx context.Context, params protocol.DidChangeWatchedFilesParams) error {
- return c.Notify(ctx, "workspace/didChangeWatchedFiles", params)
-}
-
-// SetTrace sends a $/setTrace notification to the LSP server.
-func (c *Client) SetTrace(ctx context.Context, params protocol.SetTraceParams) error {
- return c.Notify(ctx, "$/setTrace", params)
-}
-
-// Progress sends a $/progress notification to the LSP server.
-func (c *Client) Progress(ctx context.Context, params protocol.ProgressParams) error {
- return c.Notify(ctx, "$/progress", params)
-}
@@ -1,48 +0,0 @@
-package lsp
-
-import (
- "encoding/json"
-)
-
-// Message represents a JSON-RPC 2.0 message
-type Message struct {
- JSONRPC string `json:"jsonrpc"`
- ID int32 `json:"id,omitempty"`
- Method string `json:"method,omitempty"`
- Params json.RawMessage `json:"params,omitempty"`
- Result json.RawMessage `json:"result,omitempty"`
- Error *ResponseError `json:"error,omitempty"`
-}
-
-// ResponseError represents a JSON-RPC 2.0 error
-type ResponseError struct {
- Code int `json:"code"`
- Message string `json:"message"`
-}
-
-func NewRequest(id int32, method string, params any) (*Message, error) {
- paramsJSON, err := json.Marshal(params)
- if err != nil {
- return nil, err
- }
-
- return &Message{
- JSONRPC: "2.0",
- ID: id,
- Method: method,
- Params: paramsJSON,
- }, nil
-}
-
-func NewNotification(method string, params any) (*Message, error) {
- paramsJSON, err := json.Marshal(params)
- if err != nil {
- return nil, err
- }
-
- return &Message{
- JSONRPC: "2.0",
- Method: method,
- Params: paramsJSON,
- }, nil
-}
@@ -1,27 +0,0 @@
-Copyright 2009 The Go Authors.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
- * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
- * Neither the name of Google LLC nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -1,117 +0,0 @@
-package protocol
-
-import "fmt"
-
-// WorkspaceSymbolResult is an interface for types that represent workspace symbols
-type WorkspaceSymbolResult interface {
- GetName() string
- GetLocation() Location
- isWorkspaceSymbol() // marker method
-}
-
-func (ws *WorkspaceSymbol) GetName() string { return ws.Name }
-func (ws *WorkspaceSymbol) GetLocation() Location {
- switch v := ws.Location.Value.(type) {
- case Location:
- return v
- case LocationUriOnly:
- return Location{URI: v.URI}
- }
- return Location{}
-}
-func (ws *WorkspaceSymbol) isWorkspaceSymbol() {}
-
-func (si *SymbolInformation) GetName() string { return si.Name }
-func (si *SymbolInformation) GetLocation() Location { return si.Location }
-func (si *SymbolInformation) isWorkspaceSymbol() {}
-
-// Results converts the Value to a slice of WorkspaceSymbolResult
-func (r Or_Result_workspace_symbol) Results() ([]WorkspaceSymbolResult, error) {
- if r.Value == nil {
- return make([]WorkspaceSymbolResult, 0), nil
- }
- switch v := r.Value.(type) {
- case []WorkspaceSymbol:
- results := make([]WorkspaceSymbolResult, len(v))
- for i := range v {
- results[i] = &v[i]
- }
- return results, nil
- case []SymbolInformation:
- results := make([]WorkspaceSymbolResult, len(v))
- for i := range v {
- results[i] = &v[i]
- }
- return results, nil
- default:
- return nil, fmt.Errorf("unknown symbol type: %T", r.Value)
- }
-}
-
-// DocumentSymbolResult is an interface for types that represent document symbols
-type DocumentSymbolResult interface {
- GetRange() Range
- GetName() string
- isDocumentSymbol() // marker method
-}
-
-func (ds *DocumentSymbol) GetRange() Range { return ds.Range }
-func (ds *DocumentSymbol) GetName() string { return ds.Name }
-func (ds *DocumentSymbol) isDocumentSymbol() {}
-
-func (si *SymbolInformation) GetRange() Range { return si.Location.Range }
-
-// Note: SymbolInformation already has GetName() implemented above
-func (si *SymbolInformation) isDocumentSymbol() {}
-
-// Results converts the Value to a slice of DocumentSymbolResult
-func (r Or_Result_textDocument_documentSymbol) Results() ([]DocumentSymbolResult, error) {
- if r.Value == nil {
- return make([]DocumentSymbolResult, 0), nil
- }
- switch v := r.Value.(type) {
- case []DocumentSymbol:
- results := make([]DocumentSymbolResult, len(v))
- for i := range v {
- results[i] = &v[i]
- }
- return results, nil
- case []SymbolInformation:
- results := make([]DocumentSymbolResult, len(v))
- for i := range v {
- results[i] = &v[i]
- }
- return results, nil
- default:
- return nil, fmt.Errorf("unknown document symbol type: %T", v)
- }
-}
-
-// TextEditResult is an interface for types that can be used as text edits
-type TextEditResult interface {
- GetRange() Range
- GetNewText() string
- isTextEdit() // marker method
-}
-
-func (te *TextEdit) GetRange() Range { return te.Range }
-func (te *TextEdit) GetNewText() string { return te.NewText }
-func (te *TextEdit) isTextEdit() {}
-
-// AsTextEdit converts Or_TextDocumentEdit_edits_Elem to TextEdit
-func (e Or_TextDocumentEdit_edits_Elem) AsTextEdit() (TextEdit, error) {
- if e.Value == nil {
- return TextEdit{}, fmt.Errorf("nil text edit")
- }
- switch v := e.Value.(type) {
- case TextEdit:
- return v, nil
- case AnnotatedTextEdit:
- return TextEdit{
- Range: v.Range,
- NewText: v.NewText,
- }, nil
- default:
- return TextEdit{}, fmt.Errorf("unknown text edit type: %T", e.Value)
- }
-}
@@ -1,73 +0,0 @@
-package protocol
-
-import (
- "fmt"
- "log/slog"
-)
-
-// PatternInfo is an interface for types that represent glob patterns
-type PatternInfo interface {
- GetPattern() string
- GetBasePath() string
- isPattern() // marker method
-}
-
-// StringPattern implements PatternInfo for string patterns
-type StringPattern struct {
- Pattern string
-}
-
-func (p StringPattern) GetPattern() string { return p.Pattern }
-func (p StringPattern) GetBasePath() string { return "" }
-func (p StringPattern) isPattern() {}
-
-// RelativePatternInfo implements PatternInfo for RelativePattern
-type RelativePatternInfo struct {
- RP RelativePattern
- BasePath string
-}
-
-func (p RelativePatternInfo) GetPattern() string { return string(p.RP.Pattern) }
-func (p RelativePatternInfo) GetBasePath() string { return p.BasePath }
-func (p RelativePatternInfo) isPattern() {}
-
-// AsPattern converts GlobPattern to a PatternInfo object
-func (g *GlobPattern) AsPattern() (PatternInfo, error) {
- if g.Value == nil {
- return nil, fmt.Errorf("nil pattern")
- }
-
- var err error
-
- switch v := g.Value.(type) {
- case string:
- return StringPattern{Pattern: v}, nil
-
- case RelativePattern:
- // Handle BaseURI which could be string or DocumentUri
- basePath := ""
- switch baseURI := v.BaseURI.Value.(type) {
- case string:
- basePath, err = DocumentURI(baseURI).Path()
- if err != nil {
- slog.Error("Failed to convert URI to path", "uri", baseURI, "error", err)
- return nil, fmt.Errorf("invalid URI: %s", baseURI)
- }
-
- case DocumentURI:
- basePath, err = baseURI.Path()
- if err != nil {
- slog.Error("Failed to convert DocumentURI to path", "uri", baseURI, "error", err)
- return nil, fmt.Errorf("invalid DocumentURI: %s", baseURI)
- }
-
- default:
- return nil, fmt.Errorf("unknown BaseURI type: %T", v.BaseURI.Value)
- }
-
- return RelativePatternInfo{RP: v, BasePath: basePath}, nil
-
- default:
- return nil, fmt.Errorf("unknown pattern type: %T", g.Value)
- }
-}
@@ -1,30 +0,0 @@
-package protocol
-
-var TableKindMap = map[SymbolKind]string{
- File: "File",
- Module: "Module",
- Namespace: "Namespace",
- Package: "Package",
- Class: "Class",
- Method: "Method",
- Property: "Property",
- Field: "Field",
- Constructor: "Constructor",
- Enum: "Enum",
- Interface: "Interface",
- Function: "Function",
- Variable: "Variable",
- Constant: "Constant",
- String: "String",
- Number: "Number",
- Boolean: "Boolean",
- Array: "Array",
- Object: "Object",
- Key: "Key",
- Null: "Null",
- EnumMember: "EnumMember",
- Struct: "Struct",
- Event: "Event",
- Operator: "Operator",
- TypeParameter: "TypeParameter",
-}
@@ -1,81 +0,0 @@
-// Copyright 2022 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package protocol
-
-import (
- "encoding/json"
- "fmt"
-)
-
-// DocumentChange is a union of various file edit operations.
-//
-// Exactly one field of this struct is non-nil; see [DocumentChange.Valid].
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#resourceChanges
-type DocumentChange struct {
- TextDocumentEdit *TextDocumentEdit
- CreateFile *CreateFile
- RenameFile *RenameFile
- DeleteFile *DeleteFile
-}
-
-// Valid reports whether the DocumentChange sum-type value is valid,
-// that is, exactly one of create, delete, edit, or rename.
-func (d DocumentChange) Valid() bool {
- n := 0
- if d.TextDocumentEdit != nil {
- n++
- }
- if d.CreateFile != nil {
- n++
- }
- if d.RenameFile != nil {
- n++
- }
- if d.DeleteFile != nil {
- n++
- }
- return n == 1
-}
-
-func (d *DocumentChange) UnmarshalJSON(data []byte) error {
- var m map[string]any
- if err := json.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if _, ok := m["textDocument"]; ok {
- d.TextDocumentEdit = new(TextDocumentEdit)
- return json.Unmarshal(data, d.TextDocumentEdit)
- }
-
- // The {Create,Rename,Delete}File types all share a 'kind' field.
- kind := m["kind"]
- switch kind {
- case "create":
- d.CreateFile = new(CreateFile)
- return json.Unmarshal(data, d.CreateFile)
- case "rename":
- d.RenameFile = new(RenameFile)
- return json.Unmarshal(data, d.RenameFile)
- case "delete":
- d.DeleteFile = new(DeleteFile)
- return json.Unmarshal(data, d.DeleteFile)
- }
- return fmt.Errorf("DocumentChanges: unexpected kind: %q", kind)
-}
-
-func (d *DocumentChange) MarshalJSON() ([]byte, error) {
- if d.TextDocumentEdit != nil {
- return json.Marshal(d.TextDocumentEdit)
- } else if d.CreateFile != nil {
- return json.Marshal(d.CreateFile)
- } else if d.RenameFile != nil {
- return json.Marshal(d.RenameFile)
- } else if d.DeleteFile != nil {
- return json.Marshal(d.DeleteFile)
- }
- return nil, fmt.Errorf("empty DocumentChanges union value")
-}
@@ -1,3072 +0,0 @@
-// Copyright 2023 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Code generated for LSP. DO NOT EDIT.
-
-package protocol
-
-// Code generated from protocol/metaModel.json at ref release/protocol/3.17.6-next.9 (hash c94395b5da53729e6dff931293b051009ccaaaa4).
-// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.6-next.9/protocol/metaModel.json
-// LSP metaData.version = 3.17.0.
-
-import "bytes"
-import "encoding/json"
-
-import "fmt"
-
-// UnmarshalError indicates that a JSON value did not conform to
-// one of the expected cases of an LSP union type.
-type UnmarshalError struct {
- msg string
-}
-
-func (e UnmarshalError) Error() string {
- return e.msg
-}
-func (t Or_CancelParams_id) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case int32:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [int32 string]", t)
-}
-
-func (t *Or_CancelParams_id) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder41 := json.NewDecoder(bytes.NewReader(x))
- decoder41.DisallowUnknownFields()
- var int32Val int32
- if err := decoder41.Decode(&int32Val); err == nil {
- t.Value = int32Val
- return nil
- }
- decoder42 := json.NewDecoder(bytes.NewReader(x))
- decoder42.DisallowUnknownFields()
- var stringVal string
- if err := decoder42.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [int32 string]"}
-}
-
-func (t Or_ClientSemanticTokensRequestOptions_full) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case ClientSemanticTokensRequestFullDelta:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [ClientSemanticTokensRequestFullDelta bool]", t)
-}
-
-func (t *Or_ClientSemanticTokensRequestOptions_full) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder220 := json.NewDecoder(bytes.NewReader(x))
- decoder220.DisallowUnknownFields()
- var boolVal bool
- if err := decoder220.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder221 := json.NewDecoder(bytes.NewReader(x))
- decoder221.DisallowUnknownFields()
- var h221 ClientSemanticTokensRequestFullDelta
- if err := decoder221.Decode(&h221); err == nil {
- t.Value = h221
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [ClientSemanticTokensRequestFullDelta bool]"}
-}
-
-func (t Or_ClientSemanticTokensRequestOptions_range) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Lit_ClientSemanticTokensRequestOptions_range_Item1:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Lit_ClientSemanticTokensRequestOptions_range_Item1 bool]", t)
-}
-
-func (t *Or_ClientSemanticTokensRequestOptions_range) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder217 := json.NewDecoder(bytes.NewReader(x))
- decoder217.DisallowUnknownFields()
- var boolVal bool
- if err := decoder217.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder218 := json.NewDecoder(bytes.NewReader(x))
- decoder218.DisallowUnknownFields()
- var h218 Lit_ClientSemanticTokensRequestOptions_range_Item1
- if err := decoder218.Decode(&h218); err == nil {
- t.Value = h218
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Lit_ClientSemanticTokensRequestOptions_range_Item1 bool]"}
-}
-
-func (t Or_CompletionItemDefaults_editRange) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case EditRangeWithInsertReplace:
- return json.Marshal(x)
- case Range:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [EditRangeWithInsertReplace Range]", t)
-}
-
-func (t *Or_CompletionItemDefaults_editRange) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder183 := json.NewDecoder(bytes.NewReader(x))
- decoder183.DisallowUnknownFields()
- var h183 EditRangeWithInsertReplace
- if err := decoder183.Decode(&h183); err == nil {
- t.Value = h183
- return nil
- }
- decoder184 := json.NewDecoder(bytes.NewReader(x))
- decoder184.DisallowUnknownFields()
- var h184 Range
- if err := decoder184.Decode(&h184); err == nil {
- t.Value = h184
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [EditRangeWithInsertReplace Range]"}
-}
-
-func (t Or_CompletionItem_documentation) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkupContent:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t)
-}
-
-func (t *Or_CompletionItem_documentation) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder25 := json.NewDecoder(bytes.NewReader(x))
- decoder25.DisallowUnknownFields()
- var stringVal string
- if err := decoder25.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder26 := json.NewDecoder(bytes.NewReader(x))
- decoder26.DisallowUnknownFields()
- var h26 MarkupContent
- if err := decoder26.Decode(&h26); err == nil {
- t.Value = h26
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"}
-}
-
-func (t Or_CompletionItem_textEdit) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InsertReplaceEdit:
- return json.Marshal(x)
- case TextEdit:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InsertReplaceEdit TextEdit]", t)
-}
-
-func (t *Or_CompletionItem_textEdit) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder29 := json.NewDecoder(bytes.NewReader(x))
- decoder29.DisallowUnknownFields()
- var h29 InsertReplaceEdit
- if err := decoder29.Decode(&h29); err == nil {
- t.Value = h29
- return nil
- }
- decoder30 := json.NewDecoder(bytes.NewReader(x))
- decoder30.DisallowUnknownFields()
- var h30 TextEdit
- if err := decoder30.Decode(&h30); err == nil {
- t.Value = h30
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InsertReplaceEdit TextEdit]"}
-}
-
-func (t Or_Declaration) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Location:
- return json.Marshal(x)
- case []Location:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Location []Location]", t)
-}
-
-func (t *Or_Declaration) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder237 := json.NewDecoder(bytes.NewReader(x))
- decoder237.DisallowUnknownFields()
- var h237 Location
- if err := decoder237.Decode(&h237); err == nil {
- t.Value = h237
- return nil
- }
- decoder238 := json.NewDecoder(bytes.NewReader(x))
- decoder238.DisallowUnknownFields()
- var h238 []Location
- if err := decoder238.Decode(&h238); err == nil {
- t.Value = h238
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Location []Location]"}
-}
-
-func (t Or_Definition) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Location:
- return json.Marshal(x)
- case []Location:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Location []Location]", t)
-}
-
-func (t *Or_Definition) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder224 := json.NewDecoder(bytes.NewReader(x))
- decoder224.DisallowUnknownFields()
- var h224 Location
- if err := decoder224.Decode(&h224); err == nil {
- t.Value = h224
- return nil
- }
- decoder225 := json.NewDecoder(bytes.NewReader(x))
- decoder225.DisallowUnknownFields()
- var h225 []Location
- if err := decoder225.Decode(&h225); err == nil {
- t.Value = h225
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Location []Location]"}
-}
-
-func (t Or_Diagnostic_code) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case int32:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [int32 string]", t)
-}
-
-func (t *Or_Diagnostic_code) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder179 := json.NewDecoder(bytes.NewReader(x))
- decoder179.DisallowUnknownFields()
- var int32Val int32
- if err := decoder179.Decode(&int32Val); err == nil {
- t.Value = int32Val
- return nil
- }
- decoder180 := json.NewDecoder(bytes.NewReader(x))
- decoder180.DisallowUnknownFields()
- var stringVal string
- if err := decoder180.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [int32 string]"}
-}
-
-func (t Or_DidChangeConfigurationRegistrationOptions_section) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case []string:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [[]string string]", t)
-}
-
-func (t *Or_DidChangeConfigurationRegistrationOptions_section) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder22 := json.NewDecoder(bytes.NewReader(x))
- decoder22.DisallowUnknownFields()
- var stringVal string
- if err := decoder22.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder23 := json.NewDecoder(bytes.NewReader(x))
- decoder23.DisallowUnknownFields()
- var h23 []string
- if err := decoder23.Decode(&h23); err == nil {
- t.Value = h23
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [[]string string]"}
-}
-
-func (t Or_DocumentDiagnosticReport) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case RelatedFullDocumentDiagnosticReport:
- return json.Marshal(x)
- case RelatedUnchangedDocumentDiagnosticReport:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [RelatedFullDocumentDiagnosticReport RelatedUnchangedDocumentDiagnosticReport]", t)
-}
-
-func (t *Or_DocumentDiagnosticReport) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder247 := json.NewDecoder(bytes.NewReader(x))
- decoder247.DisallowUnknownFields()
- var h247 RelatedFullDocumentDiagnosticReport
- if err := decoder247.Decode(&h247); err == nil {
- t.Value = h247
- return nil
- }
- decoder248 := json.NewDecoder(bytes.NewReader(x))
- decoder248.DisallowUnknownFields()
- var h248 RelatedUnchangedDocumentDiagnosticReport
- if err := decoder248.Decode(&h248); err == nil {
- t.Value = h248
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [RelatedFullDocumentDiagnosticReport RelatedUnchangedDocumentDiagnosticReport]"}
-}
-
-func (t Or_DocumentDiagnosticReportPartialResult_relatedDocuments_Value) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case FullDocumentDiagnosticReport:
- return json.Marshal(x)
- case UnchangedDocumentDiagnosticReport:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]", t)
-}
-
-func (t *Or_DocumentDiagnosticReportPartialResult_relatedDocuments_Value) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder16 := json.NewDecoder(bytes.NewReader(x))
- decoder16.DisallowUnknownFields()
- var h16 FullDocumentDiagnosticReport
- if err := decoder16.Decode(&h16); err == nil {
- t.Value = h16
- return nil
- }
- decoder17 := json.NewDecoder(bytes.NewReader(x))
- decoder17.DisallowUnknownFields()
- var h17 UnchangedDocumentDiagnosticReport
- if err := decoder17.Decode(&h17); err == nil {
- t.Value = h17
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]"}
-}
-
-func (t Or_DocumentFilter) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookCellTextDocumentFilter:
- return json.Marshal(x)
- case TextDocumentFilter:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookCellTextDocumentFilter TextDocumentFilter]", t)
-}
-
-func (t *Or_DocumentFilter) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder270 := json.NewDecoder(bytes.NewReader(x))
- decoder270.DisallowUnknownFields()
- var h270 NotebookCellTextDocumentFilter
- if err := decoder270.Decode(&h270); err == nil {
- t.Value = h270
- return nil
- }
- decoder271 := json.NewDecoder(bytes.NewReader(x))
- decoder271.DisallowUnknownFields()
- var h271 TextDocumentFilter
- if err := decoder271.Decode(&h271); err == nil {
- t.Value = h271
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookCellTextDocumentFilter TextDocumentFilter]"}
-}
-
-func (t Or_GlobPattern) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Pattern:
- return json.Marshal(x)
- case RelativePattern:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Pattern RelativePattern]", t)
-}
-
-func (t *Or_GlobPattern) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder274 := json.NewDecoder(bytes.NewReader(x))
- decoder274.DisallowUnknownFields()
- var h274 Pattern
- if err := decoder274.Decode(&h274); err == nil {
- t.Value = h274
- return nil
- }
- decoder275 := json.NewDecoder(bytes.NewReader(x))
- decoder275.DisallowUnknownFields()
- var h275 RelativePattern
- if err := decoder275.Decode(&h275); err == nil {
- t.Value = h275
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Pattern RelativePattern]"}
-}
-
-func (t Or_Hover_contents) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkedString:
- return json.Marshal(x)
- case MarkupContent:
- return json.Marshal(x)
- case []MarkedString:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkedString MarkupContent []MarkedString]", t)
-}
-
-func (t *Or_Hover_contents) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder34 := json.NewDecoder(bytes.NewReader(x))
- decoder34.DisallowUnknownFields()
- var h34 MarkedString
- if err := decoder34.Decode(&h34); err == nil {
- t.Value = h34
- return nil
- }
- decoder35 := json.NewDecoder(bytes.NewReader(x))
- decoder35.DisallowUnknownFields()
- var h35 MarkupContent
- if err := decoder35.Decode(&h35); err == nil {
- t.Value = h35
- return nil
- }
- decoder36 := json.NewDecoder(bytes.NewReader(x))
- decoder36.DisallowUnknownFields()
- var h36 []MarkedString
- if err := decoder36.Decode(&h36); err == nil {
- t.Value = h36
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkedString MarkupContent []MarkedString]"}
-}
-
-func (t Or_InlayHintLabelPart_tooltip) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkupContent:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t)
-}
-
-func (t *Or_InlayHintLabelPart_tooltip) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder56 := json.NewDecoder(bytes.NewReader(x))
- decoder56.DisallowUnknownFields()
- var stringVal string
- if err := decoder56.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder57 := json.NewDecoder(bytes.NewReader(x))
- decoder57.DisallowUnknownFields()
- var h57 MarkupContent
- if err := decoder57.Decode(&h57); err == nil {
- t.Value = h57
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"}
-}
-
-func (t Or_InlayHint_label) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case []InlayHintLabelPart:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [[]InlayHintLabelPart string]", t)
-}
-
-func (t *Or_InlayHint_label) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder9 := json.NewDecoder(bytes.NewReader(x))
- decoder9.DisallowUnknownFields()
- var stringVal string
- if err := decoder9.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder10 := json.NewDecoder(bytes.NewReader(x))
- decoder10.DisallowUnknownFields()
- var h10 []InlayHintLabelPart
- if err := decoder10.Decode(&h10); err == nil {
- t.Value = h10
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [[]InlayHintLabelPart string]"}
-}
-
-func (t Or_InlayHint_tooltip) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkupContent:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t)
-}
-
-func (t *Or_InlayHint_tooltip) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder12 := json.NewDecoder(bytes.NewReader(x))
- decoder12.DisallowUnknownFields()
- var stringVal string
- if err := decoder12.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder13 := json.NewDecoder(bytes.NewReader(x))
- decoder13.DisallowUnknownFields()
- var h13 MarkupContent
- if err := decoder13.Decode(&h13); err == nil {
- t.Value = h13
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"}
-}
-
-func (t Or_InlineCompletionItem_insertText) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case StringValue:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [StringValue string]", t)
-}
-
-func (t *Or_InlineCompletionItem_insertText) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder19 := json.NewDecoder(bytes.NewReader(x))
- decoder19.DisallowUnknownFields()
- var stringVal string
- if err := decoder19.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder20 := json.NewDecoder(bytes.NewReader(x))
- decoder20.DisallowUnknownFields()
- var h20 StringValue
- if err := decoder20.Decode(&h20); err == nil {
- t.Value = h20
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [StringValue string]"}
-}
-
-func (t Or_InlineValue) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InlineValueEvaluatableExpression:
- return json.Marshal(x)
- case InlineValueText:
- return json.Marshal(x)
- case InlineValueVariableLookup:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InlineValueEvaluatableExpression InlineValueText InlineValueVariableLookup]", t)
-}
-
-func (t *Or_InlineValue) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder242 := json.NewDecoder(bytes.NewReader(x))
- decoder242.DisallowUnknownFields()
- var h242 InlineValueEvaluatableExpression
- if err := decoder242.Decode(&h242); err == nil {
- t.Value = h242
- return nil
- }
- decoder243 := json.NewDecoder(bytes.NewReader(x))
- decoder243.DisallowUnknownFields()
- var h243 InlineValueText
- if err := decoder243.Decode(&h243); err == nil {
- t.Value = h243
- return nil
- }
- decoder244 := json.NewDecoder(bytes.NewReader(x))
- decoder244.DisallowUnknownFields()
- var h244 InlineValueVariableLookup
- if err := decoder244.Decode(&h244); err == nil {
- t.Value = h244
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InlineValueEvaluatableExpression InlineValueText InlineValueVariableLookup]"}
-}
-
-func (t Or_LSPAny) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case LSPArray:
- return json.Marshal(x)
- case LSPObject:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case float64:
- return json.Marshal(x)
- case int32:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case uint32:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [LSPArray LSPObject bool float64 int32 string uint32]", t)
-}
-
-func (t *Or_LSPAny) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder228 := json.NewDecoder(bytes.NewReader(x))
- decoder228.DisallowUnknownFields()
- var boolVal bool
- if err := decoder228.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder229 := json.NewDecoder(bytes.NewReader(x))
- decoder229.DisallowUnknownFields()
- var float64Val float64
- if err := decoder229.Decode(&float64Val); err == nil {
- t.Value = float64Val
- return nil
- }
- decoder230 := json.NewDecoder(bytes.NewReader(x))
- decoder230.DisallowUnknownFields()
- var int32Val int32
- if err := decoder230.Decode(&int32Val); err == nil {
- t.Value = int32Val
- return nil
- }
- decoder231 := json.NewDecoder(bytes.NewReader(x))
- decoder231.DisallowUnknownFields()
- var stringVal string
- if err := decoder231.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder232 := json.NewDecoder(bytes.NewReader(x))
- decoder232.DisallowUnknownFields()
- var uint32Val uint32
- if err := decoder232.Decode(&uint32Val); err == nil {
- t.Value = uint32Val
- return nil
- }
- decoder233 := json.NewDecoder(bytes.NewReader(x))
- decoder233.DisallowUnknownFields()
- var h233 LSPArray
- if err := decoder233.Decode(&h233); err == nil {
- t.Value = h233
- return nil
- }
- decoder234 := json.NewDecoder(bytes.NewReader(x))
- decoder234.DisallowUnknownFields()
- var h234 LSPObject
- if err := decoder234.Decode(&h234); err == nil {
- t.Value = h234
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [LSPArray LSPObject bool float64 int32 string uint32]"}
-}
-
-func (t Or_MarkedString) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkedStringWithLanguage:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkedStringWithLanguage string]", t)
-}
-
-func (t *Or_MarkedString) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder266 := json.NewDecoder(bytes.NewReader(x))
- decoder266.DisallowUnknownFields()
- var stringVal string
- if err := decoder266.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder267 := json.NewDecoder(bytes.NewReader(x))
- decoder267.DisallowUnknownFields()
- var h267 MarkedStringWithLanguage
- if err := decoder267.Decode(&h267); err == nil {
- t.Value = h267
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkedStringWithLanguage string]"}
-}
-
-func (t Or_NotebookCellTextDocumentFilter_notebook) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentFilter:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilter string]", t)
-}
-
-func (t *Or_NotebookCellTextDocumentFilter_notebook) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder208 := json.NewDecoder(bytes.NewReader(x))
- decoder208.DisallowUnknownFields()
- var stringVal string
- if err := decoder208.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder209 := json.NewDecoder(bytes.NewReader(x))
- decoder209.DisallowUnknownFields()
- var h209 NotebookDocumentFilter
- if err := decoder209.Decode(&h209); err == nil {
- t.Value = h209
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilter string]"}
-}
-
-func (t Or_NotebookDocumentFilter) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentFilterNotebookType:
- return json.Marshal(x)
- case NotebookDocumentFilterPattern:
- return json.Marshal(x)
- case NotebookDocumentFilterScheme:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilterNotebookType NotebookDocumentFilterPattern NotebookDocumentFilterScheme]", t)
-}
-
-func (t *Or_NotebookDocumentFilter) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder285 := json.NewDecoder(bytes.NewReader(x))
- decoder285.DisallowUnknownFields()
- var h285 NotebookDocumentFilterNotebookType
- if err := decoder285.Decode(&h285); err == nil {
- t.Value = h285
- return nil
- }
- decoder286 := json.NewDecoder(bytes.NewReader(x))
- decoder286.DisallowUnknownFields()
- var h286 NotebookDocumentFilterPattern
- if err := decoder286.Decode(&h286); err == nil {
- t.Value = h286
- return nil
- }
- decoder287 := json.NewDecoder(bytes.NewReader(x))
- decoder287.DisallowUnknownFields()
- var h287 NotebookDocumentFilterScheme
- if err := decoder287.Decode(&h287); err == nil {
- t.Value = h287
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilterNotebookType NotebookDocumentFilterPattern NotebookDocumentFilterScheme]"}
-}
-
-func (t Or_NotebookDocumentFilterWithCells_notebook) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentFilter:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilter string]", t)
-}
-
-func (t *Or_NotebookDocumentFilterWithCells_notebook) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder192 := json.NewDecoder(bytes.NewReader(x))
- decoder192.DisallowUnknownFields()
- var stringVal string
- if err := decoder192.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder193 := json.NewDecoder(bytes.NewReader(x))
- decoder193.DisallowUnknownFields()
- var h193 NotebookDocumentFilter
- if err := decoder193.Decode(&h193); err == nil {
- t.Value = h193
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilter string]"}
-}
-
-func (t Or_NotebookDocumentFilterWithNotebook_notebook) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentFilter:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilter string]", t)
-}
-
-func (t *Or_NotebookDocumentFilterWithNotebook_notebook) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder189 := json.NewDecoder(bytes.NewReader(x))
- decoder189.DisallowUnknownFields()
- var stringVal string
- if err := decoder189.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder190 := json.NewDecoder(bytes.NewReader(x))
- decoder190.DisallowUnknownFields()
- var h190 NotebookDocumentFilter
- if err := decoder190.Decode(&h190); err == nil {
- t.Value = h190
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilter string]"}
-}
-
-func (t Or_NotebookDocumentSyncOptions_notebookSelector_Elem) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentFilterWithCells:
- return json.Marshal(x)
- case NotebookDocumentFilterWithNotebook:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilterWithCells NotebookDocumentFilterWithNotebook]", t)
-}
-
-func (t *Or_NotebookDocumentSyncOptions_notebookSelector_Elem) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder68 := json.NewDecoder(bytes.NewReader(x))
- decoder68.DisallowUnknownFields()
- var h68 NotebookDocumentFilterWithCells
- if err := decoder68.Decode(&h68); err == nil {
- t.Value = h68
- return nil
- }
- decoder69 := json.NewDecoder(bytes.NewReader(x))
- decoder69.DisallowUnknownFields()
- var h69 NotebookDocumentFilterWithNotebook
- if err := decoder69.Decode(&h69); err == nil {
- t.Value = h69
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilterWithCells NotebookDocumentFilterWithNotebook]"}
-}
-
-func (t Or_ParameterInformation_documentation) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkupContent:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t)
-}
-
-func (t *Or_ParameterInformation_documentation) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder205 := json.NewDecoder(bytes.NewReader(x))
- decoder205.DisallowUnknownFields()
- var stringVal string
- if err := decoder205.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder206 := json.NewDecoder(bytes.NewReader(x))
- decoder206.DisallowUnknownFields()
- var h206 MarkupContent
- if err := decoder206.Decode(&h206); err == nil {
- t.Value = h206
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"}
-}
-
-func (t Or_ParameterInformation_label) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Tuple_ParameterInformation_label_Item1:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Tuple_ParameterInformation_label_Item1 string]", t)
-}
-
-func (t *Or_ParameterInformation_label) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder202 := json.NewDecoder(bytes.NewReader(x))
- decoder202.DisallowUnknownFields()
- var stringVal string
- if err := decoder202.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder203 := json.NewDecoder(bytes.NewReader(x))
- decoder203.DisallowUnknownFields()
- var h203 Tuple_ParameterInformation_label_Item1
- if err := decoder203.Decode(&h203); err == nil {
- t.Value = h203
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Tuple_ParameterInformation_label_Item1 string]"}
-}
-
-func (t Or_PrepareRenameResult) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case PrepareRenameDefaultBehavior:
- return json.Marshal(x)
- case PrepareRenamePlaceholder:
- return json.Marshal(x)
- case Range:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [PrepareRenameDefaultBehavior PrepareRenamePlaceholder Range]", t)
-}
-
-func (t *Or_PrepareRenameResult) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder252 := json.NewDecoder(bytes.NewReader(x))
- decoder252.DisallowUnknownFields()
- var h252 PrepareRenameDefaultBehavior
- if err := decoder252.Decode(&h252); err == nil {
- t.Value = h252
- return nil
- }
- decoder253 := json.NewDecoder(bytes.NewReader(x))
- decoder253.DisallowUnknownFields()
- var h253 PrepareRenamePlaceholder
- if err := decoder253.Decode(&h253); err == nil {
- t.Value = h253
- return nil
- }
- decoder254 := json.NewDecoder(bytes.NewReader(x))
- decoder254.DisallowUnknownFields()
- var h254 Range
- if err := decoder254.Decode(&h254); err == nil {
- t.Value = h254
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [PrepareRenameDefaultBehavior PrepareRenamePlaceholder Range]"}
-}
-
-func (t Or_ProgressToken) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case int32:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [int32 string]", t)
-}
-
-func (t *Or_ProgressToken) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder255 := json.NewDecoder(bytes.NewReader(x))
- decoder255.DisallowUnknownFields()
- var int32Val int32
- if err := decoder255.Decode(&int32Val); err == nil {
- t.Value = int32Val
- return nil
- }
- decoder256 := json.NewDecoder(bytes.NewReader(x))
- decoder256.DisallowUnknownFields()
- var stringVal string
- if err := decoder256.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [int32 string]"}
-}
-
-func (t Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case FullDocumentDiagnosticReport:
- return json.Marshal(x)
- case UnchangedDocumentDiagnosticReport:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]", t)
-}
-
-func (t *Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder60 := json.NewDecoder(bytes.NewReader(x))
- decoder60.DisallowUnknownFields()
- var h60 FullDocumentDiagnosticReport
- if err := decoder60.Decode(&h60); err == nil {
- t.Value = h60
- return nil
- }
- decoder61 := json.NewDecoder(bytes.NewReader(x))
- decoder61.DisallowUnknownFields()
- var h61 UnchangedDocumentDiagnosticReport
- if err := decoder61.Decode(&h61); err == nil {
- t.Value = h61
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]"}
-}
-
-func (t Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case FullDocumentDiagnosticReport:
- return json.Marshal(x)
- case UnchangedDocumentDiagnosticReport:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]", t)
-}
-
-func (t *Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder64 := json.NewDecoder(bytes.NewReader(x))
- decoder64.DisallowUnknownFields()
- var h64 FullDocumentDiagnosticReport
- if err := decoder64.Decode(&h64); err == nil {
- t.Value = h64
- return nil
- }
- decoder65 := json.NewDecoder(bytes.NewReader(x))
- decoder65.DisallowUnknownFields()
- var h65 UnchangedDocumentDiagnosticReport
- if err := decoder65.Decode(&h65); err == nil {
- t.Value = h65
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]"}
-}
-
-func (t Or_RelativePattern_baseUri) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case URI:
- return json.Marshal(x)
- case WorkspaceFolder:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [URI WorkspaceFolder]", t)
-}
-
-func (t *Or_RelativePattern_baseUri) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder214 := json.NewDecoder(bytes.NewReader(x))
- decoder214.DisallowUnknownFields()
- var h214 URI
- if err := decoder214.Decode(&h214); err == nil {
- t.Value = h214
- return nil
- }
- decoder215 := json.NewDecoder(bytes.NewReader(x))
- decoder215.DisallowUnknownFields()
- var h215 WorkspaceFolder
- if err := decoder215.Decode(&h215); err == nil {
- t.Value = h215
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [URI WorkspaceFolder]"}
-}
-
-func (t Or_Result_textDocument_codeAction_Item0_Elem) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case CodeAction:
- return json.Marshal(x)
- case Command:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [CodeAction Command]", t)
-}
-
-func (t *Or_Result_textDocument_codeAction_Item0_Elem) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder322 := json.NewDecoder(bytes.NewReader(x))
- decoder322.DisallowUnknownFields()
- var h322 CodeAction
- if err := decoder322.Decode(&h322); err == nil {
- t.Value = h322
- return nil
- }
- decoder323 := json.NewDecoder(bytes.NewReader(x))
- decoder323.DisallowUnknownFields()
- var h323 Command
- if err := decoder323.Decode(&h323); err == nil {
- t.Value = h323
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [CodeAction Command]"}
-}
-
-func (t Or_Result_textDocument_completion) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case CompletionList:
- return json.Marshal(x)
- case []CompletionItem:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [CompletionList []CompletionItem]", t)
-}
-
-func (t *Or_Result_textDocument_completion) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder310 := json.NewDecoder(bytes.NewReader(x))
- decoder310.DisallowUnknownFields()
- var h310 CompletionList
- if err := decoder310.Decode(&h310); err == nil {
- t.Value = h310
- return nil
- }
- decoder311 := json.NewDecoder(bytes.NewReader(x))
- decoder311.DisallowUnknownFields()
- var h311 []CompletionItem
- if err := decoder311.Decode(&h311); err == nil {
- t.Value = h311
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [CompletionList []CompletionItem]"}
-}
-
-func (t Or_Result_textDocument_declaration) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Declaration:
- return json.Marshal(x)
- case []DeclarationLink:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Declaration []DeclarationLink]", t)
-}
-
-func (t *Or_Result_textDocument_declaration) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder298 := json.NewDecoder(bytes.NewReader(x))
- decoder298.DisallowUnknownFields()
- var h298 Declaration
- if err := decoder298.Decode(&h298); err == nil {
- t.Value = h298
- return nil
- }
- decoder299 := json.NewDecoder(bytes.NewReader(x))
- decoder299.DisallowUnknownFields()
- var h299 []DeclarationLink
- if err := decoder299.Decode(&h299); err == nil {
- t.Value = h299
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Declaration []DeclarationLink]"}
-}
-
-func (t Or_Result_textDocument_definition) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Definition:
- return json.Marshal(x)
- case []DefinitionLink:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Definition []DefinitionLink]", t)
-}
-
-func (t *Or_Result_textDocument_definition) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder314 := json.NewDecoder(bytes.NewReader(x))
- decoder314.DisallowUnknownFields()
- var h314 Definition
- if err := decoder314.Decode(&h314); err == nil {
- t.Value = h314
- return nil
- }
- decoder315 := json.NewDecoder(bytes.NewReader(x))
- decoder315.DisallowUnknownFields()
- var h315 []DefinitionLink
- if err := decoder315.Decode(&h315); err == nil {
- t.Value = h315
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Definition []DefinitionLink]"}
-}
-
-func (t Or_Result_textDocument_documentSymbol) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case []DocumentSymbol:
- return json.Marshal(x)
- case []SymbolInformation:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [[]DocumentSymbol []SymbolInformation]", t)
-}
-
-func (t *Or_Result_textDocument_documentSymbol) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder318 := json.NewDecoder(bytes.NewReader(x))
- decoder318.DisallowUnknownFields()
- var h318 []DocumentSymbol
- if err := decoder318.Decode(&h318); err == nil {
- t.Value = h318
- return nil
- }
- decoder319 := json.NewDecoder(bytes.NewReader(x))
- decoder319.DisallowUnknownFields()
- var h319 []SymbolInformation
- if err := decoder319.Decode(&h319); err == nil {
- t.Value = h319
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [[]DocumentSymbol []SymbolInformation]"}
-}
-
-func (t Or_Result_textDocument_implementation) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Definition:
- return json.Marshal(x)
- case []DefinitionLink:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Definition []DefinitionLink]", t)
-}
-
-func (t *Or_Result_textDocument_implementation) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder290 := json.NewDecoder(bytes.NewReader(x))
- decoder290.DisallowUnknownFields()
- var h290 Definition
- if err := decoder290.Decode(&h290); err == nil {
- t.Value = h290
- return nil
- }
- decoder291 := json.NewDecoder(bytes.NewReader(x))
- decoder291.DisallowUnknownFields()
- var h291 []DefinitionLink
- if err := decoder291.Decode(&h291); err == nil {
- t.Value = h291
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Definition []DefinitionLink]"}
-}
-
-func (t Or_Result_textDocument_inlineCompletion) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InlineCompletionList:
- return json.Marshal(x)
- case []InlineCompletionItem:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InlineCompletionList []InlineCompletionItem]", t)
-}
-
-func (t *Or_Result_textDocument_inlineCompletion) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder306 := json.NewDecoder(bytes.NewReader(x))
- decoder306.DisallowUnknownFields()
- var h306 InlineCompletionList
- if err := decoder306.Decode(&h306); err == nil {
- t.Value = h306
- return nil
- }
- decoder307 := json.NewDecoder(bytes.NewReader(x))
- decoder307.DisallowUnknownFields()
- var h307 []InlineCompletionItem
- if err := decoder307.Decode(&h307); err == nil {
- t.Value = h307
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InlineCompletionList []InlineCompletionItem]"}
-}
-
-func (t Or_Result_textDocument_semanticTokens_full_delta) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case SemanticTokens:
- return json.Marshal(x)
- case SemanticTokensDelta:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [SemanticTokens SemanticTokensDelta]", t)
-}
-
-func (t *Or_Result_textDocument_semanticTokens_full_delta) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder302 := json.NewDecoder(bytes.NewReader(x))
- decoder302.DisallowUnknownFields()
- var h302 SemanticTokens
- if err := decoder302.Decode(&h302); err == nil {
- t.Value = h302
- return nil
- }
- decoder303 := json.NewDecoder(bytes.NewReader(x))
- decoder303.DisallowUnknownFields()
- var h303 SemanticTokensDelta
- if err := decoder303.Decode(&h303); err == nil {
- t.Value = h303
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [SemanticTokens SemanticTokensDelta]"}
-}
-
-func (t Or_Result_textDocument_typeDefinition) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Definition:
- return json.Marshal(x)
- case []DefinitionLink:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Definition []DefinitionLink]", t)
-}
-
-func (t *Or_Result_textDocument_typeDefinition) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder294 := json.NewDecoder(bytes.NewReader(x))
- decoder294.DisallowUnknownFields()
- var h294 Definition
- if err := decoder294.Decode(&h294); err == nil {
- t.Value = h294
- return nil
- }
- decoder295 := json.NewDecoder(bytes.NewReader(x))
- decoder295.DisallowUnknownFields()
- var h295 []DefinitionLink
- if err := decoder295.Decode(&h295); err == nil {
- t.Value = h295
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Definition []DefinitionLink]"}
-}
-
-func (t Or_Result_workspace_symbol) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case []SymbolInformation:
- return json.Marshal(x)
- case []WorkspaceSymbol:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [[]SymbolInformation []WorkspaceSymbol]", t)
-}
-
-func (t *Or_Result_workspace_symbol) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder326 := json.NewDecoder(bytes.NewReader(x))
- decoder326.DisallowUnknownFields()
- var h326 []SymbolInformation
- if err := decoder326.Decode(&h326); err == nil {
- t.Value = h326
- return nil
- }
- decoder327 := json.NewDecoder(bytes.NewReader(x))
- decoder327.DisallowUnknownFields()
- var h327 []WorkspaceSymbol
- if err := decoder327.Decode(&h327); err == nil {
- t.Value = h327
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [[]SymbolInformation []WorkspaceSymbol]"}
-}
-
-func (t Or_SemanticTokensOptions_full) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case SemanticTokensFullDelta:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [SemanticTokensFullDelta bool]", t)
-}
-
-func (t *Or_SemanticTokensOptions_full) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder47 := json.NewDecoder(bytes.NewReader(x))
- decoder47.DisallowUnknownFields()
- var boolVal bool
- if err := decoder47.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder48 := json.NewDecoder(bytes.NewReader(x))
- decoder48.DisallowUnknownFields()
- var h48 SemanticTokensFullDelta
- if err := decoder48.Decode(&h48); err == nil {
- t.Value = h48
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [SemanticTokensFullDelta bool]"}
-}
-
-func (t Or_SemanticTokensOptions_range) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Lit_SemanticTokensOptions_range_Item1:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Lit_SemanticTokensOptions_range_Item1 bool]", t)
-}
-
-func (t *Or_SemanticTokensOptions_range) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder44 := json.NewDecoder(bytes.NewReader(x))
- decoder44.DisallowUnknownFields()
- var boolVal bool
- if err := decoder44.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder45 := json.NewDecoder(bytes.NewReader(x))
- decoder45.DisallowUnknownFields()
- var h45 Lit_SemanticTokensOptions_range_Item1
- if err := decoder45.Decode(&h45); err == nil {
- t.Value = h45
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Lit_SemanticTokensOptions_range_Item1 bool]"}
-}
-
-func (t Or_ServerCapabilities_callHierarchyProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case CallHierarchyOptions:
- return json.Marshal(x)
- case CallHierarchyRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [CallHierarchyOptions CallHierarchyRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_callHierarchyProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder140 := json.NewDecoder(bytes.NewReader(x))
- decoder140.DisallowUnknownFields()
- var boolVal bool
- if err := decoder140.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder141 := json.NewDecoder(bytes.NewReader(x))
- decoder141.DisallowUnknownFields()
- var h141 CallHierarchyOptions
- if err := decoder141.Decode(&h141); err == nil {
- t.Value = h141
- return nil
- }
- decoder142 := json.NewDecoder(bytes.NewReader(x))
- decoder142.DisallowUnknownFields()
- var h142 CallHierarchyRegistrationOptions
- if err := decoder142.Decode(&h142); err == nil {
- t.Value = h142
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [CallHierarchyOptions CallHierarchyRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_codeActionProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case CodeActionOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [CodeActionOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_codeActionProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder109 := json.NewDecoder(bytes.NewReader(x))
- decoder109.DisallowUnknownFields()
- var boolVal bool
- if err := decoder109.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder110 := json.NewDecoder(bytes.NewReader(x))
- decoder110.DisallowUnknownFields()
- var h110 CodeActionOptions
- if err := decoder110.Decode(&h110); err == nil {
- t.Value = h110
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [CodeActionOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_colorProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DocumentColorOptions:
- return json.Marshal(x)
- case DocumentColorRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DocumentColorOptions DocumentColorRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_colorProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder113 := json.NewDecoder(bytes.NewReader(x))
- decoder113.DisallowUnknownFields()
- var boolVal bool
- if err := decoder113.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder114 := json.NewDecoder(bytes.NewReader(x))
- decoder114.DisallowUnknownFields()
- var h114 DocumentColorOptions
- if err := decoder114.Decode(&h114); err == nil {
- t.Value = h114
- return nil
- }
- decoder115 := json.NewDecoder(bytes.NewReader(x))
- decoder115.DisallowUnknownFields()
- var h115 DocumentColorRegistrationOptions
- if err := decoder115.Decode(&h115); err == nil {
- t.Value = h115
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DocumentColorOptions DocumentColorRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_declarationProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DeclarationOptions:
- return json.Marshal(x)
- case DeclarationRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DeclarationOptions DeclarationRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_declarationProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder83 := json.NewDecoder(bytes.NewReader(x))
- decoder83.DisallowUnknownFields()
- var boolVal bool
- if err := decoder83.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder84 := json.NewDecoder(bytes.NewReader(x))
- decoder84.DisallowUnknownFields()
- var h84 DeclarationOptions
- if err := decoder84.Decode(&h84); err == nil {
- t.Value = h84
- return nil
- }
- decoder85 := json.NewDecoder(bytes.NewReader(x))
- decoder85.DisallowUnknownFields()
- var h85 DeclarationRegistrationOptions
- if err := decoder85.Decode(&h85); err == nil {
- t.Value = h85
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DeclarationOptions DeclarationRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_definitionProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DefinitionOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DefinitionOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_definitionProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder87 := json.NewDecoder(bytes.NewReader(x))
- decoder87.DisallowUnknownFields()
- var boolVal bool
- if err := decoder87.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder88 := json.NewDecoder(bytes.NewReader(x))
- decoder88.DisallowUnknownFields()
- var h88 DefinitionOptions
- if err := decoder88.Decode(&h88); err == nil {
- t.Value = h88
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DefinitionOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_diagnosticProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DiagnosticOptions:
- return json.Marshal(x)
- case DiagnosticRegistrationOptions:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DiagnosticOptions DiagnosticRegistrationOptions]", t)
-}
-
-func (t *Or_ServerCapabilities_diagnosticProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder174 := json.NewDecoder(bytes.NewReader(x))
- decoder174.DisallowUnknownFields()
- var h174 DiagnosticOptions
- if err := decoder174.Decode(&h174); err == nil {
- t.Value = h174
- return nil
- }
- decoder175 := json.NewDecoder(bytes.NewReader(x))
- decoder175.DisallowUnknownFields()
- var h175 DiagnosticRegistrationOptions
- if err := decoder175.Decode(&h175); err == nil {
- t.Value = h175
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DiagnosticOptions DiagnosticRegistrationOptions]"}
-}
-
-func (t Or_ServerCapabilities_documentFormattingProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DocumentFormattingOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DocumentFormattingOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_documentFormattingProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder120 := json.NewDecoder(bytes.NewReader(x))
- decoder120.DisallowUnknownFields()
- var boolVal bool
- if err := decoder120.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder121 := json.NewDecoder(bytes.NewReader(x))
- decoder121.DisallowUnknownFields()
- var h121 DocumentFormattingOptions
- if err := decoder121.Decode(&h121); err == nil {
- t.Value = h121
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DocumentFormattingOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_documentHighlightProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DocumentHighlightOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DocumentHighlightOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_documentHighlightProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder103 := json.NewDecoder(bytes.NewReader(x))
- decoder103.DisallowUnknownFields()
- var boolVal bool
- if err := decoder103.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder104 := json.NewDecoder(bytes.NewReader(x))
- decoder104.DisallowUnknownFields()
- var h104 DocumentHighlightOptions
- if err := decoder104.Decode(&h104); err == nil {
- t.Value = h104
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DocumentHighlightOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_documentRangeFormattingProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DocumentRangeFormattingOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DocumentRangeFormattingOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_documentRangeFormattingProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder123 := json.NewDecoder(bytes.NewReader(x))
- decoder123.DisallowUnknownFields()
- var boolVal bool
- if err := decoder123.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder124 := json.NewDecoder(bytes.NewReader(x))
- decoder124.DisallowUnknownFields()
- var h124 DocumentRangeFormattingOptions
- if err := decoder124.Decode(&h124); err == nil {
- t.Value = h124
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DocumentRangeFormattingOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_documentSymbolProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DocumentSymbolOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DocumentSymbolOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_documentSymbolProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder106 := json.NewDecoder(bytes.NewReader(x))
- decoder106.DisallowUnknownFields()
- var boolVal bool
- if err := decoder106.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder107 := json.NewDecoder(bytes.NewReader(x))
- decoder107.DisallowUnknownFields()
- var h107 DocumentSymbolOptions
- if err := decoder107.Decode(&h107); err == nil {
- t.Value = h107
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DocumentSymbolOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_foldingRangeProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case FoldingRangeOptions:
- return json.Marshal(x)
- case FoldingRangeRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [FoldingRangeOptions FoldingRangeRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_foldingRangeProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder130 := json.NewDecoder(bytes.NewReader(x))
- decoder130.DisallowUnknownFields()
- var boolVal bool
- if err := decoder130.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder131 := json.NewDecoder(bytes.NewReader(x))
- decoder131.DisallowUnknownFields()
- var h131 FoldingRangeOptions
- if err := decoder131.Decode(&h131); err == nil {
- t.Value = h131
- return nil
- }
- decoder132 := json.NewDecoder(bytes.NewReader(x))
- decoder132.DisallowUnknownFields()
- var h132 FoldingRangeRegistrationOptions
- if err := decoder132.Decode(&h132); err == nil {
- t.Value = h132
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [FoldingRangeOptions FoldingRangeRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_hoverProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case HoverOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [HoverOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_hoverProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder79 := json.NewDecoder(bytes.NewReader(x))
- decoder79.DisallowUnknownFields()
- var boolVal bool
- if err := decoder79.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder80 := json.NewDecoder(bytes.NewReader(x))
- decoder80.DisallowUnknownFields()
- var h80 HoverOptions
- if err := decoder80.Decode(&h80); err == nil {
- t.Value = h80
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [HoverOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_implementationProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case ImplementationOptions:
- return json.Marshal(x)
- case ImplementationRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [ImplementationOptions ImplementationRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_implementationProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder96 := json.NewDecoder(bytes.NewReader(x))
- decoder96.DisallowUnknownFields()
- var boolVal bool
- if err := decoder96.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder97 := json.NewDecoder(bytes.NewReader(x))
- decoder97.DisallowUnknownFields()
- var h97 ImplementationOptions
- if err := decoder97.Decode(&h97); err == nil {
- t.Value = h97
- return nil
- }
- decoder98 := json.NewDecoder(bytes.NewReader(x))
- decoder98.DisallowUnknownFields()
- var h98 ImplementationRegistrationOptions
- if err := decoder98.Decode(&h98); err == nil {
- t.Value = h98
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [ImplementationOptions ImplementationRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_inlayHintProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InlayHintOptions:
- return json.Marshal(x)
- case InlayHintRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InlayHintOptions InlayHintRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_inlayHintProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder169 := json.NewDecoder(bytes.NewReader(x))
- decoder169.DisallowUnknownFields()
- var boolVal bool
- if err := decoder169.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder170 := json.NewDecoder(bytes.NewReader(x))
- decoder170.DisallowUnknownFields()
- var h170 InlayHintOptions
- if err := decoder170.Decode(&h170); err == nil {
- t.Value = h170
- return nil
- }
- decoder171 := json.NewDecoder(bytes.NewReader(x))
- decoder171.DisallowUnknownFields()
- var h171 InlayHintRegistrationOptions
- if err := decoder171.Decode(&h171); err == nil {
- t.Value = h171
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InlayHintOptions InlayHintRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_inlineCompletionProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InlineCompletionOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InlineCompletionOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_inlineCompletionProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder177 := json.NewDecoder(bytes.NewReader(x))
- decoder177.DisallowUnknownFields()
- var boolVal bool
- if err := decoder177.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder178 := json.NewDecoder(bytes.NewReader(x))
- decoder178.DisallowUnknownFields()
- var h178 InlineCompletionOptions
- if err := decoder178.Decode(&h178); err == nil {
- t.Value = h178
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InlineCompletionOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_inlineValueProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InlineValueOptions:
- return json.Marshal(x)
- case InlineValueRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InlineValueOptions InlineValueRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_inlineValueProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder164 := json.NewDecoder(bytes.NewReader(x))
- decoder164.DisallowUnknownFields()
- var boolVal bool
- if err := decoder164.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder165 := json.NewDecoder(bytes.NewReader(x))
- decoder165.DisallowUnknownFields()
- var h165 InlineValueOptions
- if err := decoder165.Decode(&h165); err == nil {
- t.Value = h165
- return nil
- }
- decoder166 := json.NewDecoder(bytes.NewReader(x))
- decoder166.DisallowUnknownFields()
- var h166 InlineValueRegistrationOptions
- if err := decoder166.Decode(&h166); err == nil {
- t.Value = h166
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InlineValueOptions InlineValueRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_linkedEditingRangeProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case LinkedEditingRangeOptions:
- return json.Marshal(x)
- case LinkedEditingRangeRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [LinkedEditingRangeOptions LinkedEditingRangeRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_linkedEditingRangeProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder145 := json.NewDecoder(bytes.NewReader(x))
- decoder145.DisallowUnknownFields()
- var boolVal bool
- if err := decoder145.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder146 := json.NewDecoder(bytes.NewReader(x))
- decoder146.DisallowUnknownFields()
- var h146 LinkedEditingRangeOptions
- if err := decoder146.Decode(&h146); err == nil {
- t.Value = h146
- return nil
- }
- decoder147 := json.NewDecoder(bytes.NewReader(x))
- decoder147.DisallowUnknownFields()
- var h147 LinkedEditingRangeRegistrationOptions
- if err := decoder147.Decode(&h147); err == nil {
- t.Value = h147
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [LinkedEditingRangeOptions LinkedEditingRangeRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_monikerProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MonikerOptions:
- return json.Marshal(x)
- case MonikerRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MonikerOptions MonikerRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_monikerProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder154 := json.NewDecoder(bytes.NewReader(x))
- decoder154.DisallowUnknownFields()
- var boolVal bool
- if err := decoder154.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder155 := json.NewDecoder(bytes.NewReader(x))
- decoder155.DisallowUnknownFields()
- var h155 MonikerOptions
- if err := decoder155.Decode(&h155); err == nil {
- t.Value = h155
- return nil
- }
- decoder156 := json.NewDecoder(bytes.NewReader(x))
- decoder156.DisallowUnknownFields()
- var h156 MonikerRegistrationOptions
- if err := decoder156.Decode(&h156); err == nil {
- t.Value = h156
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MonikerOptions MonikerRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_notebookDocumentSync) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentSyncOptions:
- return json.Marshal(x)
- case NotebookDocumentSyncRegistrationOptions:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentSyncOptions NotebookDocumentSyncRegistrationOptions]", t)
-}
-
-func (t *Or_ServerCapabilities_notebookDocumentSync) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder76 := json.NewDecoder(bytes.NewReader(x))
- decoder76.DisallowUnknownFields()
- var h76 NotebookDocumentSyncOptions
- if err := decoder76.Decode(&h76); err == nil {
- t.Value = h76
- return nil
- }
- decoder77 := json.NewDecoder(bytes.NewReader(x))
- decoder77.DisallowUnknownFields()
- var h77 NotebookDocumentSyncRegistrationOptions
- if err := decoder77.Decode(&h77); err == nil {
- t.Value = h77
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentSyncOptions NotebookDocumentSyncRegistrationOptions]"}
-}
-
-func (t Or_ServerCapabilities_referencesProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case ReferenceOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [ReferenceOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_referencesProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder100 := json.NewDecoder(bytes.NewReader(x))
- decoder100.DisallowUnknownFields()
- var boolVal bool
- if err := decoder100.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder101 := json.NewDecoder(bytes.NewReader(x))
- decoder101.DisallowUnknownFields()
- var h101 ReferenceOptions
- if err := decoder101.Decode(&h101); err == nil {
- t.Value = h101
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [ReferenceOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_renameProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case RenameOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [RenameOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_renameProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder126 := json.NewDecoder(bytes.NewReader(x))
- decoder126.DisallowUnknownFields()
- var boolVal bool
- if err := decoder126.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder127 := json.NewDecoder(bytes.NewReader(x))
- decoder127.DisallowUnknownFields()
- var h127 RenameOptions
- if err := decoder127.Decode(&h127); err == nil {
- t.Value = h127
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [RenameOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_selectionRangeProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case SelectionRangeOptions:
- return json.Marshal(x)
- case SelectionRangeRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [SelectionRangeOptions SelectionRangeRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_selectionRangeProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder135 := json.NewDecoder(bytes.NewReader(x))
- decoder135.DisallowUnknownFields()
- var boolVal bool
- if err := decoder135.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder136 := json.NewDecoder(bytes.NewReader(x))
- decoder136.DisallowUnknownFields()
- var h136 SelectionRangeOptions
- if err := decoder136.Decode(&h136); err == nil {
- t.Value = h136
- return nil
- }
- decoder137 := json.NewDecoder(bytes.NewReader(x))
- decoder137.DisallowUnknownFields()
- var h137 SelectionRangeRegistrationOptions
- if err := decoder137.Decode(&h137); err == nil {
- t.Value = h137
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [SelectionRangeOptions SelectionRangeRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_semanticTokensProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case SemanticTokensOptions:
- return json.Marshal(x)
- case SemanticTokensRegistrationOptions:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [SemanticTokensOptions SemanticTokensRegistrationOptions]", t)
-}
-
-func (t *Or_ServerCapabilities_semanticTokensProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder150 := json.NewDecoder(bytes.NewReader(x))
- decoder150.DisallowUnknownFields()
- var h150 SemanticTokensOptions
- if err := decoder150.Decode(&h150); err == nil {
- t.Value = h150
- return nil
- }
- decoder151 := json.NewDecoder(bytes.NewReader(x))
- decoder151.DisallowUnknownFields()
- var h151 SemanticTokensRegistrationOptions
- if err := decoder151.Decode(&h151); err == nil {
- t.Value = h151
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [SemanticTokensOptions SemanticTokensRegistrationOptions]"}
-}
-
-func (t Or_ServerCapabilities_textDocumentSync) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TextDocumentSyncKind:
- return json.Marshal(x)
- case TextDocumentSyncOptions:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TextDocumentSyncKind TextDocumentSyncOptions]", t)
-}
-
-func (t *Or_ServerCapabilities_textDocumentSync) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder72 := json.NewDecoder(bytes.NewReader(x))
- decoder72.DisallowUnknownFields()
- var h72 TextDocumentSyncKind
- if err := decoder72.Decode(&h72); err == nil {
- t.Value = h72
- return nil
- }
- decoder73 := json.NewDecoder(bytes.NewReader(x))
- decoder73.DisallowUnknownFields()
- var h73 TextDocumentSyncOptions
- if err := decoder73.Decode(&h73); err == nil {
- t.Value = h73
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TextDocumentSyncKind TextDocumentSyncOptions]"}
-}
-
-func (t Or_ServerCapabilities_typeDefinitionProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TypeDefinitionOptions:
- return json.Marshal(x)
- case TypeDefinitionRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TypeDefinitionOptions TypeDefinitionRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_typeDefinitionProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder91 := json.NewDecoder(bytes.NewReader(x))
- decoder91.DisallowUnknownFields()
- var boolVal bool
- if err := decoder91.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder92 := json.NewDecoder(bytes.NewReader(x))
- decoder92.DisallowUnknownFields()
- var h92 TypeDefinitionOptions
- if err := decoder92.Decode(&h92); err == nil {
- t.Value = h92
- return nil
- }
- decoder93 := json.NewDecoder(bytes.NewReader(x))
- decoder93.DisallowUnknownFields()
- var h93 TypeDefinitionRegistrationOptions
- if err := decoder93.Decode(&h93); err == nil {
- t.Value = h93
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TypeDefinitionOptions TypeDefinitionRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_typeHierarchyProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TypeHierarchyOptions:
- return json.Marshal(x)
- case TypeHierarchyRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TypeHierarchyOptions TypeHierarchyRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_typeHierarchyProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder159 := json.NewDecoder(bytes.NewReader(x))
- decoder159.DisallowUnknownFields()
- var boolVal bool
- if err := decoder159.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder160 := json.NewDecoder(bytes.NewReader(x))
- decoder160.DisallowUnknownFields()
- var h160 TypeHierarchyOptions
- if err := decoder160.Decode(&h160); err == nil {
- t.Value = h160
- return nil
- }
- decoder161 := json.NewDecoder(bytes.NewReader(x))
- decoder161.DisallowUnknownFields()
- var h161 TypeHierarchyRegistrationOptions
- if err := decoder161.Decode(&h161); err == nil {
- t.Value = h161
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TypeHierarchyOptions TypeHierarchyRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_workspaceSymbolProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case WorkspaceSymbolOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [WorkspaceSymbolOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_workspaceSymbolProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder117 := json.NewDecoder(bytes.NewReader(x))
- decoder117.DisallowUnknownFields()
- var boolVal bool
- if err := decoder117.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder118 := json.NewDecoder(bytes.NewReader(x))
- decoder118.DisallowUnknownFields()
- var h118 WorkspaceSymbolOptions
- if err := decoder118.Decode(&h118); err == nil {
- t.Value = h118
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [WorkspaceSymbolOptions bool]"}
-}
-
-func (t Or_SignatureInformation_documentation) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkupContent:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t)
-}
-
-func (t *Or_SignatureInformation_documentation) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder186 := json.NewDecoder(bytes.NewReader(x))
- decoder186.DisallowUnknownFields()
- var stringVal string
- if err := decoder186.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder187 := json.NewDecoder(bytes.NewReader(x))
- decoder187.DisallowUnknownFields()
- var h187 MarkupContent
- if err := decoder187.Decode(&h187); err == nil {
- t.Value = h187
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"}
-}
-
-func (t Or_TextDocumentContentChangeEvent) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TextDocumentContentChangePartial:
- return json.Marshal(x)
- case TextDocumentContentChangeWholeDocument:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TextDocumentContentChangePartial TextDocumentContentChangeWholeDocument]", t)
-}
-
-func (t *Or_TextDocumentContentChangeEvent) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder263 := json.NewDecoder(bytes.NewReader(x))
- decoder263.DisallowUnknownFields()
- var h263 TextDocumentContentChangePartial
- if err := decoder263.Decode(&h263); err == nil {
- t.Value = h263
- return nil
- }
- decoder264 := json.NewDecoder(bytes.NewReader(x))
- decoder264.DisallowUnknownFields()
- var h264 TextDocumentContentChangeWholeDocument
- if err := decoder264.Decode(&h264); err == nil {
- t.Value = h264
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TextDocumentContentChangePartial TextDocumentContentChangeWholeDocument]"}
-}
-
-func (t Or_TextDocumentEdit_edits_Elem) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case AnnotatedTextEdit:
- return json.Marshal(x)
- case SnippetTextEdit:
- return json.Marshal(x)
- case TextEdit:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [AnnotatedTextEdit SnippetTextEdit TextEdit]", t)
-}
-
-func (t *Or_TextDocumentEdit_edits_Elem) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder52 := json.NewDecoder(bytes.NewReader(x))
- decoder52.DisallowUnknownFields()
- var h52 AnnotatedTextEdit
- if err := decoder52.Decode(&h52); err == nil {
- t.Value = h52
- return nil
- }
- decoder53 := json.NewDecoder(bytes.NewReader(x))
- decoder53.DisallowUnknownFields()
- var h53 SnippetTextEdit
- if err := decoder53.Decode(&h53); err == nil {
- t.Value = h53
- return nil
- }
- decoder54 := json.NewDecoder(bytes.NewReader(x))
- decoder54.DisallowUnknownFields()
- var h54 TextEdit
- if err := decoder54.Decode(&h54); err == nil {
- t.Value = h54
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [AnnotatedTextEdit SnippetTextEdit TextEdit]"}
-}
-
-func (t Or_TextDocumentFilter) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TextDocumentFilterLanguage:
- return json.Marshal(x)
- case TextDocumentFilterPattern:
- return json.Marshal(x)
- case TextDocumentFilterScheme:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TextDocumentFilterLanguage TextDocumentFilterPattern TextDocumentFilterScheme]", t)
-}
-
-func (t *Or_TextDocumentFilter) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder279 := json.NewDecoder(bytes.NewReader(x))
- decoder279.DisallowUnknownFields()
- var h279 TextDocumentFilterLanguage
- if err := decoder279.Decode(&h279); err == nil {
- t.Value = h279
- return nil
- }
- decoder280 := json.NewDecoder(bytes.NewReader(x))
- decoder280.DisallowUnknownFields()
- var h280 TextDocumentFilterPattern
- if err := decoder280.Decode(&h280); err == nil {
- t.Value = h280
- return nil
- }
- decoder281 := json.NewDecoder(bytes.NewReader(x))
- decoder281.DisallowUnknownFields()
- var h281 TextDocumentFilterScheme
- if err := decoder281.Decode(&h281); err == nil {
- t.Value = h281
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TextDocumentFilterLanguage TextDocumentFilterPattern TextDocumentFilterScheme]"}
-}
-
-func (t Or_TextDocumentSyncOptions_save) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case SaveOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [SaveOptions bool]", t)
-}
-
-func (t *Or_TextDocumentSyncOptions_save) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder195 := json.NewDecoder(bytes.NewReader(x))
- decoder195.DisallowUnknownFields()
- var boolVal bool
- if err := decoder195.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder196 := json.NewDecoder(bytes.NewReader(x))
- decoder196.DisallowUnknownFields()
- var h196 SaveOptions
- if err := decoder196.Decode(&h196); err == nil {
- t.Value = h196
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [SaveOptions bool]"}
-}
-
-func (t Or_WorkspaceDocumentDiagnosticReport) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case WorkspaceFullDocumentDiagnosticReport:
- return json.Marshal(x)
- case WorkspaceUnchangedDocumentDiagnosticReport:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [WorkspaceFullDocumentDiagnosticReport WorkspaceUnchangedDocumentDiagnosticReport]", t)
-}
-
-func (t *Or_WorkspaceDocumentDiagnosticReport) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder259 := json.NewDecoder(bytes.NewReader(x))
- decoder259.DisallowUnknownFields()
- var h259 WorkspaceFullDocumentDiagnosticReport
- if err := decoder259.Decode(&h259); err == nil {
- t.Value = h259
- return nil
- }
- decoder260 := json.NewDecoder(bytes.NewReader(x))
- decoder260.DisallowUnknownFields()
- var h260 WorkspaceUnchangedDocumentDiagnosticReport
- if err := decoder260.Decode(&h260); err == nil {
- t.Value = h260
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [WorkspaceFullDocumentDiagnosticReport WorkspaceUnchangedDocumentDiagnosticReport]"}
-}
-
-func (t Or_WorkspaceEdit_documentChanges_Elem) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case CreateFile:
- return json.Marshal(x)
- case DeleteFile:
- return json.Marshal(x)
- case RenameFile:
- return json.Marshal(x)
- case TextDocumentEdit:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [CreateFile DeleteFile RenameFile TextDocumentEdit]", t)
-}
-
-func (t *Or_WorkspaceEdit_documentChanges_Elem) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder4 := json.NewDecoder(bytes.NewReader(x))
- decoder4.DisallowUnknownFields()
- var h4 CreateFile
- if err := decoder4.Decode(&h4); err == nil {
- t.Value = h4
- return nil
- }
- decoder5 := json.NewDecoder(bytes.NewReader(x))
- decoder5.DisallowUnknownFields()
- var h5 DeleteFile
- if err := decoder5.Decode(&h5); err == nil {
- t.Value = h5
- return nil
- }
- decoder6 := json.NewDecoder(bytes.NewReader(x))
- decoder6.DisallowUnknownFields()
- var h6 RenameFile
- if err := decoder6.Decode(&h6); err == nil {
- t.Value = h6
- return nil
- }
- decoder7 := json.NewDecoder(bytes.NewReader(x))
- decoder7.DisallowUnknownFields()
- var h7 TextDocumentEdit
- if err := decoder7.Decode(&h7); err == nil {
- t.Value = h7
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [CreateFile DeleteFile RenameFile TextDocumentEdit]"}
-}
-
-func (t Or_WorkspaceFoldersServerCapabilities_changeNotifications) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case bool:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [bool string]", t)
-}
-
-func (t *Or_WorkspaceFoldersServerCapabilities_changeNotifications) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder210 := json.NewDecoder(bytes.NewReader(x))
- decoder210.DisallowUnknownFields()
- var boolVal bool
- if err := decoder210.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder211 := json.NewDecoder(bytes.NewReader(x))
- decoder211.DisallowUnknownFields()
- var stringVal string
- if err := decoder211.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [bool string]"}
-}
-
-func (t Or_WorkspaceOptions_textDocumentContent) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TextDocumentContentOptions:
- return json.Marshal(x)
- case TextDocumentContentRegistrationOptions:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TextDocumentContentOptions TextDocumentContentRegistrationOptions]", t)
-}
-
-func (t *Or_WorkspaceOptions_textDocumentContent) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder199 := json.NewDecoder(bytes.NewReader(x))
- decoder199.DisallowUnknownFields()
- var h199 TextDocumentContentOptions
- if err := decoder199.Decode(&h199); err == nil {
- t.Value = h199
- return nil
- }
- decoder200 := json.NewDecoder(bytes.NewReader(x))
- decoder200.DisallowUnknownFields()
- var h200 TextDocumentContentRegistrationOptions
- if err := decoder200.Decode(&h200); err == nil {
- t.Value = h200
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TextDocumentContentOptions TextDocumentContentRegistrationOptions]"}
-}
-
-func (t Or_WorkspaceSymbol_location) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Location:
- return json.Marshal(x)
- case LocationUriOnly:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Location LocationUriOnly]", t)
-}
-
-func (t *Or_WorkspaceSymbol_location) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder39 := json.NewDecoder(bytes.NewReader(x))
- decoder39.DisallowUnknownFields()
- var h39 Location
- if err := decoder39.Decode(&h39); err == nil {
- t.Value = h39
- return nil
- }
- decoder40 := json.NewDecoder(bytes.NewReader(x))
- decoder40.DisallowUnknownFields()
- var h40 LocationUriOnly
- if err := decoder40.Decode(&h40); err == nil {
- t.Value = h40
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Location LocationUriOnly]"}
-}
@@ -1,6952 +0,0 @@
-// Copyright 2023 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Code generated for LSP. DO NOT EDIT.
-
-package protocol
-
-// Code generated from protocol/metaModel.json at ref release/protocol/3.17.6-next.9 (hash c94395b5da53729e6dff931293b051009ccaaaa4).
-// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.6-next.9/protocol/metaModel.json
-// LSP metaData.version = 3.17.0.
-
-import "encoding/json"
-
-// created for And
-type And_RegOpt_textDocument_colorPresentation struct {
- WorkDoneProgressOptions
- TextDocumentRegistrationOptions
-}
-
-// A special text edit with an additional change annotation.
-//
-// @since 3.16.0.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#annotatedTextEdit
-type AnnotatedTextEdit struct {
- // The actual identifier of the change annotation
- AnnotationID *ChangeAnnotationIdentifier `json:"annotationId,omitempty"`
- TextEdit
-}
-
-// The parameters passed via an apply workspace edit request.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#applyWorkspaceEditParams
-type ApplyWorkspaceEditParams struct {
- // An optional label of the workspace edit. This label is
- // presented in the user interface for example on an undo
- // stack to undo the workspace edit.
- Label string `json:"label,omitempty"`
- // The edits to apply.
- Edit WorkspaceEdit `json:"edit"`
- // Additional data about the edit.
- //
- // @since 3.18.0
- // @proposed
- Metadata *WorkspaceEditMetadata `json:"metadata,omitempty"`
-}
-
-// The result returned from the apply workspace edit request.
-//
-// @since 3.17 renamed from ApplyWorkspaceEditResponse
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#applyWorkspaceEditResult
-type ApplyWorkspaceEditResult struct {
- // Indicates whether the edit was applied or not.
- Applied bool `json:"applied"`
- // An optional textual description for why the edit was not applied.
- // This may be used by the server for diagnostic slog.or to provide
- // a suitable error for a request that triggered the edit.
- FailureReason string `json:"failureReason,omitempty"`
- // Depending on the client's failure handling strategy `failedChange` might
- // contain the index of the change that failed. This property is only available
- // if the client signals a `failureHandlingStrategy` in its client capabilities.
- FailedChange uint32 `json:"failedChange,omitempty"`
-}
-
-// A base for all symbol information.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#baseSymbolInformation
-type BaseSymbolInformation struct {
- // The name of this symbol.
- Name string `json:"name"`
- // The kind of this symbol.
- Kind SymbolKind `json:"kind"`
- // Tags for this symbol.
- //
- // @since 3.16.0
- Tags []SymbolTag `json:"tags,omitempty"`
- // The name of the symbol containing this symbol. This information is for
- // user interface purposes (e.g. to render a qualifier in the user interface
- // if necessary). It can't be used to re-infer a hierarchy for the document
- // symbols.
- ContainerName string `json:"containerName,omitempty"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyClientCapabilities
-type CallHierarchyClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// Represents an incoming call, e.g. a caller of a method or constructor.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyIncomingCall
-type CallHierarchyIncomingCall struct {
- // The item that makes the call.
- From CallHierarchyItem `json:"from"`
- // The ranges at which the calls appear. This is relative to the caller
- // denoted by {@link CallHierarchyIncomingCall.from `this.from`}.
- FromRanges []Range `json:"fromRanges"`
-}
-
-// The parameter of a `callHierarchy/incomingCalls` request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyIncomingCallsParams
-type CallHierarchyIncomingCallsParams struct {
- Item CallHierarchyItem `json:"item"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Represents programming constructs like functions or constructors in the context
-// of call hierarchy.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyItem
-type CallHierarchyItem struct {
- // The name of this item.
- Name string `json:"name"`
- // The kind of this item.
- Kind SymbolKind `json:"kind"`
- // Tags for this item.
- Tags []SymbolTag `json:"tags,omitempty"`
- // More detail for this item, e.g. the signature of a function.
- Detail string `json:"detail,omitempty"`
- // The resource identifier of this item.
- URI DocumentURI `json:"uri"`
- // The range enclosing this symbol not including leading/trailing whitespace but everything else, e.g. comments and code.
- Range Range `json:"range"`
- // The range that should be selected and revealed when this symbol is being picked, e.g. the name of a function.
- // Must be contained by the {@link CallHierarchyItem.range `range`}.
- SelectionRange Range `json:"selectionRange"`
- // A data entry field that is preserved between a call hierarchy prepare and
- // incoming calls or outgoing calls requests.
- Data interface{} `json:"data,omitempty"`
-}
-
-// Call hierarchy options used during static registration.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyOptions
-type CallHierarchyOptions struct {
- WorkDoneProgressOptions
-}
-
-// Represents an outgoing call, e.g. calling a getter from a method or a method from a constructor etc.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyOutgoingCall
-type CallHierarchyOutgoingCall struct {
- // The item that is called.
- To CallHierarchyItem `json:"to"`
- // The range at which this item is called. This is the range relative to the caller, e.g the item
- // passed to {@link CallHierarchyItemProvider.provideCallHierarchyOutgoingCalls `provideCallHierarchyOutgoingCalls`}
- // and not {@link CallHierarchyOutgoingCall.to `this.to`}.
- FromRanges []Range `json:"fromRanges"`
-}
-
-// The parameter of a `callHierarchy/outgoingCalls` request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyOutgoingCallsParams
-type CallHierarchyOutgoingCallsParams struct {
- Item CallHierarchyItem `json:"item"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// The parameter of a `textDocument/prepareCallHierarchy` request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyPrepareParams
-type CallHierarchyPrepareParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// Call hierarchy options used during static or dynamic registration.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyRegistrationOptions
-type CallHierarchyRegistrationOptions struct {
- TextDocumentRegistrationOptions
- CallHierarchyOptions
- StaticRegistrationOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#cancelParams
-type CancelParams struct {
- // The request id to cancel.
- ID interface{} `json:"id"`
-}
-
-// Additional information that describes document changes.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#changeAnnotation
-type ChangeAnnotation struct {
- // A human-readable string describing the actual change. The string
- // is rendered prominent in the user interface.
- Label string `json:"label"`
- // A flag which indicates that user confirmation is needed
- // before applying the change.
- NeedsConfirmation bool `json:"needsConfirmation,omitempty"`
- // A human-readable string which is rendered less prominent in
- // the user interface.
- Description string `json:"description,omitempty"`
-}
-
-// An identifier to refer to a change annotation stored with a workspace edit.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#changeAnnotationIdentifier
-type (
- ChangeAnnotationIdentifier = string // (alias)
- // @since 3.18.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#changeAnnotationsSupportOptions
- ChangeAnnotationsSupportOptions struct {
- // Whether the client groups edits with equal labels into tree nodes,
- // for instance all edits labelled with "Changes in Strings" would
- // be a tree node.
- GroupsOnLabel bool `json:"groupsOnLabel,omitempty"`
- }
-)
-
-// Defines the capabilities provided by the client.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCapabilities
-type ClientCapabilities struct {
- // Workspace specific client capabilities.
- Workspace WorkspaceClientCapabilities `json:"workspace,omitempty"`
- // Text document specific client capabilities.
- TextDocument TextDocumentClientCapabilities `json:"textDocument,omitempty"`
- // Capabilities specific to the notebook document support.
- //
- // @since 3.17.0
- NotebookDocument *NotebookDocumentClientCapabilities `json:"notebookDocument,omitempty"`
- // Window specific client capabilities.
- Window WindowClientCapabilities `json:"window,omitempty"`
- // General client capabilities.
- //
- // @since 3.16.0
- General *GeneralClientCapabilities `json:"general,omitempty"`
- // Experimental client capabilities.
- Experimental interface{} `json:"experimental,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCodeActionKindOptions
-type ClientCodeActionKindOptions struct {
- // The code action kind values the client supports. When this
- // property exists the client also guarantees that it will
- // handle values outside its set gracefully and falls back
- // to a default value when unknown.
- ValueSet []CodeActionKind `json:"valueSet"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCodeActionLiteralOptions
-type ClientCodeActionLiteralOptions struct {
- // The code action kind is support with the following value
- // set.
- CodeActionKind ClientCodeActionKindOptions `json:"codeActionKind"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCodeActionResolveOptions
-type ClientCodeActionResolveOptions struct {
- // The properties that a client can resolve lazily.
- Properties []string `json:"properties"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCodeLensResolveOptions
-type ClientCodeLensResolveOptions struct {
- // The properties that a client can resolve lazily.
- Properties []string `json:"properties"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCompletionItemInsertTextModeOptions
-type ClientCompletionItemInsertTextModeOptions struct {
- ValueSet []InsertTextMode `json:"valueSet"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCompletionItemOptions
-type ClientCompletionItemOptions struct {
- // Client supports snippets as insert text.
- //
- // A snippet can define tab stops and placeholders with `$1`, `$2`
- // and `${3:foo}`. `$0` defines the final tab stop, it defaults to
- // the end of the snippet. Placeholders with equal identifiers are linked,
- // that is typing in one will update others too.
- SnippetSupport bool `json:"snippetSupport,omitempty"`
- // Client supports commit characters on a completion item.
- CommitCharactersSupport bool `json:"commitCharactersSupport,omitempty"`
- // Client supports the following content formats for the documentation
- // property. The order describes the preferred format of the client.
- DocumentationFormat []MarkupKind `json:"documentationFormat,omitempty"`
- // Client supports the deprecated property on a completion item.
- DeprecatedSupport bool `json:"deprecatedSupport,omitempty"`
- // Client supports the preselect property on a completion item.
- PreselectSupport bool `json:"preselectSupport,omitempty"`
- // Client supports the tag property on a completion item. Clients supporting
- // tags have to handle unknown tags gracefully. Clients especially need to
- // preserve unknown tags when sending a completion item back to the server in
- // a resolve call.
- //
- // @since 3.15.0
- TagSupport *CompletionItemTagOptions `json:"tagSupport,omitempty"`
- // Client support insert replace edit to control different behavior if a
- // completion item is inserted in the text or should replace text.
- //
- // @since 3.16.0
- InsertReplaceSupport bool `json:"insertReplaceSupport,omitempty"`
- // Indicates which properties a client can resolve lazily on a completion
- // item. Before version 3.16.0 only the predefined properties `documentation`
- // and `details` could be resolved lazily.
- //
- // @since 3.16.0
- ResolveSupport *ClientCompletionItemResolveOptions `json:"resolveSupport,omitempty"`
- // The client supports the `insertTextMode` property on
- // a completion item to override the whitespace handling mode
- // as defined by the client (see `insertTextMode`).
- //
- // @since 3.16.0
- InsertTextModeSupport *ClientCompletionItemInsertTextModeOptions `json:"insertTextModeSupport,omitempty"`
- // The client has support for completion item label
- // details (see also `CompletionItemLabelDetails`).
- //
- // @since 3.17.0
- LabelDetailsSupport bool `json:"labelDetailsSupport,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCompletionItemOptionsKind
-type ClientCompletionItemOptionsKind struct {
- // The completion item kind values the client supports. When this
- // property exists the client also guarantees that it will
- // handle values outside its set gracefully and falls back
- // to a default value when unknown.
- //
- // If this property is not present the client only supports
- // the completion items kinds from `Text` to `Reference` as defined in
- // the initial version of the protocol.
- ValueSet []CompletionItemKind `json:"valueSet,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCompletionItemResolveOptions
-type ClientCompletionItemResolveOptions struct {
- // The properties that a client can resolve lazily.
- Properties []string `json:"properties"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientDiagnosticsTagOptions
-type ClientDiagnosticsTagOptions struct {
- // The tags supported by the client.
- ValueSet []DiagnosticTag `json:"valueSet"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientFoldingRangeKindOptions
-type ClientFoldingRangeKindOptions struct {
- // The folding range kind values the client supports. When this
- // property exists the client also guarantees that it will
- // handle values outside its set gracefully and falls back
- // to a default value when unknown.
- ValueSet []FoldingRangeKind `json:"valueSet,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientFoldingRangeOptions
-type ClientFoldingRangeOptions struct {
- // If set, the client signals that it supports setting collapsedText on
- // folding ranges to display custom labels instead of the default text.
- //
- // @since 3.17.0
- CollapsedText bool `json:"collapsedText,omitempty"`
-}
-
-// Information about the client
-//
-// @since 3.15.0
-// @since 3.18.0 ClientInfo type name added.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientInfo
-type ClientInfo struct {
- // The name of the client as defined by the client.
- Name string `json:"name"`
- // The client's version as defined by the client.
- Version string `json:"version,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientInlayHintResolveOptions
-type ClientInlayHintResolveOptions struct {
- // The properties that a client can resolve lazily.
- Properties []string `json:"properties"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSemanticTokensRequestFullDelta
-type ClientSemanticTokensRequestFullDelta struct {
- // The client will send the `textDocument/semanticTokens/full/delta` request if
- // the server provides a corresponding handler.
- Delta bool `json:"delta,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSemanticTokensRequestOptions
-type ClientSemanticTokensRequestOptions struct {
- // The client will send the `textDocument/semanticTokens/range` request if
- // the server provides a corresponding handler.
- Range *Or_ClientSemanticTokensRequestOptions_range `json:"range,omitempty"`
- // The client will send the `textDocument/semanticTokens/full` request if
- // the server provides a corresponding handler.
- Full *Or_ClientSemanticTokensRequestOptions_full `json:"full,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientShowMessageActionItemOptions
-type ClientShowMessageActionItemOptions struct {
- // Whether the client supports additional attributes which
- // are preserved and send back to the server in the
- // request's response.
- AdditionalPropertiesSupport bool `json:"additionalPropertiesSupport,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSignatureInformationOptions
-type ClientSignatureInformationOptions struct {
- // Client supports the following content formats for the documentation
- // property. The order describes the preferred format of the client.
- DocumentationFormat []MarkupKind `json:"documentationFormat,omitempty"`
- // Client capabilities specific to parameter information.
- ParameterInformation *ClientSignatureParameterInformationOptions `json:"parameterInformation,omitempty"`
- // The client supports the `activeParameter` property on `SignatureInformation`
- // literal.
- //
- // @since 3.16.0
- ActiveParameterSupport bool `json:"activeParameterSupport,omitempty"`
- // The client supports the `activeParameter` property on
- // `SignatureHelp`/`SignatureInformation` being set to `null` to
- // indicate that no parameter should be active.
- //
- // @since 3.18.0
- // @proposed
- NoActiveParameterSupport bool `json:"noActiveParameterSupport,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSignatureParameterInformationOptions
-type ClientSignatureParameterInformationOptions struct {
- // The client supports processing label offsets instead of a
- // simple label string.
- //
- // @since 3.14.0
- LabelOffsetSupport bool `json:"labelOffsetSupport,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSymbolKindOptions
-type ClientSymbolKindOptions struct {
- // The symbol kind values the client supports. When this
- // property exists the client also guarantees that it will
- // handle values outside its set gracefully and falls back
- // to a default value when unknown.
- //
- // If this property is not present the client only supports
- // the symbol kinds from `File` to `Array` as defined in
- // the initial version of the protocol.
- ValueSet []SymbolKind `json:"valueSet,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSymbolResolveOptions
-type ClientSymbolResolveOptions struct {
- // The properties that a client can resolve lazily. Usually
- // `location.range`
- Properties []string `json:"properties"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSymbolTagOptions
-type ClientSymbolTagOptions struct {
- // The tags supported by the client.
- ValueSet []SymbolTag `json:"valueSet"`
-}
-
-// A code action represents a change that can be performed in code, e.g. to fix a problem or
-// to refactor code.
-//
-// A CodeAction must set either `edit` and/or a `command`. If both are supplied, the `edit` is applied first, then the `command` is executed.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeAction
-type CodeAction struct {
- // A short, human-readable, title for this code action.
- Title string `json:"title"`
- // The kind of the code action.
- //
- // Used to filter code actions.
- Kind CodeActionKind `json:"kind,omitempty"`
- // The diagnostics that this code action resolves.
- Diagnostics []Diagnostic `json:"diagnostics,omitempty"`
- // Marks this as a preferred action. Preferred actions are used by the `auto fix` command and can be targeted
- // by keybindings.
- //
- // A quick fix should be marked preferred if it properly addresses the underlying error.
- // A refactoring should be marked preferred if it is the most reasonable choice of actions to take.
- //
- // @since 3.15.0
- IsPreferred bool `json:"isPreferred,omitempty"`
- // Marks that the code action cannot currently be applied.
- //
- // Clients should follow the following guidelines regarding disabled code actions:
- //
- // - Disabled code actions are not shown in automatic [lightbulbs](https://code.visualstudio.com/docs/editor/editingevolved#_code-action)
- // code action menus.
- //
- // - Disabled actions are shown as faded out in the code action menu when the user requests a more specific type
- // of code action, such as refactorings.
- //
- // - If the user has a [keybinding](https://code.visualstudio.com/docs/editor/refactoring#_keybindings-for-code-actions)
- // that auto applies a code action and only disabled code actions are returned, the client should show the user an
- // error message with `reason` in the editor.
- //
- // @since 3.16.0
- Disabled *CodeActionDisabled `json:"disabled,omitempty"`
- // The workspace edit this code action performs.
- Edit *WorkspaceEdit `json:"edit,omitempty"`
- // A command this code action executes. If a code action
- // provides an edit and a command, first the edit is
- // executed and then the command.
- Command *Command `json:"command,omitempty"`
- // A data entry field that is preserved on a code action between
- // a `textDocument/codeAction` and a `codeAction/resolve` request.
- //
- // @since 3.16.0
- Data *json.RawMessage `json:"data,omitempty"`
-}
-
-// The Client Capabilities of a {@link CodeActionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionClientCapabilities
-type CodeActionClientCapabilities struct {
- // Whether code action supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client support code action literals of type `CodeAction` as a valid
- // response of the `textDocument/codeAction` request. If the property is not
- // set the request can only return `Command` literals.
- //
- // @since 3.8.0
- CodeActionLiteralSupport ClientCodeActionLiteralOptions `json:"codeActionLiteralSupport,omitempty"`
- // Whether code action supports the `isPreferred` property.
- //
- // @since 3.15.0
- IsPreferredSupport bool `json:"isPreferredSupport,omitempty"`
- // Whether code action supports the `disabled` property.
- //
- // @since 3.16.0
- DisabledSupport bool `json:"disabledSupport,omitempty"`
- // Whether code action supports the `data` property which is
- // preserved between a `textDocument/codeAction` and a
- // `codeAction/resolve` request.
- //
- // @since 3.16.0
- DataSupport bool `json:"dataSupport,omitempty"`
- // Whether the client supports resolving additional code action
- // properties via a separate `codeAction/resolve` request.
- //
- // @since 3.16.0
- ResolveSupport *ClientCodeActionResolveOptions `json:"resolveSupport,omitempty"`
- // Whether the client honors the change annotations in
- // text edits and resource operations returned via the
- // `CodeAction#edit` property by for example presenting
- // the workspace edit in the user interface and asking
- // for confirmation.
- //
- // @since 3.16.0
- HonorsChangeAnnotations bool `json:"honorsChangeAnnotations,omitempty"`
- // Whether the client supports documentation for a class of
- // code actions.
- //
- // @since 3.18.0
- // @proposed
- DocumentationSupport bool `json:"documentationSupport,omitempty"`
-}
-
-// Contains additional diagnostic information about the context in which
-// a {@link CodeActionProvider.provideCodeActions code action} is run.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionContext
-type CodeActionContext struct {
- // An array of diagnostics known on the client side overlapping the range provided to the
- // `textDocument/codeAction` request. They are provided so that the server knows which
- // errors are currently presented to the user for the given range. There is no guarantee
- // that these accurately reflect the error state of the resource. The primary parameter
- // to compute code actions is the provided range.
- Diagnostics []Diagnostic `json:"diagnostics"`
- // Requested kind of actions to return.
- //
- // Actions not of this kind are filtered out by the client before being shown. So servers
- // can omit computing them.
- Only []CodeActionKind `json:"only,omitempty"`
- // The reason why code actions were requested.
- //
- // @since 3.17.0
- TriggerKind *CodeActionTriggerKind `json:"triggerKind,omitempty"`
-}
-
-// Captures why the code action is currently disabled.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionDisabled
-type CodeActionDisabled struct {
- // Human readable description of why the code action is currently disabled.
- //
- // This is displayed in the code actions UI.
- Reason string `json:"reason"`
-}
-
-// A set of predefined code action kinds
-type CodeActionKind string
-
-// Documentation for a class of code actions.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionKindDocumentation
-type CodeActionKindDocumentation struct {
- // The kind of the code action being documented.
- //
- // If the kind is generic, such as `CodeActionKind.Refactor`, the documentation will be shown whenever any
- // refactorings are returned. If the kind if more specific, such as `CodeActionKind.RefactorExtract`, the
- // documentation will only be shown when extract refactoring code actions are returned.
- Kind CodeActionKind `json:"kind"`
- // Command that is ued to display the documentation to the user.
- //
- // The title of this documentation code action is taken from {@linkcode Command.title}
- Command Command `json:"command"`
-}
-
-// Provider options for a {@link CodeActionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionOptions
-type CodeActionOptions struct {
- // CodeActionKinds that this server may return.
- //
- // The list of kinds may be generic, such as `CodeActionKind.Refactor`, or the server
- // may list out every specific kind they provide.
- CodeActionKinds []CodeActionKind `json:"codeActionKinds,omitempty"`
- // Static documentation for a class of code actions.
- //
- // Documentation from the provider should be shown in the code actions menu if either:
- //
- //
- // - Code actions of `kind` are requested by the editor. In this case, the editor will show the documentation that
- // most closely matches the requested code action kind. For example, if a provider has documentation for
- // both `Refactor` and `RefactorExtract`, when the user requests code actions for `RefactorExtract`,
- // the editor will use the documentation for `RefactorExtract` instead of the documentation for `Refactor`.
- //
- //
- // - Any code actions of `kind` are returned by the provider.
- //
- // At most one documentation entry should be shown per provider.
- //
- // @since 3.18.0
- // @proposed
- Documentation []CodeActionKindDocumentation `json:"documentation,omitempty"`
- // The server provides support to resolve additional
- // information for a code action.
- //
- // @since 3.16.0
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link CodeActionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionParams
-type CodeActionParams struct {
- // The document in which the command was invoked.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The range for which the command was invoked.
- Range Range `json:"range"`
- // Context carrying additional information.
- Context CodeActionContext `json:"context"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link CodeActionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionRegistrationOptions
-type CodeActionRegistrationOptions struct {
- TextDocumentRegistrationOptions
- CodeActionOptions
-}
-
-// The reason why code actions were requested.
-//
-// @since 3.17.0
-type CodeActionTriggerKind uint32
-
-// Structure to capture a description for an error code.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeDescription
-type CodeDescription struct {
- // An URI to open with more information about the diagnostic error.
- Href URI `json:"href"`
-}
-
-// A code lens represents a {@link Command command} that should be shown along with
-// source text, like the number of references, a way to run tests, etc.
-//
-// A code lens is _unresolved_ when no command is associated to it. For performance
-// reasons the creation of a code lens and resolving should be done in two stages.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLens
-type CodeLens struct {
- // The range in which this code lens is valid. Should only span a single line.
- Range Range `json:"range"`
- // The command this code lens represents.
- Command *Command `json:"command,omitempty"`
- // A data entry field that is preserved on a code lens item between
- // a {@link CodeLensRequest} and a {@link CodeLensResolveRequest}
- Data interface{} `json:"data,omitempty"`
-}
-
-// The client capabilities of a {@link CodeLensRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensClientCapabilities
-type CodeLensClientCapabilities struct {
- // Whether code lens supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Whether the client supports resolving additional code lens
- // properties via a separate `codeLens/resolve` request.
- //
- // @since 3.18.0
- ResolveSupport *ClientCodeLensResolveOptions `json:"resolveSupport,omitempty"`
-}
-
-// Code Lens provider options of a {@link CodeLensRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensOptions
-type CodeLensOptions struct {
- // Code lens has a resolve provider as well.
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link CodeLensRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensParams
-type CodeLensParams struct {
- // The document to request code lens for.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link CodeLensRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensRegistrationOptions
-type CodeLensRegistrationOptions struct {
- TextDocumentRegistrationOptions
- CodeLensOptions
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensWorkspaceClientCapabilities
-type CodeLensWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from the
- // server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // code lenses currently shown. It should be used with absolute care and is
- // useful for situation where a server for example detect a project wide
- // change that requires such a calculation.
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// Represents a color in RGBA space.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#color
-type Color struct {
- // The red component of this color in the range [0-1].
- Red float64 `json:"red"`
- // The green component of this color in the range [0-1].
- Green float64 `json:"green"`
- // The blue component of this color in the range [0-1].
- Blue float64 `json:"blue"`
- // The alpha component of this color in the range [0-1].
- Alpha float64 `json:"alpha"`
-}
-
-// Represents a color range from a document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#colorInformation
-type ColorInformation struct {
- // The range in the document where this color appears.
- Range Range `json:"range"`
- // The actual color value for this color range.
- Color Color `json:"color"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#colorPresentation
-type ColorPresentation struct {
- // The label of this color presentation. It will be shown on the color
- // picker header. By default this is also the text that is inserted when selecting
- // this color presentation.
- Label string `json:"label"`
- // An {@link TextEdit edit} which is applied to a document when selecting
- // this presentation for the color. When `falsy` the {@link ColorPresentation.label label}
- // is used.
- TextEdit *TextEdit `json:"textEdit,omitempty"`
- // An optional array of additional {@link TextEdit text edits} that are applied when
- // selecting this color presentation. Edits must not overlap with the main {@link ColorPresentation.textEdit edit} nor with themselves.
- AdditionalTextEdits []TextEdit `json:"additionalTextEdits,omitempty"`
-}
-
-// Parameters for a {@link ColorPresentationRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#colorPresentationParams
-type ColorPresentationParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The color to request presentations for.
- Color Color `json:"color"`
- // The range where the color would be inserted. Serves as a context.
- Range Range `json:"range"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Represents a reference to a command. Provides a title which
-// will be used to represent a command in the UI and, optionally,
-// an array of arguments which will be passed to the command handler
-// function when invoked.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#command
-type Command struct {
- // Title of the command, like `save`.
- Title string `json:"title"`
- // An optional tooltip.
- //
- // @since 3.18.0
- // @proposed
- Tooltip string `json:"tooltip,omitempty"`
- // The identifier of the actual command handler.
- Command string `json:"command"`
- // Arguments that the command handler should be
- // invoked with.
- Arguments []json.RawMessage `json:"arguments,omitempty"`
-}
-
-// Completion client capabilities
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionClientCapabilities
-type CompletionClientCapabilities struct {
- // Whether completion supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports the following `CompletionItem` specific
- // capabilities.
- CompletionItem ClientCompletionItemOptions `json:"completionItem,omitempty"`
- CompletionItemKind *ClientCompletionItemOptionsKind `json:"completionItemKind,omitempty"`
- // Defines how the client handles whitespace and indentation
- // when accepting a completion item that uses multi line
- // text in either `insertText` or `textEdit`.
- //
- // @since 3.17.0
- InsertTextMode InsertTextMode `json:"insertTextMode,omitempty"`
- // The client supports to send additional context information for a
- // `textDocument/completion` request.
- ContextSupport bool `json:"contextSupport,omitempty"`
- // The client supports the following `CompletionList` specific
- // capabilities.
- //
- // @since 3.17.0
- CompletionList *CompletionListCapabilities `json:"completionList,omitempty"`
-}
-
-// Contains additional information about the context in which a completion request is triggered.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionContext
-type CompletionContext struct {
- // How the completion was triggered.
- TriggerKind CompletionTriggerKind `json:"triggerKind"`
- // The trigger character (a single character) that has trigger code complete.
- // Is undefined if `triggerKind !== CompletionTriggerKind.TriggerCharacter`
- TriggerCharacter string `json:"triggerCharacter,omitempty"`
-}
-
-// A completion item represents a text snippet that is
-// proposed to complete text that is being typed.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItem
-type CompletionItem struct {
- // The label of this completion item.
- //
- // The label property is also by default the text that
- // is inserted when selecting this completion.
- //
- // If label details are provided the label itself should
- // be an unqualified name of the completion item.
- Label string `json:"label"`
- // Additional details for the label
- //
- // @since 3.17.0
- LabelDetails *CompletionItemLabelDetails `json:"labelDetails,omitempty"`
- // The kind of this completion item. Based of the kind
- // an icon is chosen by the editor.
- Kind CompletionItemKind `json:"kind,omitempty"`
- // Tags for this completion item.
- //
- // @since 3.15.0
- Tags []CompletionItemTag `json:"tags,omitempty"`
- // A human-readable string with additional information
- // about this item, like type or symbol information.
- Detail string `json:"detail,omitempty"`
- // A human-readable string that represents a doc-comment.
- Documentation *Or_CompletionItem_documentation `json:"documentation,omitempty"`
- // Indicates if this item is deprecated.
- // @deprecated Use `tags` instead.
- Deprecated bool `json:"deprecated,omitempty"`
- // Select this item when showing.
- //
- // *Note* that only one completion item can be selected and that the
- // tool / client decides which item that is. The rule is that the *first*
- // item of those that match best is selected.
- Preselect bool `json:"preselect,omitempty"`
- // A string that should be used when comparing this item
- // with other items. When `falsy` the {@link CompletionItem.label label}
- // is used.
- SortText string `json:"sortText,omitempty"`
- // A string that should be used when filtering a set of
- // completion items. When `falsy` the {@link CompletionItem.label label}
- // is used.
- FilterText string `json:"filterText,omitempty"`
- // A string that should be inserted into a document when selecting
- // this completion. When `falsy` the {@link CompletionItem.label label}
- // is used.
- //
- // The `insertText` is subject to interpretation by the client side.
- // Some tools might not take the string literally. For example
- // VS Code when code complete is requested in this example
- // `con<cursor position>` and a completion item with an `insertText` of
- // `console` is provided it will only insert `sole`. Therefore it is
- // recommended to use `textEdit` instead since it avoids additional client
- // side interpretation.
- InsertText string `json:"insertText,omitempty"`
- // The format of the insert text. The format applies to both the
- // `insertText` property and the `newText` property of a provided
- // `textEdit`. If omitted defaults to `InsertTextFormat.PlainText`.
- //
- // Please note that the insertTextFormat doesn't apply to
- // `additionalTextEdits`.
- InsertTextFormat *InsertTextFormat `json:"insertTextFormat,omitempty"`
- // How whitespace and indentation is handled during completion
- // item insertion. If not provided the clients default value depends on
- // the `textDocument.completion.insertTextMode` client capability.
- //
- // @since 3.16.0
- InsertTextMode *InsertTextMode `json:"insertTextMode,omitempty"`
- // An {@link TextEdit edit} which is applied to a document when selecting
- // this completion. When an edit is provided the value of
- // {@link CompletionItem.insertText insertText} is ignored.
- //
- // Most editors support two different operations when accepting a completion
- // item. One is to insert a completion text and the other is to replace an
- // existing text with a completion text. Since this can usually not be
- // predetermined by a server it can report both ranges. Clients need to
- // signal support for `InsertReplaceEdits` via the
- // `textDocument.completion.insertReplaceSupport` client capability
- // property.
- //
- // *Note 1:* The text edit's range as well as both ranges from an insert
- // replace edit must be a [single line] and they must contain the position
- // at which completion has been requested.
- // *Note 2:* If an `InsertReplaceEdit` is returned the edit's insert range
- // must be a prefix of the edit's replace range, that means it must be
- // contained and starting at the same position.
- //
- // @since 3.16.0 additional type `InsertReplaceEdit`
- TextEdit *Or_CompletionItem_textEdit `json:"textEdit,omitempty"`
- // The edit text used if the completion item is part of a CompletionList and
- // CompletionList defines an item default for the text edit range.
- //
- // Clients will only honor this property if they opt into completion list
- // item defaults using the capability `completionList.itemDefaults`.
- //
- // If not provided and a list's default range is provided the label
- // property is used as a text.
- //
- // @since 3.17.0
- TextEditText string `json:"textEditText,omitempty"`
- // An optional array of additional {@link TextEdit text edits} that are applied when
- // selecting this completion. Edits must not overlap (including the same insert position)
- // with the main {@link CompletionItem.textEdit edit} nor with themselves.
- //
- // Additional text edits should be used to change text unrelated to the current cursor position
- // (for example adding an import statement at the top of the file if the completion item will
- // insert an unqualified type).
- AdditionalTextEdits []TextEdit `json:"additionalTextEdits,omitempty"`
- // An optional set of characters that when pressed while this completion is active will accept it first and
- // then type that character. *Note* that all commit characters should have `length=1` and that superfluous
- // characters will be ignored.
- CommitCharacters []string `json:"commitCharacters,omitempty"`
- // An optional {@link Command command} that is executed *after* inserting this completion. *Note* that
- // additional modifications to the current document should be described with the
- // {@link CompletionItem.additionalTextEdits additionalTextEdits}-property.
- Command *Command `json:"command,omitempty"`
- // A data entry field that is preserved on a completion item between a
- // {@link CompletionRequest} and a {@link CompletionResolveRequest}.
- Data interface{} `json:"data,omitempty"`
-}
-
-// In many cases the items of an actual completion result share the same
-// value for properties like `commitCharacters` or the range of a text
-// edit. A completion list can therefore define item defaults which will
-// be used if a completion item itself doesn't specify the value.
-//
-// If a completion list specifies a default value and a completion item
-// also specifies a corresponding value the one from the item is used.
-//
-// Servers are only allowed to return default values if the client
-// signals support for this via the `completionList.itemDefaults`
-// capability.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItemDefaults
-type CompletionItemDefaults struct {
- // A default commit character set.
- //
- // @since 3.17.0
- CommitCharacters []string `json:"commitCharacters,omitempty"`
- // A default edit range.
- //
- // @since 3.17.0
- EditRange *Or_CompletionItemDefaults_editRange `json:"editRange,omitempty"`
- // A default insert text format.
- //
- // @since 3.17.0
- InsertTextFormat *InsertTextFormat `json:"insertTextFormat,omitempty"`
- // A default insert text mode.
- //
- // @since 3.17.0
- InsertTextMode *InsertTextMode `json:"insertTextMode,omitempty"`
- // A default data value.
- //
- // @since 3.17.0
- Data interface{} `json:"data,omitempty"`
-}
-
-// The kind of a completion entry.
-type CompletionItemKind uint32
-
-// Additional details for a completion item label.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItemLabelDetails
-type CompletionItemLabelDetails struct {
- // An optional string which is rendered less prominently directly after {@link CompletionItem.label label},
- // without any spacing. Should be used for function signatures and type annotations.
- Detail string `json:"detail,omitempty"`
- // An optional string which is rendered less prominently after {@link CompletionItem.detail}. Should be used
- // for fully qualified names and file paths.
- Description string `json:"description,omitempty"`
-}
-
-// Completion item tags are extra annotations that tweak the rendering of a completion
-// item.
-//
-// @since 3.15.0
-type CompletionItemTag uint32
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItemTagOptions
-type CompletionItemTagOptions struct {
- // The tags supported by the client.
- ValueSet []CompletionItemTag `json:"valueSet"`
-}
-
-// Represents a collection of {@link CompletionItem completion items} to be presented
-// in the editor.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionList
-type CompletionList struct {
- // This list it not complete. Further typing results in recomputing this list.
- //
- // Recomputed lists have all their items replaced (not appended) in the
- // incomplete completion sessions.
- IsIncomplete bool `json:"isIncomplete"`
- // In many cases the items of an actual completion result share the same
- // value for properties like `commitCharacters` or the range of a text
- // edit. A completion list can therefore define item defaults which will
- // be used if a completion item itself doesn't specify the value.
- //
- // If a completion list specifies a default value and a completion item
- // also specifies a corresponding value the one from the item is used.
- //
- // Servers are only allowed to return default values if the client
- // signals support for this via the `completionList.itemDefaults`
- // capability.
- //
- // @since 3.17.0
- ItemDefaults *CompletionItemDefaults `json:"itemDefaults,omitempty"`
- // The completion items.
- Items []CompletionItem `json:"items"`
-}
-
-// The client supports the following `CompletionList` specific
-// capabilities.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionListCapabilities
-type CompletionListCapabilities struct {
- // The client supports the following itemDefaults on
- // a completion list.
- //
- // The value lists the supported property names of the
- // `CompletionList.itemDefaults` object. If omitted
- // no properties are supported.
- //
- // @since 3.17.0
- ItemDefaults []string `json:"itemDefaults,omitempty"`
-}
-
-// Completion options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionOptions
-type CompletionOptions struct {
- // Most tools trigger completion request automatically without explicitly requesting
- // it using a keyboard shortcut (e.g. Ctrl+Space). Typically they do so when the user
- // starts to type an identifier. For example if the user types `c` in a JavaScript file
- // code complete will automatically pop up present `console` besides others as a
- // completion item. Characters that make up identifiers don't need to be listed here.
- //
- // If code complete should automatically be trigger on characters not being valid inside
- // an identifier (for example `.` in JavaScript) list them in `triggerCharacters`.
- TriggerCharacters []string `json:"triggerCharacters,omitempty"`
- // The list of all possible characters that commit a completion. This field can be used
- // if clients don't support individual commit characters per completion item. See
- // `ClientCapabilities.textDocument.completion.completionItem.commitCharactersSupport`
- //
- // If a server provides both `allCommitCharacters` and commit characters on an individual
- // completion item the ones on the completion item win.
- //
- // @since 3.2.0
- AllCommitCharacters []string `json:"allCommitCharacters,omitempty"`
- // The server provides support to resolve additional
- // information for a completion item.
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- // The server supports the following `CompletionItem` specific
- // capabilities.
- //
- // @since 3.17.0
- CompletionItem *ServerCompletionItemOptions `json:"completionItem,omitempty"`
- WorkDoneProgressOptions
-}
-
-// Completion parameters
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionParams
-type CompletionParams struct {
- // The completion context. This is only available it the client specifies
- // to send this using the client capability `textDocument.completion.contextSupport === true`
- Context CompletionContext `json:"context,omitempty"`
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link CompletionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionRegistrationOptions
-type CompletionRegistrationOptions struct {
- TextDocumentRegistrationOptions
- CompletionOptions
-}
-
-// How a completion was triggered
-type CompletionTriggerKind uint32
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#configurationItem
-type ConfigurationItem struct {
- // The scope to get the configuration section for.
- ScopeURI *URI `json:"scopeUri,omitempty"`
- // The configuration section asked for.
- Section string `json:"section,omitempty"`
-}
-
-// The parameters of a configuration request.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#configurationParams
-type ConfigurationParams struct {
- Items []ConfigurationItem `json:"items"`
-}
-
-// Create file operation.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#createFile
-type CreateFile struct {
- // A create
- Kind string `json:"kind"`
- // The resource to create.
- URI DocumentURI `json:"uri"`
- // Additional options
- Options *CreateFileOptions `json:"options,omitempty"`
- ResourceOperation
-}
-
-// Options to create a file.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#createFileOptions
-type CreateFileOptions struct {
- // Overwrite existing file. Overwrite wins over `ignoreIfExists`
- Overwrite bool `json:"overwrite,omitempty"`
- // Ignore if exists.
- IgnoreIfExists bool `json:"ignoreIfExists,omitempty"`
-}
-
-// The parameters sent in notifications/requests for user-initiated creation of
-// files.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#createFilesParams
-type CreateFilesParams struct {
- // An array of all files/folders created in this operation.
- Files []FileCreate `json:"files"`
-}
-
-// The declaration of a symbol representation as one or many {@link Location locations}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declaration
-type (
- Declaration = Or_Declaration // (alias)
- // @since 3.14.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationClientCapabilities
- DeclarationClientCapabilities struct {
- // Whether declaration supports dynamic registration. If this is set to `true`
- // the client supports the new `DeclarationRegistrationOptions` return value
- // for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports additional metadata in the form of declaration links.
- LinkSupport bool `json:"linkSupport,omitempty"`
- }
-)
-
-// Information about where a symbol is declared.
-//
-// Provides additional metadata over normal {@link Location location} declarations, including the range of
-// the declaring symbol.
-//
-// Servers should prefer returning `DeclarationLink` over `Declaration` if supported
-// by the client.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationLink
-type (
- DeclarationLink = LocationLink // (alias)
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationOptions
- DeclarationOptions struct {
- WorkDoneProgressOptions
- }
-)
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationParams
-type DeclarationParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationRegistrationOptions
-type DeclarationRegistrationOptions struct {
- DeclarationOptions
- TextDocumentRegistrationOptions
- StaticRegistrationOptions
-}
-
-// The definition of a symbol represented as one or many {@link Location locations}.
-// For most programming languages there is only one location at which a symbol is
-// defined.
-//
-// Servers should prefer returning `DefinitionLink` over `Definition` if supported
-// by the client.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definition
-type (
- Definition = Or_Definition // (alias)
- // Client Capabilities for a {@link DefinitionRequest}.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionClientCapabilities
- DefinitionClientCapabilities struct {
- // Whether definition supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports additional metadata in the form of definition links.
- //
- // @since 3.14.0
- LinkSupport bool `json:"linkSupport,omitempty"`
- }
-)
-
-// Information about where a symbol is defined.
-//
-// Provides additional metadata over normal {@link Location location} definitions, including the range of
-// the defining symbol
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionLink
-type (
- DefinitionLink = LocationLink // (alias)
- // Server Capabilities for a {@link DefinitionRequest}.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionOptions
- DefinitionOptions struct {
- WorkDoneProgressOptions
- }
-)
-
-// Parameters for a {@link DefinitionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionParams
-type DefinitionParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link DefinitionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionRegistrationOptions
-type DefinitionRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DefinitionOptions
-}
-
-// Delete file operation
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#deleteFile
-type DeleteFile struct {
- // A delete
- Kind string `json:"kind"`
- // The file to delete.
- URI DocumentURI `json:"uri"`
- // Delete options.
- Options *DeleteFileOptions `json:"options,omitempty"`
- ResourceOperation
-}
-
-// Delete file options
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#deleteFileOptions
-type DeleteFileOptions struct {
- // Delete the content recursively if a folder is denoted.
- Recursive bool `json:"recursive,omitempty"`
- // Ignore the operation if the file doesn't exist.
- IgnoreIfNotExists bool `json:"ignoreIfNotExists,omitempty"`
-}
-
-// The parameters sent in notifications/requests for user-initiated deletes of
-// files.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#deleteFilesParams
-type DeleteFilesParams struct {
- // An array of all files/folders deleted in this operation.
- Files []FileDelete `json:"files"`
-}
-
-// Represents a diagnostic, such as a compiler error or warning. Diagnostic objects
-// are only valid in the scope of a resource.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnostic
-type Diagnostic struct {
- // The range at which the message applies
- Range Range `json:"range"`
- // The diagnostic's severity. To avoid interpretation mismatches when a
- // server is used with different clients it is highly recommended that servers
- // always provide a severity value.
- Severity DiagnosticSeverity `json:"severity,omitempty"`
- // The diagnostic's code, which usually appear in the user interface.
- Code interface{} `json:"code,omitempty"`
- // An optional property to describe the error code.
- // Requires the code field (above) to be present/not null.
- //
- // @since 3.16.0
- CodeDescription *CodeDescription `json:"codeDescription,omitempty"`
- // A human-readable string describing the source of this
- // diagnostic, e.g. 'typescript' or 'super lint'. It usually
- // appears in the user interface.
- Source string `json:"source,omitempty"`
- // The diagnostic's message. It usually appears in the user interface
- Message string `json:"message"`
- // Additional metadata about the diagnostic.
- //
- // @since 3.15.0
- Tags []DiagnosticTag `json:"tags,omitempty"`
- // An array of related diagnostic information, e.g. when symbol-names within
- // a scope collide all definitions can be marked via this property.
- RelatedInformation []DiagnosticRelatedInformation `json:"relatedInformation,omitempty"`
- // A data entry field that is preserved between a `textDocument/publishDiagnostics`
- // notification and `textDocument/codeAction` request.
- //
- // @since 3.16.0
- Data *json.RawMessage `json:"data,omitempty"`
-}
-
-// Client capabilities specific to diagnostic pull requests.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticClientCapabilities
-type DiagnosticClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Whether the clients supports related documents for document diagnostic pulls.
- RelatedDocumentSupport bool `json:"relatedDocumentSupport,omitempty"`
- DiagnosticsCapabilities
-}
-
-// Diagnostic options.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticOptions
-type DiagnosticOptions struct {
- // An optional identifier under which the diagnostics are
- // managed by the client.
- Identifier string `json:"identifier,omitempty"`
- // Whether the language has inter file dependencies meaning that
- // editing code in one file can result in a different diagnostic
- // set in another file. Inter file dependencies are common for
- // most programming languages and typically uncommon for linters.
- InterFileDependencies bool `json:"interFileDependencies"`
- // The server provides support for workspace diagnostics as well.
- WorkspaceDiagnostics bool `json:"workspaceDiagnostics"`
- WorkDoneProgressOptions
-}
-
-// Diagnostic registration options.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticRegistrationOptions
-type DiagnosticRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DiagnosticOptions
- StaticRegistrationOptions
-}
-
-// Represents a related message and source code location for a diagnostic. This should be
-// used to point to code locations that cause or related to a diagnostics, e.g when duplicating
-// a symbol in a scope.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticRelatedInformation
-type DiagnosticRelatedInformation struct {
- // The location of this related diagnostic information.
- Location Location `json:"location"`
- // The message of this related diagnostic information.
- Message string `json:"message"`
-}
-
-// Cancellation data returned from a diagnostic request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticServerCancellationData
-type DiagnosticServerCancellationData struct {
- RetriggerRequest bool `json:"retriggerRequest"`
-}
-
-// The diagnostic's severity.
-type DiagnosticSeverity uint32
-
-// The diagnostic tags.
-//
-// @since 3.15.0
-type DiagnosticTag uint32
-
-// Workspace client capabilities specific to diagnostic pull requests.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticWorkspaceClientCapabilities
-type DiagnosticWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from
- // the server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // pulled diagnostics currently shown. It should be used with absolute care and
- // is useful for situation where a server for example detects a project wide
- // change that requires such a calculation.
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// General diagnostics capabilities for pull and push model.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticsCapabilities
-type DiagnosticsCapabilities struct {
- // Whether the clients accepts diagnostics with related information.
- RelatedInformation bool `json:"relatedInformation,omitempty"`
- // Client supports the tag property to provide meta data about a diagnostic.
- // Clients supporting tags have to handle unknown tags gracefully.
- //
- // @since 3.15.0
- TagSupport *ClientDiagnosticsTagOptions `json:"tagSupport,omitempty"`
- // Client supports a codeDescription property
- //
- // @since 3.16.0
- CodeDescriptionSupport bool `json:"codeDescriptionSupport,omitempty"`
- // Whether code action supports the `data` property which is
- // preserved between a `textDocument/publishDiagnostics` and
- // `textDocument/codeAction` request.
- //
- // @since 3.16.0
- DataSupport bool `json:"dataSupport,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeConfigurationClientCapabilities
-type DidChangeConfigurationClientCapabilities struct {
- // Did change configuration notification supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// The parameters of a change configuration notification.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeConfigurationParams
-type DidChangeConfigurationParams struct {
- // The actual changed settings
- Settings interface{} `json:"settings"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeConfigurationRegistrationOptions
-type DidChangeConfigurationRegistrationOptions struct {
- Section *Or_DidChangeConfigurationRegistrationOptions_section `json:"section,omitempty"`
-}
-
-// The params sent in a change notebook document notification.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeNotebookDocumentParams
-type DidChangeNotebookDocumentParams struct {
- // The notebook document that did change. The version number points
- // to the version after all provided changes have been applied. If
- // only the text document content of a cell changes the notebook version
- // doesn't necessarily have to change.
- NotebookDocument VersionedNotebookDocumentIdentifier `json:"notebookDocument"`
- // The actual changes to the notebook document.
- //
- // The changes describe single state changes to the notebook document.
- // So if there are two changes c1 (at array index 0) and c2 (at array
- // index 1) for a notebook in state S then c1 moves the notebook from
- // S to S' and c2 from S' to S''. So c1 is computed on the state S and
- // c2 is computed on the state S'.
- //
- // To mirror the content of a notebook using change events use the following approach:
- //
- // - start with the same initial content
- // - apply the 'notebookDocument/didChange' notifications in the order you receive them.
- // - apply the `NotebookChangeEvent`s in a single notification in the order
- // you receive them.
- Change NotebookDocumentChangeEvent `json:"change"`
-}
-
-// The change text document notification's parameters.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeTextDocumentParams
-type DidChangeTextDocumentParams struct {
- // The document that did change. The version number points
- // to the version after all provided content changes have
- // been applied.
- TextDocument VersionedTextDocumentIdentifier `json:"textDocument"`
- // The actual content changes. The content changes describe single state changes
- // to the document. So if there are two content changes c1 (at array index 0) and
- // c2 (at array index 1) for a document in state S then c1 moves the document from
- // S to S' and c2 from S' to S''. So c1 is computed on the state S and c2 is computed
- // on the state S'.
- //
- // To mirror the content of a document using change events use the following approach:
- //
- // - start with the same initial content
- // - apply the 'textDocument/didChange' notifications in the order you receive them.
- // - apply the `TextDocumentContentChangeEvent`s in a single notification in the order
- // you receive them.
- ContentChanges []TextDocumentContentChangeEvent `json:"contentChanges"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeWatchedFilesClientCapabilities
-type DidChangeWatchedFilesClientCapabilities struct {
- // Did change watched files notification supports dynamic registration. Please note
- // that the current protocol doesn't support static configuration for file changes
- // from the server side.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Whether the client has support for {@link RelativePattern relative pattern}
- // or not.
- //
- // @since 3.17.0
- RelativePatternSupport bool `json:"relativePatternSupport,omitempty"`
-}
-
-// The watched files change notification's parameters.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeWatchedFilesParams
-type DidChangeWatchedFilesParams struct {
- // The actual file events.
- Changes []FileEvent `json:"changes"`
-}
-
-// Describe options to be used when registered for text document change events.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeWatchedFilesRegistrationOptions
-type DidChangeWatchedFilesRegistrationOptions struct {
- // The watchers to register.
- Watchers []FileSystemWatcher `json:"watchers"`
-}
-
-// The parameters of a `workspace/didChangeWorkspaceFolders` notification.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeWorkspaceFoldersParams
-type DidChangeWorkspaceFoldersParams struct {
- // The actual workspace folder change event.
- Event WorkspaceFoldersChangeEvent `json:"event"`
-}
-
-// The params sent in a close notebook document notification.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didCloseNotebookDocumentParams
-type DidCloseNotebookDocumentParams struct {
- // The notebook document that got closed.
- NotebookDocument NotebookDocumentIdentifier `json:"notebookDocument"`
- // The text documents that represent the content
- // of a notebook cell that got closed.
- CellTextDocuments []TextDocumentIdentifier `json:"cellTextDocuments"`
-}
-
-// The parameters sent in a close text document notification
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didCloseTextDocumentParams
-type DidCloseTextDocumentParams struct {
- // The document that was closed.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
-}
-
-// The params sent in an open notebook document notification.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didOpenNotebookDocumentParams
-type DidOpenNotebookDocumentParams struct {
- // The notebook document that got opened.
- NotebookDocument NotebookDocument `json:"notebookDocument"`
- // The text documents that represent the content
- // of a notebook cell.
- CellTextDocuments []TextDocumentItem `json:"cellTextDocuments"`
-}
-
-// The parameters sent in an open text document notification
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didOpenTextDocumentParams
-type DidOpenTextDocumentParams struct {
- // The document that was opened.
- TextDocument TextDocumentItem `json:"textDocument"`
-}
-
-// The params sent in a save notebook document notification.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didSaveNotebookDocumentParams
-type DidSaveNotebookDocumentParams struct {
- // The notebook document that got saved.
- NotebookDocument NotebookDocumentIdentifier `json:"notebookDocument"`
-}
-
-// The parameters sent in a save text document notification
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didSaveTextDocumentParams
-type DidSaveTextDocumentParams struct {
- // The document that was saved.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // Optional the content when saved. Depends on the includeText value
- // when the save notification was requested.
- Text *string `json:"text,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentColorClientCapabilities
-type DocumentColorClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `DocumentColorRegistrationOptions` return value
- // for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentColorOptions
-type DocumentColorOptions struct {
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link DocumentColorRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentColorParams
-type DocumentColorParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentColorRegistrationOptions
-type DocumentColorRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentColorOptions
- StaticRegistrationOptions
-}
-
-// Parameters of the document diagnostic request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentDiagnosticParams
-type DocumentDiagnosticParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The additional identifier provided during registration.
- Identifier string `json:"identifier,omitempty"`
- // The result id of a previous response if provided.
- PreviousResultID string `json:"previousResultId,omitempty"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// The result of a document diagnostic pull request. A report can
-// either be a full report containing all diagnostics for the
-// requested document or an unchanged report indicating that nothing
-// has changed in terms of diagnostics in comparison to the last
-// pull request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentDiagnosticReport
-type (
- DocumentDiagnosticReport = Or_DocumentDiagnosticReport // (alias)
- // The document diagnostic report kinds.
- //
- // @since 3.17.0
- DocumentDiagnosticReportKind string
-)
-
-// A partial result for a document diagnostic report.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentDiagnosticReportPartialResult
-type DocumentDiagnosticReportPartialResult struct {
- RelatedDocuments map[DocumentURI]interface{} `json:"relatedDocuments"`
-}
-
-// A document filter describes a top level text document or
-// a notebook cell document.
-//
-// @since 3.17.0 - proposed support for NotebookCellTextDocumentFilter.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFilter
-type (
- DocumentFilter = Or_DocumentFilter // (alias)
- // Client capabilities of a {@link DocumentFormattingRequest}.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFormattingClientCapabilities
- DocumentFormattingClientCapabilities struct {
- // Whether formatting supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- }
-)
-
-// Provider options for a {@link DocumentFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFormattingOptions
-type DocumentFormattingOptions struct {
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link DocumentFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFormattingParams
-type DocumentFormattingParams struct {
- // The document to format.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The format options.
- Options FormattingOptions `json:"options"`
- WorkDoneProgressParams
-}
-
-// Registration options for a {@link DocumentFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFormattingRegistrationOptions
-type DocumentFormattingRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentFormattingOptions
-}
-
-// A document highlight is a range inside a text document which deserves
-// special attention. Usually a document highlight is visualized by changing
-// the background color of its range.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlight
-type DocumentHighlight struct {
- // The range this highlight applies to.
- Range Range `json:"range"`
- // The highlight kind, default is {@link DocumentHighlightKind.Text text}.
- Kind DocumentHighlightKind `json:"kind,omitempty"`
-}
-
-// Client Capabilities for a {@link DocumentHighlightRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlightClientCapabilities
-type DocumentHighlightClientCapabilities struct {
- // Whether document highlight supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// A document highlight kind.
-type DocumentHighlightKind uint32
-
-// Provider options for a {@link DocumentHighlightRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlightOptions
-type DocumentHighlightOptions struct {
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link DocumentHighlightRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlightParams
-type DocumentHighlightParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link DocumentHighlightRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlightRegistrationOptions
-type DocumentHighlightRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentHighlightOptions
-}
-
-// A document link is a range in a text document that links to an internal or external resource, like another
-// text document or a web site.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLink
-type DocumentLink struct {
- // The range this link applies to.
- Range Range `json:"range"`
- // The uri this link points to. If missing a resolve request is sent later.
- Target *URI `json:"target,omitempty"`
- // The tooltip text when you hover over this link.
- //
- // If a tooltip is provided, is will be displayed in a string that includes instructions on how to
- // trigger the link, such as `{0} (ctrl + click)`. The specific instructions vary depending on OS,
- // user settings, and localization.
- //
- // @since 3.15.0
- Tooltip string `json:"tooltip,omitempty"`
- // A data entry field that is preserved on a document link between a
- // DocumentLinkRequest and a DocumentLinkResolveRequest.
- Data interface{} `json:"data,omitempty"`
-}
-
-// The client capabilities of a {@link DocumentLinkRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLinkClientCapabilities
-type DocumentLinkClientCapabilities struct {
- // Whether document link supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Whether the client supports the `tooltip` property on `DocumentLink`.
- //
- // @since 3.15.0
- TooltipSupport bool `json:"tooltipSupport,omitempty"`
-}
-
-// Provider options for a {@link DocumentLinkRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLinkOptions
-type DocumentLinkOptions struct {
- // Document links have a resolve provider as well.
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link DocumentLinkRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLinkParams
-type DocumentLinkParams struct {
- // The document to provide document links for.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link DocumentLinkRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLinkRegistrationOptions
-type DocumentLinkRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentLinkOptions
-}
-
-// Client capabilities of a {@link DocumentOnTypeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentOnTypeFormattingClientCapabilities
-type DocumentOnTypeFormattingClientCapabilities struct {
- // Whether on type formatting supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// Provider options for a {@link DocumentOnTypeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentOnTypeFormattingOptions
-type DocumentOnTypeFormattingOptions struct {
- // A character on which formatting should be triggered, like `{`.
- FirstTriggerCharacter string `json:"firstTriggerCharacter"`
- // More trigger characters.
- MoreTriggerCharacter []string `json:"moreTriggerCharacter,omitempty"`
-}
-
-// The parameters of a {@link DocumentOnTypeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentOnTypeFormattingParams
-type DocumentOnTypeFormattingParams struct {
- // The document to format.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The position around which the on type formatting should happen.
- // This is not necessarily the exact position where the character denoted
- // by the property `ch` got typed.
- Position Position `json:"position"`
- // The character that has been typed that triggered the formatting
- // on type request. That is not necessarily the last character that
- // got inserted into the document since the client could auto insert
- // characters as well (e.g. like automatic brace completion).
- Ch string `json:"ch"`
- // The formatting options.
- Options FormattingOptions `json:"options"`
-}
-
-// Registration options for a {@link DocumentOnTypeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentOnTypeFormattingRegistrationOptions
-type DocumentOnTypeFormattingRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentOnTypeFormattingOptions
-}
-
-// Client capabilities of a {@link DocumentRangeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangeFormattingClientCapabilities
-type DocumentRangeFormattingClientCapabilities struct {
- // Whether range formatting supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Whether the client supports formatting multiple ranges at once.
- //
- // @since 3.18.0
- // @proposed
- RangesSupport bool `json:"rangesSupport,omitempty"`
-}
-
-// Provider options for a {@link DocumentRangeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangeFormattingOptions
-type DocumentRangeFormattingOptions struct {
- // Whether the server supports formatting multiple ranges at once.
- //
- // @since 3.18.0
- // @proposed
- RangesSupport bool `json:"rangesSupport,omitempty"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link DocumentRangeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangeFormattingParams
-type DocumentRangeFormattingParams struct {
- // The document to format.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The range to format
- Range Range `json:"range"`
- // The format options
- Options FormattingOptions `json:"options"`
- WorkDoneProgressParams
-}
-
-// Registration options for a {@link DocumentRangeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangeFormattingRegistrationOptions
-type DocumentRangeFormattingRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentRangeFormattingOptions
-}
-
-// The parameters of a {@link DocumentRangesFormattingRequest}.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangesFormattingParams
-type DocumentRangesFormattingParams struct {
- // The document to format.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The ranges to format
- Ranges []Range `json:"ranges"`
- // The format options
- Options FormattingOptions `json:"options"`
- WorkDoneProgressParams
-}
-
-// A document selector is the combination of one or many document filters.
-//
-// @sample `let sel:DocumentSelector = [{ language: 'typescript' }, { language: 'json', pattern: '**∕tsconfig.json' }]`;
-//
-// The use of a string as a document filter is deprecated @since 3.16.0.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSelector
-type (
- DocumentSelector = []DocumentFilter // (alias)
- // Represents programming constructs like variables, classes, interfaces etc.
- // that appear in a document. Document symbols can be hierarchical and they
- // have two ranges: one that encloses its definition and one that points to
- // its most interesting range, e.g. the range of an identifier.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbol
- DocumentSymbol struct {
- // The name of this symbol. Will be displayed in the user interface and therefore must not be
- // an empty string or a string only consisting of white spaces.
- Name string `json:"name"`
- // More detail for this symbol, e.g the signature of a function.
- Detail string `json:"detail,omitempty"`
- // The kind of this symbol.
- Kind SymbolKind `json:"kind"`
- // Tags for this document symbol.
- //
- // @since 3.16.0
- Tags []SymbolTag `json:"tags,omitempty"`
- // Indicates if this symbol is deprecated.
- //
- // @deprecated Use tags instead
- Deprecated bool `json:"deprecated,omitempty"`
- // The range enclosing this symbol not including leading/trailing whitespace but everything else
- // like comments. This information is typically used to determine if the clients cursor is
- // inside the symbol to reveal in the symbol in the UI.
- Range Range `json:"range"`
- // The range that should be selected and revealed when this symbol is being picked, e.g the name of a function.
- // Must be contained by the `range`.
- SelectionRange Range `json:"selectionRange"`
- // Children of this symbol, e.g. properties of a class.
- Children []DocumentSymbol `json:"children,omitempty"`
- }
-)
-
-// Client Capabilities for a {@link DocumentSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbolClientCapabilities
-type DocumentSymbolClientCapabilities struct {
- // Whether document symbol supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Specific capabilities for the `SymbolKind` in the
- // `textDocument/documentSymbol` request.
- SymbolKind *ClientSymbolKindOptions `json:"symbolKind,omitempty"`
- // The client supports hierarchical document symbols.
- HierarchicalDocumentSymbolSupport bool `json:"hierarchicalDocumentSymbolSupport,omitempty"`
- // The client supports tags on `SymbolInformation`. Tags are supported on
- // `DocumentSymbol` if `hierarchicalDocumentSymbolSupport` is set to true.
- // Clients supporting tags have to handle unknown tags gracefully.
- //
- // @since 3.16.0
- TagSupport *ClientSymbolTagOptions `json:"tagSupport,omitempty"`
- // The client supports an additional label presented in the UI when
- // registering a document symbol provider.
- //
- // @since 3.16.0
- LabelSupport bool `json:"labelSupport,omitempty"`
-}
-
-// Provider options for a {@link DocumentSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbolOptions
-type DocumentSymbolOptions struct {
- // A human-readable string that is shown when multiple outlines trees
- // are shown for the same document.
- //
- // @since 3.16.0
- Label string `json:"label,omitempty"`
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link DocumentSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbolParams
-type DocumentSymbolParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link DocumentSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbolRegistrationOptions
-type DocumentSymbolRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentSymbolOptions
-}
-
-// Edit range variant that includes ranges for insert and replace operations.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#editRangeWithInsertReplace
-type EditRangeWithInsertReplace struct {
- Insert Range `json:"insert"`
- Replace Range `json:"replace"`
-}
-
-// Predefined error codes.
-type ErrorCodes int32
-
-// The client capabilities of a {@link ExecuteCommandRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executeCommandClientCapabilities
-type ExecuteCommandClientCapabilities struct {
- // Execute command supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// The server capabilities of a {@link ExecuteCommandRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executeCommandOptions
-type ExecuteCommandOptions struct {
- // The commands to be executed on the server
- Commands []string `json:"commands"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link ExecuteCommandRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executeCommandParams
-type ExecuteCommandParams struct {
- // The identifier of the actual command handler.
- Command string `json:"command"`
- // Arguments that the command should be invoked with.
- Arguments []json.RawMessage `json:"arguments,omitempty"`
- WorkDoneProgressParams
-}
-
-// Registration options for a {@link ExecuteCommandRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executeCommandRegistrationOptions
-type ExecuteCommandRegistrationOptions struct {
- ExecuteCommandOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executionSummary
-type ExecutionSummary struct {
- // A strict monotonically increasing value
- // indicating the execution order of a cell
- // inside a notebook.
- ExecutionOrder uint32 `json:"executionOrder"`
- // Whether the execution was successful or
- // not if known by the client.
- Success bool `json:"success,omitempty"`
-}
-type FailureHandlingKind string
-
-// The file event type
-type FileChangeType uint32
-
-// Represents information on a file/folder create.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileCreate
-type FileCreate struct {
- // A file:// URI for the location of the file/folder being created.
- URI string `json:"uri"`
-}
-
-// Represents information on a file/folder delete.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileDelete
-type FileDelete struct {
- // A file:// URI for the location of the file/folder being deleted.
- URI string `json:"uri"`
-}
-
-// An event describing a file change.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileEvent
-type FileEvent struct {
- // The file's uri.
- URI DocumentURI `json:"uri"`
- // The change type.
- Type FileChangeType `json:"type"`
-}
-
-// Capabilities relating to events from file operations by the user in the client.
-//
-// These events do not come from the file system, they come from user operations
-// like renaming a file in the UI.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationClientCapabilities
-type FileOperationClientCapabilities struct {
- // Whether the client supports dynamic registration for file requests/notifications.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client has support for sending didCreateFiles notifications.
- DidCreate bool `json:"didCreate,omitempty"`
- // The client has support for sending willCreateFiles requests.
- WillCreate bool `json:"willCreate,omitempty"`
- // The client has support for sending didRenameFiles notifications.
- DidRename bool `json:"didRename,omitempty"`
- // The client has support for sending willRenameFiles requests.
- WillRename bool `json:"willRename,omitempty"`
- // The client has support for sending didDeleteFiles notifications.
- DidDelete bool `json:"didDelete,omitempty"`
- // The client has support for sending willDeleteFiles requests.
- WillDelete bool `json:"willDelete,omitempty"`
-}
-
-// A filter to describe in which file operation requests or notifications
-// the server is interested in receiving.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationFilter
-type FileOperationFilter struct {
- // A Uri scheme like `file` or `untitled`.
- Scheme string `json:"scheme,omitempty"`
- // The actual file operation pattern.
- Pattern FileOperationPattern `json:"pattern"`
-}
-
-// Options for notifications/requests for user operations on files.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationOptions
-type FileOperationOptions struct {
- // The server is interested in receiving didCreateFiles notifications.
- DidCreate *FileOperationRegistrationOptions `json:"didCreate,omitempty"`
- // The server is interested in receiving willCreateFiles requests.
- WillCreate *FileOperationRegistrationOptions `json:"willCreate,omitempty"`
- // The server is interested in receiving didRenameFiles notifications.
- DidRename *FileOperationRegistrationOptions `json:"didRename,omitempty"`
- // The server is interested in receiving willRenameFiles requests.
- WillRename *FileOperationRegistrationOptions `json:"willRename,omitempty"`
- // The server is interested in receiving didDeleteFiles file notifications.
- DidDelete *FileOperationRegistrationOptions `json:"didDelete,omitempty"`
- // The server is interested in receiving willDeleteFiles file requests.
- WillDelete *FileOperationRegistrationOptions `json:"willDelete,omitempty"`
-}
-
-// A pattern to describe in which file operation requests or notifications
-// the server is interested in receiving.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationPattern
-type FileOperationPattern struct {
- // The glob pattern to match. Glob patterns can have the following syntax:
- //
- // - `*` to match one or more characters in a path segment
- // - `?` to match on one character in a path segment
- // - `**` to match any number of path segments, including none
- // - `{}` to group sub patterns into an OR expression. (e.g. `**/*.{ts,js}` matches all TypeScript and JavaScript files)
- // - `[]` to declare a range of characters to match in a path segment (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …)
- // - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`)
- Glob string `json:"glob"`
- // Whether to match files or folders with this pattern.
- //
- // Matches both if undefined.
- Matches *FileOperationPatternKind `json:"matches,omitempty"`
- // Additional options used during matching.
- Options *FileOperationPatternOptions `json:"options,omitempty"`
-}
-
-// A pattern kind describing if a glob pattern matches a file a folder or
-// both.
-//
-// @since 3.16.0
-type FileOperationPatternKind string
-
-// Matching options for the file operation pattern.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationPatternOptions
-type FileOperationPatternOptions struct {
- // The pattern should be matched ignoring casing.
- IgnoreCase bool `json:"ignoreCase,omitempty"`
-}
-
-// The options to register for file operations.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationRegistrationOptions
-type FileOperationRegistrationOptions struct {
- // The actual filters.
- Filters []FileOperationFilter `json:"filters"`
-}
-
-// Represents information on a file/folder rename.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileRename
-type FileRename struct {
- // A file:// URI for the original location of the file/folder being renamed.
- OldURI string `json:"oldUri"`
- // A file:// URI for the new location of the file/folder being renamed.
- NewURI string `json:"newUri"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileSystemWatcher
-type FileSystemWatcher struct {
- // The glob pattern to watch. See {@link GlobPattern glob pattern} for more detail.
- //
- // @since 3.17.0 support for relative patterns.
- GlobPattern GlobPattern `json:"globPattern"`
- // The kind of events of interest. If omitted it defaults
- // to WatchKind.Create | WatchKind.Change | WatchKind.Delete
- // which is 7.
- Kind *WatchKind `json:"kind,omitempty"`
-}
-
-// Represents a folding range. To be valid, start and end line must be bigger than zero and smaller
-// than the number of lines in the document. Clients are free to ignore invalid ranges.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRange
-type FoldingRange struct {
- // The zero-based start line of the range to fold. The folded area starts after the line's last character.
- // To be valid, the end must be zero or larger and smaller than the number of lines in the document.
- StartLine uint32 `json:"startLine"`
- // The zero-based character offset from where the folded range starts. If not defined, defaults to the length of the start line.
- StartCharacter uint32 `json:"startCharacter,omitempty"`
- // The zero-based end line of the range to fold. The folded area ends with the line's last character.
- // To be valid, the end must be zero or larger and smaller than the number of lines in the document.
- EndLine uint32 `json:"endLine"`
- // The zero-based character offset before the folded range ends. If not defined, defaults to the length of the end line.
- EndCharacter uint32 `json:"endCharacter,omitempty"`
- // Describes the kind of the folding range such as 'comment' or 'region'. The kind
- // is used to categorize folding ranges and used by commands like 'Fold all comments'.
- // See {@link FoldingRangeKind} for an enumeration of standardized kinds.
- Kind string `json:"kind,omitempty"`
- // The text that the client should show when the specified range is
- // collapsed. If not defined or not supported by the client, a default
- // will be chosen by the client.
- //
- // @since 3.17.0
- CollapsedText string `json:"collapsedText,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeClientCapabilities
-type FoldingRangeClientCapabilities struct {
- // Whether implementation supports dynamic registration for folding range
- // providers. If this is set to `true` the client supports the new
- // `FoldingRangeRegistrationOptions` return value for the corresponding
- // server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The maximum number of folding ranges that the client prefers to receive
- // per document. The value serves as a hint, servers are free to follow the
- // limit.
- RangeLimit uint32 `json:"rangeLimit,omitempty"`
- // If set, the client signals that it only supports folding complete lines.
- // If set, client will ignore specified `startCharacter` and `endCharacter`
- // properties in a FoldingRange.
- LineFoldingOnly bool `json:"lineFoldingOnly,omitempty"`
- // Specific options for the folding range kind.
- //
- // @since 3.17.0
- FoldingRangeKind *ClientFoldingRangeKindOptions `json:"foldingRangeKind,omitempty"`
- // Specific options for the folding range.
- //
- // @since 3.17.0
- FoldingRange *ClientFoldingRangeOptions `json:"foldingRange,omitempty"`
-}
-
-// A set of predefined range kinds.
-type FoldingRangeKind string
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeOptions
-type FoldingRangeOptions struct {
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link FoldingRangeRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeParams
-type FoldingRangeParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeRegistrationOptions
-type FoldingRangeRegistrationOptions struct {
- TextDocumentRegistrationOptions
- FoldingRangeOptions
- StaticRegistrationOptions
-}
-
-// Client workspace capabilities specific to folding ranges
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeWorkspaceClientCapabilities
-type FoldingRangeWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from the
- // server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // folding ranges currently shown. It should be used with absolute care and is
- // useful for situation where a server for example detects a project wide
- // change that requires such a calculation.
- //
- // @since 3.18.0
- // @proposed
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// Value-object describing what options formatting should use.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#formattingOptions
-type FormattingOptions struct {
- // Size of a tab in spaces.
- TabSize uint32 `json:"tabSize"`
- // Prefer spaces over tabs.
- InsertSpaces bool `json:"insertSpaces"`
- // Trim trailing whitespace on a line.
- //
- // @since 3.15.0
- TrimTrailingWhitespace bool `json:"trimTrailingWhitespace,omitempty"`
- // Insert a newline character at the end of the file if one does not exist.
- //
- // @since 3.15.0
- InsertFinalNewline bool `json:"insertFinalNewline,omitempty"`
- // Trim all newlines after the final newline at the end of the file.
- //
- // @since 3.15.0
- TrimFinalNewlines bool `json:"trimFinalNewlines,omitempty"`
-}
-
-// A diagnostic report with a full set of problems.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fullDocumentDiagnosticReport
-type FullDocumentDiagnosticReport struct {
- // A full document diagnostic report.
- Kind string `json:"kind"`
- // An optional result id. If provided it will
- // be sent on the next diagnostic request for the
- // same document.
- ResultID string `json:"resultId,omitempty"`
- // The actual items.
- Items []Diagnostic `json:"items"`
-}
-
-// General client capabilities.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#generalClientCapabilities
-type GeneralClientCapabilities struct {
- // Client capability that signals how the client
- // handles stale requests (e.g. a request
- // for which the client will not process the response
- // anymore since the information is outdated).
- //
- // @since 3.17.0
- StaleRequestSupport *StaleRequestSupportOptions `json:"staleRequestSupport,omitempty"`
- // Client capabilities specific to regular expressions.
- //
- // @since 3.16.0
- RegularExpressions *RegularExpressionsClientCapabilities `json:"regularExpressions,omitempty"`
- // Client capabilities specific to the client's markdown parser.
- //
- // @since 3.16.0
- Markdown *MarkdownClientCapabilities `json:"markdown,omitempty"`
- // The position encodings supported by the client. Client and server
- // have to agree on the same position encoding to ensure that offsets
- // (e.g. character position in a line) are interpreted the same on both
- // sides.
- //
- // To keep the protocol backwards compatible the following applies: if
- // the value 'utf-16' is missing from the array of position encodings
- // servers can assume that the client supports UTF-16. UTF-16 is
- // therefore a mandatory encoding.
- //
- // If omitted it defaults to ['utf-16'].
- //
- // Implementation considerations: since the conversion from one encoding
- // into another requires the content of the file / line the conversion
- // is best done where the file is read which is usually on the server
- // side.
- //
- // @since 3.17.0
- PositionEncodings []PositionEncodingKind `json:"positionEncodings,omitempty"`
-}
-
-// The glob pattern. Either a string pattern or a relative pattern.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#globPattern
-type (
- GlobPattern = Or_GlobPattern // (alias)
- // The result of a hover request.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hover
- Hover struct {
- // The hover's content
- Contents MarkupContent `json:"contents"`
- // An optional range inside the text document that is used to
- // visualize the hover, e.g. by changing the background color.
- Range Range `json:"range,omitempty"`
- }
-)
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hoverClientCapabilities
-type HoverClientCapabilities struct {
- // Whether hover supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Client supports the following content formats for the content
- // property. The order describes the preferred format of the client.
- ContentFormat []MarkupKind `json:"contentFormat,omitempty"`
-}
-
-// Hover options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hoverOptions
-type HoverOptions struct {
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link HoverRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hoverParams
-type HoverParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// Registration options for a {@link HoverRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hoverRegistrationOptions
-type HoverRegistrationOptions struct {
- TextDocumentRegistrationOptions
- HoverOptions
-}
-
-// @since 3.6.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#implementationClientCapabilities
-type ImplementationClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `ImplementationRegistrationOptions` return value
- // for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports additional metadata in the form of definition links.
- //
- // @since 3.14.0
- LinkSupport bool `json:"linkSupport,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#implementationOptions
-type ImplementationOptions struct {
- WorkDoneProgressOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#implementationParams
-type ImplementationParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#implementationRegistrationOptions
-type ImplementationRegistrationOptions struct {
- TextDocumentRegistrationOptions
- ImplementationOptions
- StaticRegistrationOptions
-}
-
-// The data type of the ResponseError if the
-// initialize request fails.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializeError
-type InitializeError struct {
- // Indicates whether the client execute the following retry logic:
- // (1) show the message provided by the ResponseError to the user
- // (2) user selects retry or cancel
- // (3) if user selected retry the initialize method is sent again.
- Retry bool `json:"retry"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializeParams
-type InitializeParams struct {
- XInitializeParams
- WorkspaceFoldersInitializeParams
-}
-
-// The result returned from an initialize request.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializeResult
-type InitializeResult struct {
- // The capabilities the language server provides.
- Capabilities ServerCapabilities `json:"capabilities"`
- // Information about the server.
- //
- // @since 3.15.0
- ServerInfo *ServerInfo `json:"serverInfo,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializedParams
-type InitializedParams struct{}
-
-// Inlay hint information.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHint
-type InlayHint struct {
- // The position of this hint.
- //
- // If multiple hints have the same position, they will be shown in the order
- // they appear in the response.
- Position Position `json:"position"`
- // The label of this hint. A human readable string or an array of
- // InlayHintLabelPart label parts.
- //
- // *Note* that neither the string nor the label part can be empty.
- Label []InlayHintLabelPart `json:"label"`
- // The kind of this hint. Can be omitted in which case the client
- // should fall back to a reasonable default.
- Kind InlayHintKind `json:"kind,omitempty"`
- // Optional text edits that are performed when accepting this inlay hint.
- //
- // *Note* that edits are expected to change the document so that the inlay
- // hint (or its nearest variant) is now part of the document and the inlay
- // hint itself is now obsolete.
- TextEdits []TextEdit `json:"textEdits,omitempty"`
- // The tooltip text when you hover over this item.
- Tooltip *Or_InlayHint_tooltip `json:"tooltip,omitempty"`
- // Render padding before the hint.
- //
- // Note: Padding should use the editor's background color, not the
- // background color of the hint itself. That means padding can be used
- // to visually align/separate an inlay hint.
- PaddingLeft bool `json:"paddingLeft,omitempty"`
- // Render padding after the hint.
- //
- // Note: Padding should use the editor's background color, not the
- // background color of the hint itself. That means padding can be used
- // to visually align/separate an inlay hint.
- PaddingRight bool `json:"paddingRight,omitempty"`
- // A data entry field that is preserved on an inlay hint between
- // a `textDocument/inlayHint` and a `inlayHint/resolve` request.
- Data interface{} `json:"data,omitempty"`
-}
-
-// Inlay hint client capabilities.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintClientCapabilities
-type InlayHintClientCapabilities struct {
- // Whether inlay hints support dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Indicates which properties a client can resolve lazily on an inlay
- // hint.
- ResolveSupport *ClientInlayHintResolveOptions `json:"resolveSupport,omitempty"`
-}
-
-// Inlay hint kinds.
-//
-// @since 3.17.0
-type InlayHintKind uint32
-
-// An inlay hint label part allows for interactive and composite labels
-// of inlay hints.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintLabelPart
-type InlayHintLabelPart struct {
- // The value of this label part.
- Value string `json:"value"`
- // The tooltip text when you hover over this label part. Depending on
- // the client capability `inlayHint.resolveSupport` clients might resolve
- // this property late using the resolve request.
- Tooltip *Or_InlayHintLabelPart_tooltip `json:"tooltip,omitempty"`
- // An optional source code location that represents this
- // label part.
- //
- // The editor will use this location for the hover and for code navigation
- // features: This part will become a clickable link that resolves to the
- // definition of the symbol at the given location (not necessarily the
- // location itself), it shows the hover that shows at the given location,
- // and it shows a context menu with further code navigation commands.
- //
- // Depending on the client capability `inlayHint.resolveSupport` clients
- // might resolve this property late using the resolve request.
- Location *Location `json:"location,omitempty"`
- // An optional command for this label part.
- //
- // Depending on the client capability `inlayHint.resolveSupport` clients
- // might resolve this property late using the resolve request.
- Command *Command `json:"command,omitempty"`
-}
-
-// Inlay hint options used during static registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintOptions
-type InlayHintOptions struct {
- // The server provides support to resolve additional
- // information for an inlay hint item.
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- WorkDoneProgressOptions
-}
-
-// A parameter literal used in inlay hint requests.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintParams
-type InlayHintParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The document range for which inlay hints should be computed.
- Range Range `json:"range"`
- WorkDoneProgressParams
-}
-
-// Inlay hint options used during static or dynamic registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintRegistrationOptions
-type InlayHintRegistrationOptions struct {
- InlayHintOptions
- TextDocumentRegistrationOptions
- StaticRegistrationOptions
-}
-
-// Client workspace capabilities specific to inlay hints.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintWorkspaceClientCapabilities
-type InlayHintWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from
- // the server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // inlay hints currently shown. It should be used with absolute care and
- // is useful for situation where a server for example detects a project wide
- // change that requires such a calculation.
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// Client capabilities specific to inline completions.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionClientCapabilities
-type InlineCompletionClientCapabilities struct {
- // Whether implementation supports dynamic registration for inline completion providers.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// Provides information about the context in which an inline completion was requested.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionContext
-type InlineCompletionContext struct {
- // Describes how the inline completion was triggered.
- TriggerKind InlineCompletionTriggerKind `json:"triggerKind"`
- // Provides information about the currently selected item in the autocomplete widget if it is visible.
- SelectedCompletionInfo *SelectedCompletionInfo `json:"selectedCompletionInfo,omitempty"`
-}
-
-// An inline completion item represents a text snippet that is proposed inline to complete text that is being typed.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionItem
-type InlineCompletionItem struct {
- // The text to replace the range with. Must be set.
- InsertText Or_InlineCompletionItem_insertText `json:"insertText"`
- // A text that is used to decide if this inline completion should be shown. When `falsy` the {@link InlineCompletionItem.insertText} is used.
- FilterText string `json:"filterText,omitempty"`
- // The range to replace. Must begin and end on the same line.
- Range *Range `json:"range,omitempty"`
- // An optional {@link Command} that is executed *after* inserting this completion.
- Command *Command `json:"command,omitempty"`
-}
-
-// Represents a collection of {@link InlineCompletionItem inline completion items} to be presented in the editor.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionList
-type InlineCompletionList struct {
- // The inline completion items
- Items []InlineCompletionItem `json:"items"`
-}
-
-// Inline completion options used during static registration.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionOptions
-type InlineCompletionOptions struct {
- WorkDoneProgressOptions
-}
-
-// A parameter literal used in inline completion requests.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionParams
-type InlineCompletionParams struct {
- // Additional information about the context in which inline completions were
- // requested.
- Context InlineCompletionContext `json:"context"`
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// Inline completion options used during static or dynamic registration.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionRegistrationOptions
-type InlineCompletionRegistrationOptions struct {
- InlineCompletionOptions
- TextDocumentRegistrationOptions
- StaticRegistrationOptions
-}
-
-// Describes how an {@link InlineCompletionItemProvider inline completion provider} was triggered.
-//
-// @since 3.18.0
-// @proposed
-type InlineCompletionTriggerKind uint32
-
-// Inline value information can be provided by different means:
-//
-// - directly as a text value (class InlineValueText).
-// - as a name to use for a variable lookup (class InlineValueVariableLookup)
-// - as an evaluatable expression (class InlineValueEvaluatableExpression)
-//
-// The InlineValue types combines all inline value types into one type.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValue
-type (
- InlineValue = Or_InlineValue // (alias)
- // Client capabilities specific to inline values.
- //
- // @since 3.17.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueClientCapabilities
- InlineValueClientCapabilities struct {
- // Whether implementation supports dynamic registration for inline value providers.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- }
-)
-
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueContext
-type InlineValueContext struct {
- // The stack frame (as a DAP Id) where the execution has stopped.
- FrameID int32 `json:"frameId"`
- // The document range where execution has stopped.
- // Typically the end position of the range denotes the line where the inline values are shown.
- StoppedLocation Range `json:"stoppedLocation"`
-}
-
-// Provide an inline value through an expression evaluation.
-// If only a range is specified, the expression will be extracted from the underlying document.
-// An optional expression can be used to override the extracted expression.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueEvaluatableExpression
-type InlineValueEvaluatableExpression struct {
- // The document range for which the inline value applies.
- // The range is used to extract the evaluatable expression from the underlying document.
- Range Range `json:"range"`
- // If specified the expression overrides the extracted expression.
- Expression string `json:"expression,omitempty"`
-}
-
-// Inline value options used during static registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueOptions
-type InlineValueOptions struct {
- WorkDoneProgressOptions
-}
-
-// A parameter literal used in inline value requests.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueParams
-type InlineValueParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The document range for which inline values should be computed.
- Range Range `json:"range"`
- // Additional information about the context in which inline values were
- // requested.
- Context InlineValueContext `json:"context"`
- WorkDoneProgressParams
-}
-
-// Inline value options used during static or dynamic registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueRegistrationOptions
-type InlineValueRegistrationOptions struct {
- InlineValueOptions
- TextDocumentRegistrationOptions
- StaticRegistrationOptions
-}
-
-// Provide inline value as text.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueText
-type InlineValueText struct {
- // The document range for which the inline value applies.
- Range Range `json:"range"`
- // The text of the inline value.
- Text string `json:"text"`
-}
-
-// Provide inline value through a variable lookup.
-// If only a range is specified, the variable name will be extracted from the underlying document.
-// An optional variable name can be used to override the extracted name.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueVariableLookup
-type InlineValueVariableLookup struct {
- // The document range for which the inline value applies.
- // The range is used to extract the variable name from the underlying document.
- Range Range `json:"range"`
- // If specified the name of the variable to look up.
- VariableName string `json:"variableName,omitempty"`
- // How to perform the lookup.
- CaseSensitiveLookup bool `json:"caseSensitiveLookup"`
-}
-
-// Client workspace capabilities specific to inline values.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueWorkspaceClientCapabilities
-type InlineValueWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from the
- // server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // inline values currently shown. It should be used with absolute care and is
- // useful for situation where a server for example detects a project wide
- // change that requires such a calculation.
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// A special text edit to provide an insert and a replace operation.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#insertReplaceEdit
-type InsertReplaceEdit struct {
- // The string to be inserted.
- NewText string `json:"newText"`
- // The range if the insert is requested
- Insert Range `json:"insert"`
- // The range if the replace is requested.
- Replace Range `json:"replace"`
-}
-
-// Defines whether the insert text in a completion item should be interpreted as
-// plain text or a snippet.
-type InsertTextFormat uint32
-
-// How whitespace and indentation is handled during completion
-// item insertion.
-//
-// @since 3.16.0
-type (
- InsertTextMode uint32
- LSPAny = interface{}
-)
-
-// LSP arrays.
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#lSPArray
-type (
- LSPArray = []interface{} // (alias)
- LSPErrorCodes int32
-)
-
-// LSP object definition.
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#lSPObject
-type (
- LSPObject = map[string]LSPAny // (alias)
- // Predefined Language kinds
- // @since 3.18.0
- // @proposed
- LanguageKind string
-)
-
-// Client capabilities for the linked editing range request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRangeClientCapabilities
-type LinkedEditingRangeClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRangeOptions
-type LinkedEditingRangeOptions struct {
- WorkDoneProgressOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRangeParams
-type LinkedEditingRangeParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRangeRegistrationOptions
-type LinkedEditingRangeRegistrationOptions struct {
- TextDocumentRegistrationOptions
- LinkedEditingRangeOptions
- StaticRegistrationOptions
-}
-
-// The result of a linked editing range request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRanges
-type LinkedEditingRanges struct {
- // A list of ranges that can be edited together. The ranges must have
- // identical length and contain identical text content. The ranges cannot overlap.
- Ranges []Range `json:"ranges"`
- // An optional word pattern (regular expression) that describes valid contents for
- // the given ranges. If no pattern is provided, the client configuration's word
- // pattern will be used.
- WordPattern string `json:"wordPattern,omitempty"`
-}
-
-// created for Literal (Lit_ClientSemanticTokensRequestOptions_range_Item1)
-type Lit_ClientSemanticTokensRequestOptions_range_Item1 struct{}
-
-// created for Literal (Lit_SemanticTokensOptions_range_Item1)
-type Lit_SemanticTokensOptions_range_Item1 struct{}
-
-// Represents a location inside a resource, such as a line
-// inside a text file.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#location
-type Location struct {
- URI DocumentURI `json:"uri"`
- Range Range `json:"range"`
-}
-
-// Represents the connection of two locations. Provides additional metadata over normal {@link Location locations},
-// including an origin range.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#locationLink
-type LocationLink struct {
- // Span of the origin of this link.
- //
- // Used as the underlined span for mouse interaction. Defaults to the word range at
- // the definition position.
- OriginSelectionRange *Range `json:"originSelectionRange,omitempty"`
- // The target resource identifier of this link.
- TargetURI DocumentURI `json:"targetUri"`
- // The full target range of this link. If the target for example is a symbol then target range is the
- // range enclosing this symbol not including leading/trailing whitespace but everything else
- // like comments. This information is typically used to highlight the range in the editor.
- TargetRange Range `json:"targetRange"`
- // The range that should be selected and revealed when this link is being followed, e.g the name of a function.
- // Must be contained by the `targetRange`. See also `DocumentSymbol#range`
- TargetSelectionRange Range `json:"targetSelectionRange"`
-}
-
-// Location with only uri and does not include range.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#locationUriOnly
-type LocationUriOnly struct {
- URI DocumentURI `json:"uri"`
-}
-
-// The log message parameters.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#logMessageParams
-type LogMessageParams struct {
- // The message type. See {@link MessageType}
- Type MessageType `json:"type"`
- // The actual message.
- Message string `json:"message"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#logTraceParams
-type LogTraceParams struct {
- Message string `json:"message"`
- Verbose string `json:"verbose,omitempty"`
-}
-
-// Client capabilities specific to the used markdown parser.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#markdownClientCapabilities
-type MarkdownClientCapabilities struct {
- // The name of the parser.
- Parser string `json:"parser"`
- // The version of the parser.
- Version string `json:"version,omitempty"`
- // A list of HTML tags that the client allows / supports in
- // Markdown.
- //
- // @since 3.17.0
- AllowedTags []string `json:"allowedTags,omitempty"`
-}
-
-// MarkedString can be used to render human readable text. It is either a markdown string
-// or a code-block that provides a language and a code snippet. The language identifier
-// is semantically equal to the optional language identifier in fenced code blocks in GitHub
-// issues. See https://help.github.com/articles/creating-and-highlighting-code-blocks/#syntax-highlighting
-//
-// The pair of a language and a value is an equivalent to markdown:
-// ```${language}
-// ${value}
-// ```
-//
-// Note that markdown strings will be sanitized - that means html will be escaped.
-// @deprecated use MarkupContent instead.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#markedString
-type (
- MarkedString = Or_MarkedString // (alias)
- // @since 3.18.0
- // @deprecated use MarkupContent instead.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#markedStringWithLanguage
- MarkedStringWithLanguage struct {
- Language string `json:"language"`
- Value string `json:"value"`
- }
-)
-
-// A `MarkupContent` literal represents a string value which content is interpreted base on its
-// kind flag. Currently the protocol supports `plaintext` and `markdown` as markup kinds.
-//
-// If the kind is `markdown` then the value can contain fenced code blocks like in GitHub issues.
-// See https://help.github.com/articles/creating-and-highlighting-code-blocks/#syntax-highlighting
-//
-// Here is an example how such a string can be constructed using JavaScript / TypeScript:
-// ```ts
-//
-// let markdown: MarkdownContent = {
-// kind: MarkupKind.Markdown,
-// value: [
-// '# Header',
-// 'Some text',
-// '```typescript',
-// 'someCode();',
-// '```'
-// ].join('\n')
-// };
-//
-// ```
-//
-// *Please Note* that clients might sanitize the return markdown. A client could decide to
-// remove HTML from the markdown to avoid script execution.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#markupContent
-type MarkupContent struct {
- // The type of the Markup
- Kind MarkupKind `json:"kind"`
- // The content itself
- Value string `json:"value"`
-}
-
-// Describes the content type that a client supports in various
-// result literals like `Hover`, `ParameterInfo` or `CompletionItem`.
-//
-// Please note that `MarkupKinds` must not start with a `$`. This kinds
-// are reserved for internal usage.
-type MarkupKind string
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#messageActionItem
-type MessageActionItem struct {
- // A short title like 'Retry', 'Open Log' etc.
- Title string `json:"title"`
-}
-
-// The message type
-type MessageType uint32
-
-// Moniker definition to match LSIF 0.5 moniker definition.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#moniker
-type Moniker struct {
- // The scheme of the moniker. For example tsc or .Net
- Scheme string `json:"scheme"`
- // The identifier of the moniker. The value is opaque in LSIF however
- // schema owners are allowed to define the structure if they want.
- Identifier string `json:"identifier"`
- // The scope in which the moniker is unique
- Unique UniquenessLevel `json:"unique"`
- // The moniker kind if known.
- Kind *MonikerKind `json:"kind,omitempty"`
-}
-
-// Client capabilities specific to the moniker request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#monikerClientCapabilities
-type MonikerClientCapabilities struct {
- // Whether moniker supports dynamic registration. If this is set to `true`
- // the client supports the new `MonikerRegistrationOptions` return value
- // for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// The moniker kind.
-//
-// @since 3.16.0
-type MonikerKind string
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#monikerOptions
-type MonikerOptions struct {
- WorkDoneProgressOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#monikerParams
-type MonikerParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#monikerRegistrationOptions
-type MonikerRegistrationOptions struct {
- TextDocumentRegistrationOptions
- MonikerOptions
-}
-
-// A notebook cell.
-//
-// A cell's document URI must be unique across ALL notebook
-// cells and can therefore be used to uniquely identify a
-// notebook cell or the cell's text document.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookCell
-type NotebookCell struct {
- // The cell's kind
- Kind NotebookCellKind `json:"kind"`
- // The URI of the cell's text document
- // content.
- Document DocumentURI `json:"document"`
- // Additional metadata stored with the cell.
- //
- // Note: should always be an object literal (e.g. LSPObject)
- Metadata *LSPObject `json:"metadata,omitempty"`
- // Additional execution summary information
- // if supported by the client.
- ExecutionSummary *ExecutionSummary `json:"executionSummary,omitempty"`
-}
-
-// A change describing how to move a `NotebookCell`
-// array from state S to S'.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookCellArrayChange
-type NotebookCellArrayChange struct {
- // The start oftest of the cell that changed.
- Start uint32 `json:"start"`
- // The deleted cells
- DeleteCount uint32 `json:"deleteCount"`
- // The new cells, if any
- Cells []NotebookCell `json:"cells,omitempty"`
-}
-
-// A notebook cell kind.
-//
-// @since 3.17.0
-type NotebookCellKind uint32
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookCellLanguage
-type NotebookCellLanguage struct {
- Language string `json:"language"`
-}
-
-// A notebook cell text document filter denotes a cell text
-// document by different properties.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookCellTextDocumentFilter
-type NotebookCellTextDocumentFilter struct {
- // A filter that matches against the notebook
- // containing the notebook cell. If a string
- // value is provided it matches against the
- // notebook type. '*' matches every notebook.
- Notebook Or_NotebookCellTextDocumentFilter_notebook `json:"notebook"`
- // A language id like `python`.
- //
- // Will be matched against the language id of the
- // notebook cell document. '*' matches every language.
- Language string `json:"language,omitempty"`
-}
-
-// A notebook document.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocument
-type NotebookDocument struct {
- // The notebook document's uri.
- URI URI `json:"uri"`
- // The type of the notebook.
- NotebookType string `json:"notebookType"`
- // The version number of this document (it will increase after each
- // change, including undo/redo).
- Version int32 `json:"version"`
- // Additional metadata stored with the notebook
- // document.
- //
- // Note: should always be an object literal (e.g. LSPObject)
- Metadata *LSPObject `json:"metadata,omitempty"`
- // The cells of a notebook.
- Cells []NotebookCell `json:"cells"`
-}
-
-// Structural changes to cells in a notebook document.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentCellChangeStructure
-type NotebookDocumentCellChangeStructure struct {
- // The change to the cell array.
- Array NotebookCellArrayChange `json:"array"`
- // Additional opened cell text documents.
- DidOpen []TextDocumentItem `json:"didOpen,omitempty"`
- // Additional closed cell text documents.
- DidClose []TextDocumentIdentifier `json:"didClose,omitempty"`
-}
-
-// Cell changes to a notebook document.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentCellChanges
-type NotebookDocumentCellChanges struct {
- // Changes to the cell structure to add or
- // remove cells.
- Structure *NotebookDocumentCellChangeStructure `json:"structure,omitempty"`
- // Changes to notebook cells properties like its
- // kind, execution summary or metadata.
- Data []NotebookCell `json:"data,omitempty"`
- // Changes to the text content of notebook cells.
- TextContent []NotebookDocumentCellContentChanges `json:"textContent,omitempty"`
-}
-
-// Content changes to a cell in a notebook document.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentCellContentChanges
-type NotebookDocumentCellContentChanges struct {
- Document VersionedTextDocumentIdentifier `json:"document"`
- Changes []TextDocumentContentChangeEvent `json:"changes"`
-}
-
-// A change event for a notebook document.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentChangeEvent
-type NotebookDocumentChangeEvent struct {
- // The changed meta data if any.
- //
- // Note: should always be an object literal (e.g. LSPObject)
- Metadata *LSPObject `json:"metadata,omitempty"`
- // Changes to cells
- Cells *NotebookDocumentCellChanges `json:"cells,omitempty"`
-}
-
-// Capabilities specific to the notebook document support.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentClientCapabilities
-type NotebookDocumentClientCapabilities struct {
- // Capabilities specific to notebook document synchronization
- //
- // @since 3.17.0
- Synchronization NotebookDocumentSyncClientCapabilities `json:"synchronization"`
-}
-
-// A notebook document filter denotes a notebook document by
-// different properties. The properties will be match
-// against the notebook's URI (same as with documents)
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilter
-type (
- NotebookDocumentFilter = Or_NotebookDocumentFilter // (alias)
- // A notebook document filter where `notebookType` is required field.
- //
- // @since 3.18.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterNotebookType
- NotebookDocumentFilterNotebookType struct {
- // The type of the enclosing notebook.
- NotebookType string `json:"notebookType"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme,omitempty"`
- // A glob pattern.
- Pattern *GlobPattern `json:"pattern,omitempty"`
- }
-)
-
-// A notebook document filter where `pattern` is required field.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterPattern
-type NotebookDocumentFilterPattern struct {
- // The type of the enclosing notebook.
- NotebookType string `json:"notebookType,omitempty"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme,omitempty"`
- // A glob pattern.
- Pattern GlobPattern `json:"pattern"`
-}
-
-// A notebook document filter where `scheme` is required field.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterScheme
-type NotebookDocumentFilterScheme struct {
- // The type of the enclosing notebook.
- NotebookType string `json:"notebookType,omitempty"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme"`
- // A glob pattern.
- Pattern *GlobPattern `json:"pattern,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterWithCells
-type NotebookDocumentFilterWithCells struct {
- // The notebook to be synced If a string
- // value is provided it matches against the
- // notebook type. '*' matches every notebook.
- Notebook *Or_NotebookDocumentFilterWithCells_notebook `json:"notebook,omitempty"`
- // The cells of the matching notebook to be synced.
- Cells []NotebookCellLanguage `json:"cells"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterWithNotebook
-type NotebookDocumentFilterWithNotebook struct {
- // The notebook to be synced If a string
- // value is provided it matches against the
- // notebook type. '*' matches every notebook.
- Notebook Or_NotebookDocumentFilterWithNotebook_notebook `json:"notebook"`
- // The cells of the matching notebook to be synced.
- Cells []NotebookCellLanguage `json:"cells,omitempty"`
-}
-
-// A literal to identify a notebook document in the client.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentIdentifier
-type NotebookDocumentIdentifier struct {
- // The notebook document's uri.
- URI URI `json:"uri"`
-}
-
-// Notebook specific client capabilities.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentSyncClientCapabilities
-type NotebookDocumentSyncClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is
- // set to `true` the client supports the new
- // `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports sending execution summary data per cell.
- ExecutionSummarySupport bool `json:"executionSummarySupport,omitempty"`
-}
-
-// Options specific to a notebook plus its cells
-// to be synced to the server.
-//
-// If a selector provides a notebook document
-// filter but no cell selector all cells of a
-// matching notebook document will be synced.
-//
-// If a selector provides no notebook document
-// filter but only a cell selector all notebook
-// document that contain at least one matching
-// cell will be synced.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentSyncOptions
-type NotebookDocumentSyncOptions struct {
- // The notebooks to be synced
- NotebookSelector []Or_NotebookDocumentSyncOptions_notebookSelector_Elem `json:"notebookSelector"`
- // Whether save notification should be forwarded to
- // the server. Will only be honored if mode === `notebook`.
- Save bool `json:"save,omitempty"`
-}
-
-// Registration options specific to a notebook.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentSyncRegistrationOptions
-type NotebookDocumentSyncRegistrationOptions struct {
- NotebookDocumentSyncOptions
- StaticRegistrationOptions
-}
-
-// A text document identifier to optionally denote a specific version of a text document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#optionalVersionedTextDocumentIdentifier
-type OptionalVersionedTextDocumentIdentifier struct {
- // The version number of this document. If a versioned text document identifier
- // is sent from the server to the client and the file is not open in the editor
- // (the server has not received an open notification before) the server can send
- // `null` to indicate that the version is unknown and the content on disk is the
- // truth (as specified with document content ownership).
- Version int32 `json:"version"`
- TextDocumentIdentifier
-}
-
-// created for Or [int32 string]
-type Or_CancelParams_id struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [ClientSemanticTokensRequestFullDelta bool]
-type Or_ClientSemanticTokensRequestOptions_full struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Lit_ClientSemanticTokensRequestOptions_range_Item1 bool]
-type Or_ClientSemanticTokensRequestOptions_range struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [EditRangeWithInsertReplace Range]
-type Or_CompletionItemDefaults_editRange struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkupContent string]
-type Or_CompletionItem_documentation struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InsertReplaceEdit TextEdit]
-type Or_CompletionItem_textEdit struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Location []Location]
-type Or_Declaration struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Location []Location]
-type Or_Definition struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [int32 string]
-type Or_Diagnostic_code struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [[]string string]
-type Or_DidChangeConfigurationRegistrationOptions_section struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [RelatedFullDocumentDiagnosticReport RelatedUnchangedDocumentDiagnosticReport]
-type Or_DocumentDiagnosticReport struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]
-type Or_DocumentDiagnosticReportPartialResult_relatedDocuments_Value struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookCellTextDocumentFilter TextDocumentFilter]
-type Or_DocumentFilter struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Pattern RelativePattern]
-type Or_GlobPattern struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkedString MarkupContent []MarkedString]
-type Or_Hover_contents struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkupContent string]
-type Or_InlayHintLabelPart_tooltip struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [[]InlayHintLabelPart string]
-type Or_InlayHint_label struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkupContent string]
-type Or_InlayHint_tooltip struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [StringValue string]
-type Or_InlineCompletionItem_insertText struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InlineValueEvaluatableExpression InlineValueText InlineValueVariableLookup]
-type Or_InlineValue struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [LSPArray LSPObject bool float64 int32 string uint32]
-type Or_LSPAny struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkedStringWithLanguage string]
-type Or_MarkedString struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentFilter string]
-type Or_NotebookCellTextDocumentFilter_notebook struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentFilterNotebookType NotebookDocumentFilterPattern NotebookDocumentFilterScheme]
-type Or_NotebookDocumentFilter struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentFilter string]
-type Or_NotebookDocumentFilterWithCells_notebook struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentFilter string]
-type Or_NotebookDocumentFilterWithNotebook_notebook struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentFilterWithCells NotebookDocumentFilterWithNotebook]
-type Or_NotebookDocumentSyncOptions_notebookSelector_Elem struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkupContent string]
-type Or_ParameterInformation_documentation struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Tuple_ParameterInformation_label_Item1 string]
-type Or_ParameterInformation_label struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [PrepareRenameDefaultBehavior PrepareRenamePlaceholder Range]
-type Or_PrepareRenameResult struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [int32 string]
-type Or_ProgressToken struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]
-type Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]
-type Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [URI WorkspaceFolder]
-type Or_RelativePattern_baseUri struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [CodeAction Command]
-type Or_Result_textDocument_codeAction_Item0_Elem struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [CompletionList []CompletionItem]
-type Or_Result_textDocument_completion struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Declaration []DeclarationLink]
-type Or_Result_textDocument_declaration struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Definition []DefinitionLink]
-type Or_Result_textDocument_definition struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [[]DocumentSymbol []SymbolInformation]
-type Or_Result_textDocument_documentSymbol struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Definition []DefinitionLink]
-type Or_Result_textDocument_implementation struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InlineCompletionList []InlineCompletionItem]
-type Or_Result_textDocument_inlineCompletion struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [SemanticTokens SemanticTokensDelta]
-type Or_Result_textDocument_semanticTokens_full_delta struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Definition []DefinitionLink]
-type Or_Result_textDocument_typeDefinition struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [[]SymbolInformation []WorkspaceSymbol]
-type Or_Result_workspace_symbol struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [SemanticTokensFullDelta bool]
-type Or_SemanticTokensOptions_full struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Lit_SemanticTokensOptions_range_Item1 bool]
-type Or_SemanticTokensOptions_range struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [CallHierarchyOptions CallHierarchyRegistrationOptions bool]
-type Or_ServerCapabilities_callHierarchyProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [CodeActionOptions bool]
-type Or_ServerCapabilities_codeActionProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DocumentColorOptions DocumentColorRegistrationOptions bool]
-type Or_ServerCapabilities_colorProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DeclarationOptions DeclarationRegistrationOptions bool]
-type Or_ServerCapabilities_declarationProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DefinitionOptions bool]
-type Or_ServerCapabilities_definitionProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DiagnosticOptions DiagnosticRegistrationOptions]
-type Or_ServerCapabilities_diagnosticProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DocumentFormattingOptions bool]
-type Or_ServerCapabilities_documentFormattingProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DocumentHighlightOptions bool]
-type Or_ServerCapabilities_documentHighlightProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DocumentRangeFormattingOptions bool]
-type Or_ServerCapabilities_documentRangeFormattingProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DocumentSymbolOptions bool]
-type Or_ServerCapabilities_documentSymbolProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [FoldingRangeOptions FoldingRangeRegistrationOptions bool]
-type Or_ServerCapabilities_foldingRangeProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [HoverOptions bool]
-type Or_ServerCapabilities_hoverProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [ImplementationOptions ImplementationRegistrationOptions bool]
-type Or_ServerCapabilities_implementationProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InlayHintOptions InlayHintRegistrationOptions bool]
-type Or_ServerCapabilities_inlayHintProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InlineCompletionOptions bool]
-type Or_ServerCapabilities_inlineCompletionProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InlineValueOptions InlineValueRegistrationOptions bool]
-type Or_ServerCapabilities_inlineValueProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [LinkedEditingRangeOptions LinkedEditingRangeRegistrationOptions bool]
-type Or_ServerCapabilities_linkedEditingRangeProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MonikerOptions MonikerRegistrationOptions bool]
-type Or_ServerCapabilities_monikerProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentSyncOptions NotebookDocumentSyncRegistrationOptions]
-type Or_ServerCapabilities_notebookDocumentSync struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [ReferenceOptions bool]
-type Or_ServerCapabilities_referencesProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [RenameOptions bool]
-type Or_ServerCapabilities_renameProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [SelectionRangeOptions SelectionRangeRegistrationOptions bool]
-type Or_ServerCapabilities_selectionRangeProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [SemanticTokensOptions SemanticTokensRegistrationOptions]
-type Or_ServerCapabilities_semanticTokensProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TextDocumentSyncKind TextDocumentSyncOptions]
-type Or_ServerCapabilities_textDocumentSync struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TypeDefinitionOptions TypeDefinitionRegistrationOptions bool]
-type Or_ServerCapabilities_typeDefinitionProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TypeHierarchyOptions TypeHierarchyRegistrationOptions bool]
-type Or_ServerCapabilities_typeHierarchyProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [WorkspaceSymbolOptions bool]
-type Or_ServerCapabilities_workspaceSymbolProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkupContent string]
-type Or_SignatureInformation_documentation struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TextDocumentContentChangePartial TextDocumentContentChangeWholeDocument]
-type Or_TextDocumentContentChangeEvent struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [AnnotatedTextEdit SnippetTextEdit TextEdit]
-type Or_TextDocumentEdit_edits_Elem struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TextDocumentFilterLanguage TextDocumentFilterPattern TextDocumentFilterScheme]
-type Or_TextDocumentFilter struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [SaveOptions bool]
-type Or_TextDocumentSyncOptions_save struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [WorkspaceFullDocumentDiagnosticReport WorkspaceUnchangedDocumentDiagnosticReport]
-type Or_WorkspaceDocumentDiagnosticReport struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [CreateFile DeleteFile RenameFile TextDocumentEdit]
-type Or_WorkspaceEdit_documentChanges_Elem struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [bool string]
-type Or_WorkspaceFoldersServerCapabilities_changeNotifications struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TextDocumentContentOptions TextDocumentContentRegistrationOptions]
-type Or_WorkspaceOptions_textDocumentContent struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Location LocationUriOnly]
-type Or_WorkspaceSymbol_location struct {
- Value interface{} `json:"value"`
-}
-
-// The parameters of a configuration request.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#configurationParams
-type ParamConfiguration struct {
- Items []ConfigurationItem `json:"items"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializeParams
-type ParamInitialize struct {
- XInitializeParams
- WorkspaceFoldersInitializeParams
-}
-
-// Represents a parameter of a callable-signature. A parameter can
-// have a label and a doc-comment.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#parameterInformation
-type ParameterInformation struct {
- // The label of this parameter information.
- //
- // Either a string or an inclusive start and exclusive end offsets within its containing
- // signature label. (see SignatureInformation.label). The offsets are based on a UTF-16
- // string representation as `Position` and `Range` does.
- //
- // To avoid ambiguities a server should use the [start, end] offset value instead of using
- // a substring. Whether a client support this is controlled via `labelOffsetSupport` client
- // capability.
- //
- // *Note*: a label of type string should be a substring of its containing signature label.
- // Its intended use case is to highlight the parameter label part in the `SignatureInformation.label`.
- Label Or_ParameterInformation_label `json:"label"`
- // The human-readable doc-comment of this parameter. Will be shown
- // in the UI but can be omitted.
- Documentation *Or_ParameterInformation_documentation `json:"documentation,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#partialResultParams
-type PartialResultParams struct {
- // An optional token that a server can use to report partial results (e.g. streaming) to
- // the client.
- PartialResultToken *ProgressToken `json:"partialResultToken,omitempty"`
-}
-
-// The glob pattern to watch relative to the base path. Glob patterns can have the following syntax:
-//
-// - `*` to match one or more characters in a path segment
-// - `?` to match on one character in a path segment
-// - `**` to match any number of path segments, including none
-// - `{}` to group conditions (e.g. `**/*.{ts,js}` matches all TypeScript and JavaScript files)
-// - `[]` to declare a range of characters to match in a path segment (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …)
-// - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`)
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#pattern
-type (
- Pattern = string // (alias)
- // Position in a text document expressed as zero-based line and character
- // offset. Prior to 3.17 the offsets were always based on a UTF-16 string
- // representation. So a string of the form `a𐐀b` the character offset of the
- // character `a` is 0, the character offset of `𐐀` is 1 and the character
- // offset of b is 3 since `𐐀` is represented using two code units in UTF-16.
- // Since 3.17 clients and servers can agree on a different string encoding
- // representation (e.g. UTF-8). The client announces it's supported encoding
- // via the client capability [`general.positionEncodings`](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#clientCapabilities).
- // The value is an array of position encodings the client supports, with
- // decreasing preference (e.g. the encoding at index `0` is the most preferred
- // one). To stay backwards compatible the only mandatory encoding is UTF-16
- // represented via the string `utf-16`. The server can pick one of the
- // encodings offered by the client and signals that encoding back to the
- // client via the initialize result's property
- // [`capabilities.positionEncoding`](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#serverCapabilities). If the string value
- // `utf-16` is missing from the client's capability `general.positionEncodings`
- // servers can safely assume that the client supports UTF-16. If the server
- // omits the position encoding in its initialize result the encoding defaults
- // to the string value `utf-16`. Implementation considerations: since the
- // conversion from one encoding into another requires the content of the
- // file / line the conversion is best done where the file is read which is
- // usually on the server side.
- //
- // Positions are line end character agnostic. So you can not specify a position
- // that denotes `\r|\n` or `\n|` where `|` represents the character offset.
- //
- // @since 3.17.0 - support for negotiated position encoding.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#position
- Position struct {
- // Line position in a document (zero-based).
- //
- // If a line number is greater than the number of lines in a document, it defaults back to the number of lines in the document.
- // If a line number is negative, it defaults to 0.
- Line uint32 `json:"line"`
- // Character offset on a line in a document (zero-based).
- //
- // The meaning of this offset is determined by the negotiated
- // `PositionEncodingKind`.
- //
- // If the character value is greater than the line length it defaults back to the
- // line length.
- Character uint32 `json:"character"`
- }
-)
-
-// A set of predefined position encoding kinds.
-//
-// @since 3.17.0
-type PositionEncodingKind string
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#prepareRenameDefaultBehavior
-type PrepareRenameDefaultBehavior struct {
- DefaultBehavior bool `json:"defaultBehavior"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#prepareRenameParams
-type PrepareRenameParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#prepareRenamePlaceholder
-type PrepareRenamePlaceholder struct {
- Range Range `json:"range"`
- Placeholder string `json:"placeholder"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#prepareRenameResult
-type (
- PrepareRenameResult = Or_PrepareRenameResult // (alias)
- PrepareSupportDefaultBehavior uint32
-)
-
-// A previous result id in a workspace pull request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#previousResultId
-type PreviousResultID struct {
- // The URI for which the client knowns a
- // result id.
- URI DocumentURI `json:"uri"`
- // The value of the previous result id.
- Value string `json:"value"`
-}
-
-// A previous result id in a workspace pull request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#previousResultId
-type PreviousResultId struct {
- // The URI for which the client knowns a
- // result id.
- URI DocumentURI `json:"uri"`
- // The value of the previous result id.
- Value string `json:"value"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#progressParams
-type ProgressParams struct {
- // The progress token provided by the client or server.
- Token ProgressToken `json:"token"`
- // The progress data.
- Value interface{} `json:"value"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#progressToken
-type (
- ProgressToken = Or_ProgressToken // (alias)
- // The publish diagnostic client capabilities.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#publishDiagnosticsClientCapabilities
- PublishDiagnosticsClientCapabilities struct {
- // Whether the client interprets the version property of the
- // `textDocument/publishDiagnostics` notification's parameter.
- //
- // @since 3.15.0
- VersionSupport bool `json:"versionSupport,omitempty"`
- DiagnosticsCapabilities
- }
-)
-
-// The publish diagnostic notification's parameters.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#publishDiagnosticsParams
-type PublishDiagnosticsParams struct {
- // The URI for which diagnostic information is reported.
- URI DocumentURI `json:"uri"`
- // Optional the version number of the document the diagnostics are published for.
- //
- // @since 3.15.0
- Version int32 `json:"version,omitempty"`
- // An array of diagnostic information items.
- Diagnostics []Diagnostic `json:"diagnostics"`
-}
-
-// A range in a text document expressed as (zero-based) start and end positions.
-//
-// If you want to specify a range that contains a line including the line ending
-// character(s) then use an end position denoting the start of the next line.
-// For example:
-// ```ts
-//
-// {
-// start: { line: 5, character: 23 }
-// end : { line 6, character : 0 }
-// }
-//
-// ```
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#range
-type Range struct {
- // The range's start position.
- Start Position `json:"start"`
- // The range's end position.
- End Position `json:"end"`
-}
-
-// Client Capabilities for a {@link ReferencesRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceClientCapabilities
-type ReferenceClientCapabilities struct {
- // Whether references supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// Value-object that contains additional information when
-// requesting references.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceContext
-type ReferenceContext struct {
- // Include the declaration of the current symbol.
- IncludeDeclaration bool `json:"includeDeclaration"`
-}
-
-// Reference options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceOptions
-type ReferenceOptions struct {
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link ReferencesRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceParams
-type ReferenceParams struct {
- Context ReferenceContext `json:"context"`
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link ReferencesRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceRegistrationOptions
-type ReferenceRegistrationOptions struct {
- TextDocumentRegistrationOptions
- ReferenceOptions
-}
-
-// General parameters to register for a notification or to register a provider.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#registration
-type Registration struct {
- // The id used to register the request. The id can be used to deregister
- // the request again.
- ID string `json:"id"`
- // The method / capability to register for.
- Method string `json:"method"`
- // Options necessary for the registration.
- RegisterOptions interface{} `json:"registerOptions,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#registrationParams
-type RegistrationParams struct {
- Registrations []Registration `json:"registrations"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#regularExpressionEngineKind
-type (
- RegularExpressionEngineKind = string // (alias)
- // Client capabilities specific to regular expressions.
- //
- // @since 3.16.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#regularExpressionsClientCapabilities
- RegularExpressionsClientCapabilities struct {
- // The engine's name.
- Engine RegularExpressionEngineKind `json:"engine"`
- // The engine's version.
- Version string `json:"version,omitempty"`
- }
-)
-
-// A full diagnostic report with a set of related documents.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#relatedFullDocumentDiagnosticReport
-type RelatedFullDocumentDiagnosticReport struct {
- // Diagnostics of related documents. This information is useful
- // in programming languages where code in a file A can generate
- // diagnostics in a file B which A depends on. An example of
- // such a language is C/C++ where marco definitions in a file
- // a.cpp and result in errors in a header file b.hpp.
- //
- // @since 3.17.0
- RelatedDocuments map[DocumentURI]interface{} `json:"relatedDocuments,omitempty"`
- FullDocumentDiagnosticReport
-}
-
-// An unchanged diagnostic report with a set of related documents.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#relatedUnchangedDocumentDiagnosticReport
-type RelatedUnchangedDocumentDiagnosticReport struct {
- // Diagnostics of related documents. This information is useful
- // in programming languages where code in a file A can generate
- // diagnostics in a file B which A depends on. An example of
- // such a language is C/C++ where marco definitions in a file
- // a.cpp and result in errors in a header file b.hpp.
- //
- // @since 3.17.0
- RelatedDocuments map[DocumentURI]interface{} `json:"relatedDocuments,omitempty"`
- UnchangedDocumentDiagnosticReport
-}
-
-// A relative pattern is a helper to construct glob patterns that are matched
-// relatively to a base URI. The common value for a `baseUri` is a workspace
-// folder root, but it can be another absolute URI as well.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#relativePattern
-type RelativePattern struct {
- // A workspace folder or a base URI to which this pattern will be matched
- // against relatively.
- BaseURI Or_RelativePattern_baseUri `json:"baseUri"`
- // The actual glob pattern;
- Pattern Pattern `json:"pattern"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameClientCapabilities
-type RenameClientCapabilities struct {
- // Whether rename supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Client supports testing for validity of rename operations
- // before execution.
- //
- // @since 3.12.0
- PrepareSupport bool `json:"prepareSupport,omitempty"`
- // Client supports the default behavior result.
- //
- // The value indicates the default behavior used by the
- // client.
- //
- // @since 3.16.0
- PrepareSupportDefaultBehavior *PrepareSupportDefaultBehavior `json:"prepareSupportDefaultBehavior,omitempty"`
- // Whether the client honors the change annotations in
- // text edits and resource operations returned via the
- // rename request's workspace edit by for example presenting
- // the workspace edit in the user interface and asking
- // for confirmation.
- //
- // @since 3.16.0
- HonorsChangeAnnotations bool `json:"honorsChangeAnnotations,omitempty"`
-}
-
-// Rename file operation
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameFile
-type RenameFile struct {
- // A rename
- Kind string `json:"kind"`
- // The old (existing) location.
- OldURI DocumentURI `json:"oldUri"`
- // The new location.
- NewURI DocumentURI `json:"newUri"`
- // Rename options.
- Options *RenameFileOptions `json:"options,omitempty"`
- ResourceOperation
-}
-
-// Rename file options
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameFileOptions
-type RenameFileOptions struct {
- // Overwrite target if existing. Overwrite wins over `ignoreIfExists`
- Overwrite bool `json:"overwrite,omitempty"`
- // Ignores if target exists.
- IgnoreIfExists bool `json:"ignoreIfExists,omitempty"`
-}
-
-// The parameters sent in notifications/requests for user-initiated renames of
-// files.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameFilesParams
-type RenameFilesParams struct {
- // An array of all files/folders renamed in this operation. When a folder is renamed, only
- // the folder will be included, and not its children.
- Files []FileRename `json:"files"`
-}
-
-// Provider options for a {@link RenameRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameOptions
-type RenameOptions struct {
- // Renames should be checked and tested before being executed.
- //
- // @since version 3.12.0
- PrepareProvider bool `json:"prepareProvider,omitempty"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link RenameRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameParams
-type RenameParams struct {
- // The document to rename.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The position at which this request was sent.
- Position Position `json:"position"`
- // The new name of the symbol. If the given name is not valid the
- // request must return a {@link ResponseError} with an
- // appropriate message set.
- NewName string `json:"newName"`
- WorkDoneProgressParams
-}
-
-// Registration options for a {@link RenameRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameRegistrationOptions
-type RenameRegistrationOptions struct {
- TextDocumentRegistrationOptions
- RenameOptions
-}
-
-// A generic resource operation.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#resourceOperation
-type ResourceOperation struct {
- // The resource operation kind.
- Kind string `json:"kind"`
- // An optional annotation identifier describing the operation.
- //
- // @since 3.16.0
- AnnotationID *ChangeAnnotationIdentifier `json:"annotationId,omitempty"`
-}
-type ResourceOperationKind string
-
-// Save options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#saveOptions
-type SaveOptions struct {
- // The client is supposed to include the content on save.
- IncludeText bool `json:"includeText,omitempty"`
-}
-
-// Describes the currently selected completion item.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectedCompletionInfo
-type SelectedCompletionInfo struct {
- // The range that will be replaced if this completion item is accepted.
- Range Range `json:"range"`
- // The text the range will be replaced with if this completion is accepted.
- Text string `json:"text"`
-}
-
-// A selection range represents a part of a selection hierarchy. A selection range
-// may have a parent selection range that contains it.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRange
-type SelectionRange struct {
- // The {@link Range range} of this selection range.
- Range Range `json:"range"`
- // The parent selection range containing this range. Therefore `parent.range` must contain `this.range`.
- Parent *SelectionRange `json:"parent,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRangeClientCapabilities
-type SelectionRangeClientCapabilities struct {
- // Whether implementation supports dynamic registration for selection range providers. If this is set to `true`
- // the client supports the new `SelectionRangeRegistrationOptions` return value for the corresponding server
- // capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRangeOptions
-type SelectionRangeOptions struct {
- WorkDoneProgressOptions
-}
-
-// A parameter literal used in selection range requests.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRangeParams
-type SelectionRangeParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The positions inside the text document.
- Positions []Position `json:"positions"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRangeRegistrationOptions
-type SelectionRangeRegistrationOptions struct {
- SelectionRangeOptions
- TextDocumentRegistrationOptions
- StaticRegistrationOptions
-}
-
-// A set of predefined token modifiers. This set is not fixed
-// an clients can specify additional token types via the
-// corresponding client capabilities.
-//
-// @since 3.16.0
-type SemanticTokenModifiers string
-
-// A set of predefined token types. This set is not fixed
-// an clients can specify additional token types via the
-// corresponding client capabilities.
-//
-// @since 3.16.0
-type SemanticTokenTypes string
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokens
-type SemanticTokens struct {
- // An optional result id. If provided and clients support delta updating
- // the client will include the result id in the next semantic token request.
- // A server can then instead of computing all semantic tokens again simply
- // send a delta.
- ResultID string `json:"resultId,omitempty"`
- // The actual tokens.
- Data []uint32 `json:"data"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensClientCapabilities
-type SemanticTokensClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Which requests the client supports and might send to the server
- // depending on the server's capability. Please note that clients might not
- // show semantic tokens or degrade some of the user experience if a range
- // or full request is advertised by the client but not provided by the
- // server. If for example the client capability `requests.full` and
- // `request.range` are both set to true but the server only provides a
- // range provider the client might not render a minimap correctly or might
- // even decide to not show any semantic tokens at all.
- Requests ClientSemanticTokensRequestOptions `json:"requests"`
- // The token types that the client supports.
- TokenTypes []string `json:"tokenTypes"`
- // The token modifiers that the client supports.
- TokenModifiers []string `json:"tokenModifiers"`
- // The token formats the clients supports.
- Formats []TokenFormat `json:"formats"`
- // Whether the client supports tokens that can overlap each other.
- OverlappingTokenSupport bool `json:"overlappingTokenSupport,omitempty"`
- // Whether the client supports tokens that can span multiple lines.
- MultilineTokenSupport bool `json:"multilineTokenSupport,omitempty"`
- // Whether the client allows the server to actively cancel a
- // semantic token request, e.g. supports returning
- // LSPErrorCodes.ServerCancelled. If a server does the client
- // needs to retrigger the request.
- //
- // @since 3.17.0
- ServerCancelSupport bool `json:"serverCancelSupport,omitempty"`
- // Whether the client uses semantic tokens to augment existing
- // syntax tokens. If set to `true` client side created syntax
- // tokens and semantic tokens are both used for colorization. If
- // set to `false` the client only uses the returned semantic tokens
- // for colorization.
- //
- // If the value is `undefined` then the client behavior is not
- // specified.
- //
- // @since 3.17.0
- AugmentsSyntaxTokens bool `json:"augmentsSyntaxTokens,omitempty"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensDelta
-type SemanticTokensDelta struct {
- ResultID string `json:"resultId,omitempty"`
- // The semantic token edits to transform a previous result into a new result.
- Edits []SemanticTokensEdit `json:"edits"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensDeltaParams
-type SemanticTokensDeltaParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The result id of a previous response. The result Id can either point to a full response
- // or a delta response depending on what was received last.
- PreviousResultID string `json:"previousResultId"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensDeltaPartialResult
-type SemanticTokensDeltaPartialResult struct {
- Edits []SemanticTokensEdit `json:"edits"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensEdit
-type SemanticTokensEdit struct {
- // The start offset of the edit.
- Start uint32 `json:"start"`
- // The count of elements to remove.
- DeleteCount uint32 `json:"deleteCount"`
- // The elements to insert.
- Data []uint32 `json:"data,omitempty"`
-}
-
-// Semantic tokens options to support deltas for full documents
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensFullDelta
-type SemanticTokensFullDelta struct {
- // The server supports deltas for full documents.
- Delta bool `json:"delta,omitempty"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensLegend
-type SemanticTokensLegend struct {
- // The token types a server uses.
- TokenTypes []string `json:"tokenTypes"`
- // The token modifiers a server uses.
- TokenModifiers []string `json:"tokenModifiers"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensOptions
-type SemanticTokensOptions struct {
- // The legend used by the server
- Legend SemanticTokensLegend `json:"legend"`
- // Server supports providing semantic tokens for a specific range
- // of a document.
- Range *Or_SemanticTokensOptions_range `json:"range,omitempty"`
- // Server supports providing semantic tokens for a full document.
- Full *Or_SemanticTokensOptions_full `json:"full,omitempty"`
- WorkDoneProgressOptions
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensParams
-type SemanticTokensParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensPartialResult
-type SemanticTokensPartialResult struct {
- Data []uint32 `json:"data"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensRangeParams
-type SemanticTokensRangeParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The range the semantic tokens are requested for.
- Range Range `json:"range"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensRegistrationOptions
-type SemanticTokensRegistrationOptions struct {
- TextDocumentRegistrationOptions
- SemanticTokensOptions
- StaticRegistrationOptions
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensWorkspaceClientCapabilities
-type SemanticTokensWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from
- // the server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // semantic tokens currently shown. It should be used with absolute care
- // and is useful for situation where a server for example detects a project
- // wide change that requires such a calculation.
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// Defines the capabilities provided by a language
-// server.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#serverCapabilities
-type ServerCapabilities struct {
- // The position encoding the server picked from the encodings offered
- // by the client via the client capability `general.positionEncodings`.
- //
- // If the client didn't provide any position encodings the only valid
- // value that a server can return is 'utf-16'.
- //
- // If omitted it defaults to 'utf-16'.
- //
- // @since 3.17.0
- PositionEncoding *PositionEncodingKind `json:"positionEncoding,omitempty"`
- // Defines how text documents are synced. Is either a detailed structure
- // defining each notification or for backwards compatibility the
- // TextDocumentSyncKind number.
- TextDocumentSync interface{} `json:"textDocumentSync,omitempty"`
- // Defines how notebook documents are synced.
- //
- // @since 3.17.0
- NotebookDocumentSync *Or_ServerCapabilities_notebookDocumentSync `json:"notebookDocumentSync,omitempty"`
- // The server provides completion support.
- CompletionProvider *CompletionOptions `json:"completionProvider,omitempty"`
- // The server provides hover support.
- HoverProvider *Or_ServerCapabilities_hoverProvider `json:"hoverProvider,omitempty"`
- // The server provides signature help support.
- SignatureHelpProvider *SignatureHelpOptions `json:"signatureHelpProvider,omitempty"`
- // The server provides Goto Declaration support.
- DeclarationProvider *Or_ServerCapabilities_declarationProvider `json:"declarationProvider,omitempty"`
- // The server provides goto definition support.
- DefinitionProvider *Or_ServerCapabilities_definitionProvider `json:"definitionProvider,omitempty"`
- // The server provides Goto Type Definition support.
- TypeDefinitionProvider *Or_ServerCapabilities_typeDefinitionProvider `json:"typeDefinitionProvider,omitempty"`
- // The server provides Goto Implementation support.
- ImplementationProvider *Or_ServerCapabilities_implementationProvider `json:"implementationProvider,omitempty"`
- // The server provides find references support.
- ReferencesProvider *Or_ServerCapabilities_referencesProvider `json:"referencesProvider,omitempty"`
- // The server provides document highlight support.
- DocumentHighlightProvider *Or_ServerCapabilities_documentHighlightProvider `json:"documentHighlightProvider,omitempty"`
- // The server provides document symbol support.
- DocumentSymbolProvider *Or_ServerCapabilities_documentSymbolProvider `json:"documentSymbolProvider,omitempty"`
- // The server provides code actions. CodeActionOptions may only be
- // specified if the client states that it supports
- // `codeActionLiteralSupport` in its initial `initialize` request.
- CodeActionProvider interface{} `json:"codeActionProvider,omitempty"`
- // The server provides code lens.
- CodeLensProvider *CodeLensOptions `json:"codeLensProvider,omitempty"`
- // The server provides document link support.
- DocumentLinkProvider *DocumentLinkOptions `json:"documentLinkProvider,omitempty"`
- // The server provides color provider support.
- ColorProvider *Or_ServerCapabilities_colorProvider `json:"colorProvider,omitempty"`
- // The server provides workspace symbol support.
- WorkspaceSymbolProvider *Or_ServerCapabilities_workspaceSymbolProvider `json:"workspaceSymbolProvider,omitempty"`
- // The server provides document formatting.
- DocumentFormattingProvider *Or_ServerCapabilities_documentFormattingProvider `json:"documentFormattingProvider,omitempty"`
- // The server provides document range formatting.
- DocumentRangeFormattingProvider *Or_ServerCapabilities_documentRangeFormattingProvider `json:"documentRangeFormattingProvider,omitempty"`
- // The server provides document formatting on typing.
- DocumentOnTypeFormattingProvider *DocumentOnTypeFormattingOptions `json:"documentOnTypeFormattingProvider,omitempty"`
- // The server provides rename support. RenameOptions may only be
- // specified if the client states that it supports
- // `prepareSupport` in its initial `initialize` request.
- RenameProvider interface{} `json:"renameProvider,omitempty"`
- // The server provides folding provider support.
- FoldingRangeProvider *Or_ServerCapabilities_foldingRangeProvider `json:"foldingRangeProvider,omitempty"`
- // The server provides selection range support.
- SelectionRangeProvider *Or_ServerCapabilities_selectionRangeProvider `json:"selectionRangeProvider,omitempty"`
- // The server provides execute command support.
- ExecuteCommandProvider *ExecuteCommandOptions `json:"executeCommandProvider,omitempty"`
- // The server provides call hierarchy support.
- //
- // @since 3.16.0
- CallHierarchyProvider *Or_ServerCapabilities_callHierarchyProvider `json:"callHierarchyProvider,omitempty"`
- // The server provides linked editing range support.
- //
- // @since 3.16.0
- LinkedEditingRangeProvider *Or_ServerCapabilities_linkedEditingRangeProvider `json:"linkedEditingRangeProvider,omitempty"`
- // The server provides semantic tokens support.
- //
- // @since 3.16.0
- SemanticTokensProvider interface{} `json:"semanticTokensProvider,omitempty"`
- // The server provides moniker support.
- //
- // @since 3.16.0
- MonikerProvider *Or_ServerCapabilities_monikerProvider `json:"monikerProvider,omitempty"`
- // The server provides type hierarchy support.
- //
- // @since 3.17.0
- TypeHierarchyProvider *Or_ServerCapabilities_typeHierarchyProvider `json:"typeHierarchyProvider,omitempty"`
- // The server provides inline values.
- //
- // @since 3.17.0
- InlineValueProvider *Or_ServerCapabilities_inlineValueProvider `json:"inlineValueProvider,omitempty"`
- // The server provides inlay hints.
- //
- // @since 3.17.0
- InlayHintProvider interface{} `json:"inlayHintProvider,omitempty"`
- // The server has support for pull model diagnostics.
- //
- // @since 3.17.0
- DiagnosticProvider *Or_ServerCapabilities_diagnosticProvider `json:"diagnosticProvider,omitempty"`
- // Inline completion options used during static registration.
- //
- // @since 3.18.0
- // @proposed
- InlineCompletionProvider *Or_ServerCapabilities_inlineCompletionProvider `json:"inlineCompletionProvider,omitempty"`
- // Workspace specific server capabilities.
- Workspace *WorkspaceOptions `json:"workspace,omitempty"`
- // Experimental server capabilities.
- Experimental interface{} `json:"experimental,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#serverCompletionItemOptions
-type ServerCompletionItemOptions struct {
- // The server has support for completion item label
- // details (see also `CompletionItemLabelDetails`) when
- // receiving a completion item in a resolve call.
- //
- // @since 3.17.0
- LabelDetailsSupport bool `json:"labelDetailsSupport,omitempty"`
-}
-
-// Information about the server
-//
-// @since 3.15.0
-// @since 3.18.0 ServerInfo type name added.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#serverInfo
-type ServerInfo struct {
- // The name of the server as defined by the server.
- Name string `json:"name"`
- // The server's version as defined by the server.
- Version string `json:"version,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#setTraceParams
-type SetTraceParams struct {
- Value TraceValue `json:"value"`
-}
-
-// Client capabilities for the showDocument request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showDocumentClientCapabilities
-type ShowDocumentClientCapabilities struct {
- // The client has support for the showDocument
- // request.
- Support bool `json:"support"`
-}
-
-// Params to show a resource in the UI.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showDocumentParams
-type ShowDocumentParams struct {
- // The uri to show.
- URI URI `json:"uri"`
- // Indicates to show the resource in an external program.
- // To show, for example, `https://code.visualstudio.com/`
- // in the default WEB browser set `external` to `true`.
- External bool `json:"external,omitempty"`
- // An optional property to indicate whether the editor
- // showing the document should take focus or not.
- // Clients might ignore this property if an external
- // program is started.
- TakeFocus bool `json:"takeFocus,omitempty"`
- // An optional selection range if the document is a text
- // document. Clients might ignore the property if an
- // external program is started or the file is not a text
- // file.
- Selection *Range `json:"selection,omitempty"`
-}
-
-// The result of a showDocument request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showDocumentResult
-type ShowDocumentResult struct {
- // A boolean indicating if the show was successful.
- Success bool `json:"success"`
-}
-
-// The parameters of a notification message.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showMessageParams
-type ShowMessageParams struct {
- // The message type. See {@link MessageType}
- Type MessageType `json:"type"`
- // The actual message.
- Message string `json:"message"`
-}
-
-// Show message request client capabilities
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showMessageRequestClientCapabilities
-type ShowMessageRequestClientCapabilities struct {
- // Capabilities specific to the `MessageActionItem` type.
- MessageActionItem *ClientShowMessageActionItemOptions `json:"messageActionItem,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showMessageRequestParams
-type ShowMessageRequestParams struct {
- // The message type. See {@link MessageType}
- Type MessageType `json:"type"`
- // The actual message.
- Message string `json:"message"`
- // The message action items to present.
- Actions []MessageActionItem `json:"actions,omitempty"`
-}
-
-// Signature help represents the signature of something
-// callable. There can be multiple signature but only one
-// active and only one active parameter.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelp
-type SignatureHelp struct {
- // One or more signatures.
- Signatures []SignatureInformation `json:"signatures"`
- // The active signature. If omitted or the value lies outside the
- // range of `signatures` the value defaults to zero or is ignored if
- // the `SignatureHelp` has no signatures.
- //
- // Whenever possible implementors should make an active decision about
- // the active signature and shouldn't rely on a default value.
- //
- // In future version of the protocol this property might become
- // mandatory to better express this.
- ActiveSignature uint32 `json:"activeSignature,omitempty"`
- // The active parameter of the active signature.
- //
- // If `null`, no parameter of the signature is active (for example a named
- // argument that does not match any declared parameters). This is only valid
- // if the client specifies the client capability
- // `textDocument.signatureHelp.noActiveParameterSupport === true`
- //
- // If omitted or the value lies outside the range of
- // `signatures[activeSignature].parameters` defaults to 0 if the active
- // signature has parameters.
- //
- // If the active signature has no parameters it is ignored.
- //
- // In future version of the protocol this property might become
- // mandatory (but still nullable) to better express the active parameter if
- // the active signature does have any.
- ActiveParameter uint32 `json:"activeParameter,omitempty"`
-}
-
-// Client Capabilities for a {@link SignatureHelpRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpClientCapabilities
-type SignatureHelpClientCapabilities struct {
- // Whether signature help supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports the following `SignatureInformation`
- // specific properties.
- SignatureInformation *ClientSignatureInformationOptions `json:"signatureInformation,omitempty"`
- // The client supports to send additional context information for a
- // `textDocument/signatureHelp` request. A client that opts into
- // contextSupport will also support the `retriggerCharacters` on
- // `SignatureHelpOptions`.
- //
- // @since 3.15.0
- ContextSupport bool `json:"contextSupport,omitempty"`
-}
-
-// Additional information about the context in which a signature help request was triggered.
-//
-// @since 3.15.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpContext
-type SignatureHelpContext struct {
- // Action that caused signature help to be triggered.
- TriggerKind SignatureHelpTriggerKind `json:"triggerKind"`
- // Character that caused signature help to be triggered.
- //
- // This is undefined when `triggerKind !== SignatureHelpTriggerKind.TriggerCharacter`
- TriggerCharacter string `json:"triggerCharacter,omitempty"`
- // `true` if signature help was already showing when it was triggered.
- //
- // Retriggers occurs when the signature help is already active and can be caused by actions such as
- // typing a trigger character, a cursor move, or document content changes.
- IsRetrigger bool `json:"isRetrigger"`
- // The currently active `SignatureHelp`.
- //
- // The `activeSignatureHelp` has its `SignatureHelp.activeSignature` field updated based on
- // the user navigating through available signatures.
- ActiveSignatureHelp *SignatureHelp `json:"activeSignatureHelp,omitempty"`
-}
-
-// Server Capabilities for a {@link SignatureHelpRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpOptions
-type SignatureHelpOptions struct {
- // List of characters that trigger signature help automatically.
- TriggerCharacters []string `json:"triggerCharacters,omitempty"`
- // List of characters that re-trigger signature help.
- //
- // These trigger characters are only active when signature help is already showing. All trigger characters
- // are also counted as re-trigger characters.
- //
- // @since 3.15.0
- RetriggerCharacters []string `json:"retriggerCharacters,omitempty"`
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link SignatureHelpRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpParams
-type SignatureHelpParams struct {
- // The signature help context. This is only available if the client specifies
- // to send this using the client capability `textDocument.signatureHelp.contextSupport === true`
- //
- // @since 3.15.0
- Context *SignatureHelpContext `json:"context,omitempty"`
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// Registration options for a {@link SignatureHelpRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpRegistrationOptions
-type SignatureHelpRegistrationOptions struct {
- TextDocumentRegistrationOptions
- SignatureHelpOptions
-}
-
-// How a signature help was triggered.
-//
-// @since 3.15.0
-type SignatureHelpTriggerKind uint32
-
-// Represents the signature of something callable. A signature
-// can have a label, like a function-name, a doc-comment, and
-// a set of parameters.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureInformation
-type SignatureInformation struct {
- // The label of this signature. Will be shown in
- // the UI.
- Label string `json:"label"`
- // The human-readable doc-comment of this signature. Will be shown
- // in the UI but can be omitted.
- Documentation *Or_SignatureInformation_documentation `json:"documentation,omitempty"`
- // The parameters of this signature.
- Parameters []ParameterInformation `json:"parameters,omitempty"`
- // The index of the active parameter.
- //
- // If `null`, no parameter of the signature is active (for example a named
- // argument that does not match any declared parameters). This is only valid
- // if the client specifies the client capability
- // `textDocument.signatureHelp.noActiveParameterSupport === true`
- //
- // If provided (or `null`), this is used in place of
- // `SignatureHelp.activeParameter`.
- //
- // @since 3.16.0
- ActiveParameter uint32 `json:"activeParameter,omitempty"`
-}
-
-// An interactive text edit.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#snippetTextEdit
-type SnippetTextEdit struct {
- // The range of the text document to be manipulated.
- Range Range `json:"range"`
- // The snippet to be inserted.
- Snippet StringValue `json:"snippet"`
- // The actual identifier of the snippet edit.
- AnnotationID *ChangeAnnotationIdentifier `json:"annotationId,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#staleRequestSupportOptions
-type StaleRequestSupportOptions struct {
- // The client will actively cancel the request.
- Cancel bool `json:"cancel"`
- // The list of requests for which the client
- // will retry the request if it receives a
- // response with error code `ContentModified`
- RetryOnContentModified []string `json:"retryOnContentModified"`
-}
-
-// Static registration options to be returned in the initialize
-// request.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#staticRegistrationOptions
-type StaticRegistrationOptions struct {
- // The id used to register the request. The id can be used to deregister
- // the request again. See also Registration#id.
- ID string `json:"id,omitempty"`
-}
-
-// A string value used as a snippet is a template which allows to insert text
-// and to control the editor cursor when insertion happens.
-//
-// A snippet can define tab stops and placeholders with `$1`, `$2`
-// and `${3:foo}`. `$0` defines the final tab stop, it defaults to
-// the end of the snippet. Variables are defined with `$name` and
-// `${name:default value}`.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#stringValue
-type StringValue struct {
- // The kind of string value.
- Kind string `json:"kind"`
- // The snippet string.
- Value string `json:"value"`
-}
-
-// Represents information about programming constructs like variables, classes,
-// interfaces etc.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#symbolInformation
-type SymbolInformation struct {
- // extends BaseSymbolInformation
- // Indicates if this symbol is deprecated.
- //
- // @deprecated Use tags instead
- Deprecated bool `json:"deprecated,omitempty"`
- // The location of this symbol. The location's range is used by a tool
- // to reveal the location in the editor. If the symbol is selected in the
- // tool the range's start information is used to position the cursor. So
- // the range usually spans more than the actual symbol's name and does
- // normally include things like visibility modifiers.
- //
- // The range doesn't have to denote a node range in the sense of an abstract
- // syntax tree. It can therefore not be used to re-construct a hierarchy of
- // the symbols.
- Location Location `json:"location"`
- // The name of this symbol.
- Name string `json:"name"`
- // The kind of this symbol.
- Kind SymbolKind `json:"kind"`
- // Tags for this symbol.
- //
- // @since 3.16.0
- Tags []SymbolTag `json:"tags,omitempty"`
- // The name of the symbol containing this symbol. This information is for
- // user interface purposes (e.g. to render a qualifier in the user interface
- // if necessary). It can't be used to re-infer a hierarchy for the document
- // symbols.
- ContainerName string `json:"containerName,omitempty"`
-}
-
-// A symbol kind.
-type SymbolKind uint32
-
-// Symbol tags are extra annotations that tweak the rendering of a symbol.
-//
-// @since 3.16
-type SymbolTag uint32
-
-// Describe options to be used when registered for text document change events.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentChangeRegistrationOptions
-type TextDocumentChangeRegistrationOptions struct {
- // How documents are synced to the server.
- SyncKind TextDocumentSyncKind `json:"syncKind"`
- TextDocumentRegistrationOptions
-}
-
-// Text document specific client capabilities.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentClientCapabilities
-type TextDocumentClientCapabilities struct {
- // Defines which synchronization capabilities the client supports.
- Synchronization *TextDocumentSyncClientCapabilities `json:"synchronization,omitempty"`
- // Capabilities specific to the `textDocument/completion` request.
- Completion CompletionClientCapabilities `json:"completion,omitempty"`
- // Capabilities specific to the `textDocument/hover` request.
- Hover *HoverClientCapabilities `json:"hover,omitempty"`
- // Capabilities specific to the `textDocument/signatureHelp` request.
- SignatureHelp *SignatureHelpClientCapabilities `json:"signatureHelp,omitempty"`
- // Capabilities specific to the `textDocument/declaration` request.
- //
- // @since 3.14.0
- Declaration *DeclarationClientCapabilities `json:"declaration,omitempty"`
- // Capabilities specific to the `textDocument/definition` request.
- Definition *DefinitionClientCapabilities `json:"definition,omitempty"`
- // Capabilities specific to the `textDocument/typeDefinition` request.
- //
- // @since 3.6.0
- TypeDefinition *TypeDefinitionClientCapabilities `json:"typeDefinition,omitempty"`
- // Capabilities specific to the `textDocument/implementation` request.
- //
- // @since 3.6.0
- Implementation *ImplementationClientCapabilities `json:"implementation,omitempty"`
- // Capabilities specific to the `textDocument/references` request.
- References *ReferenceClientCapabilities `json:"references,omitempty"`
- // Capabilities specific to the `textDocument/documentHighlight` request.
- DocumentHighlight *DocumentHighlightClientCapabilities `json:"documentHighlight,omitempty"`
- // Capabilities specific to the `textDocument/documentSymbol` request.
- DocumentSymbol DocumentSymbolClientCapabilities `json:"documentSymbol,omitempty"`
- // Capabilities specific to the `textDocument/codeAction` request.
- CodeAction CodeActionClientCapabilities `json:"codeAction,omitempty"`
- // Capabilities specific to the `textDocument/codeLens` request.
- CodeLens *CodeLensClientCapabilities `json:"codeLens,omitempty"`
- // Capabilities specific to the `textDocument/documentLink` request.
- DocumentLink *DocumentLinkClientCapabilities `json:"documentLink,omitempty"`
- // Capabilities specific to the `textDocument/documentColor` and the
- // `textDocument/colorPresentation` request.
- //
- // @since 3.6.0
- ColorProvider *DocumentColorClientCapabilities `json:"colorProvider,omitempty"`
- // Capabilities specific to the `textDocument/formatting` request.
- Formatting *DocumentFormattingClientCapabilities `json:"formatting,omitempty"`
- // Capabilities specific to the `textDocument/rangeFormatting` request.
- RangeFormatting *DocumentRangeFormattingClientCapabilities `json:"rangeFormatting,omitempty"`
- // Capabilities specific to the `textDocument/onTypeFormatting` request.
- OnTypeFormatting *DocumentOnTypeFormattingClientCapabilities `json:"onTypeFormatting,omitempty"`
- // Capabilities specific to the `textDocument/rename` request.
- Rename *RenameClientCapabilities `json:"rename,omitempty"`
- // Capabilities specific to the `textDocument/foldingRange` request.
- //
- // @since 3.10.0
- FoldingRange *FoldingRangeClientCapabilities `json:"foldingRange,omitempty"`
- // Capabilities specific to the `textDocument/selectionRange` request.
- //
- // @since 3.15.0
- SelectionRange *SelectionRangeClientCapabilities `json:"selectionRange,omitempty"`
- // Capabilities specific to the `textDocument/publishDiagnostics` notification.
- PublishDiagnostics PublishDiagnosticsClientCapabilities `json:"publishDiagnostics,omitempty"`
- // Capabilities specific to the various call hierarchy requests.
- //
- // @since 3.16.0
- CallHierarchy *CallHierarchyClientCapabilities `json:"callHierarchy,omitempty"`
- // Capabilities specific to the various semantic token request.
- //
- // @since 3.16.0
- SemanticTokens SemanticTokensClientCapabilities `json:"semanticTokens,omitempty"`
- // Capabilities specific to the `textDocument/linkedEditingRange` request.
- //
- // @since 3.16.0
- LinkedEditingRange *LinkedEditingRangeClientCapabilities `json:"linkedEditingRange,omitempty"`
- // Client capabilities specific to the `textDocument/moniker` request.
- //
- // @since 3.16.0
- Moniker *MonikerClientCapabilities `json:"moniker,omitempty"`
- // Capabilities specific to the various type hierarchy requests.
- //
- // @since 3.17.0
- TypeHierarchy *TypeHierarchyClientCapabilities `json:"typeHierarchy,omitempty"`
- // Capabilities specific to the `textDocument/inlineValue` request.
- //
- // @since 3.17.0
- InlineValue *InlineValueClientCapabilities `json:"inlineValue,omitempty"`
- // Capabilities specific to the `textDocument/inlayHint` request.
- //
- // @since 3.17.0
- InlayHint *InlayHintClientCapabilities `json:"inlayHint,omitempty"`
- // Capabilities specific to the diagnostic pull model.
- //
- // @since 3.17.0
- Diagnostic *DiagnosticClientCapabilities `json:"diagnostic,omitempty"`
- // Client capabilities specific to inline completions.
- //
- // @since 3.18.0
- // @proposed
- InlineCompletion *InlineCompletionClientCapabilities `json:"inlineCompletion,omitempty"`
-}
-
-// An event describing a change to a text document. If only a text is provided
-// it is considered to be the full content of the document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentChangeEvent
-type (
- TextDocumentContentChangeEvent = Or_TextDocumentContentChangeEvent // (alias)
- // @since 3.18.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentChangePartial
- TextDocumentContentChangePartial struct {
- // The range of the document that changed.
- Range *Range `json:"range,omitempty"`
- // The optional length of the range that got replaced.
- //
- // @deprecated use range instead.
- RangeLength uint32 `json:"rangeLength,omitempty"`
- // The new text for the provided range.
- Text string `json:"text"`
- }
-)
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentChangeWholeDocument
-type TextDocumentContentChangeWholeDocument struct {
- // The new text of the whole document.
- Text string `json:"text"`
-}
-
-// Client capabilities for a text document content provider.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentClientCapabilities
-type TextDocumentContentClientCapabilities struct {
- // Text document content provider supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// Text document content provider options.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentOptions
-type TextDocumentContentOptions struct {
- // The scheme for which the server provides content.
- Scheme string `json:"scheme"`
-}
-
-// Parameters for the `workspace/textDocumentContent` request.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentParams
-type TextDocumentContentParams struct {
- // The uri of the text document.
- URI DocumentURI `json:"uri"`
-}
-
-// Parameters for the `workspace/textDocumentContent/refresh` request.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentRefreshParams
-type TextDocumentContentRefreshParams struct {
- // The uri of the text document to refresh.
- URI DocumentURI `json:"uri"`
-}
-
-// Text document content provider registration options.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentRegistrationOptions
-type TextDocumentContentRegistrationOptions struct {
- TextDocumentContentOptions
- StaticRegistrationOptions
-}
-
-// Describes textual changes on a text document. A TextDocumentEdit describes all changes
-// on a document version Si and after they are applied move the document to version Si+1.
-// So the creator of a TextDocumentEdit doesn't need to sort the array of edits or do any
-// kind of ordering. However the edits must be non overlapping.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentEdit
-type TextDocumentEdit struct {
- // The text document to change.
- TextDocument OptionalVersionedTextDocumentIdentifier `json:"textDocument"`
- // The edits to be applied.
- //
- // @since 3.16.0 - support for AnnotatedTextEdit. This is guarded using a
- // client capability.
- //
- // @since 3.18.0 - support for SnippetTextEdit. This is guarded using a
- // client capability.
- Edits []Or_TextDocumentEdit_edits_Elem `json:"edits"`
-}
-
-// A document filter denotes a document by different properties like
-// the {@link TextDocument.languageId language}, the {@link Uri.scheme scheme} of
-// its resource, or a glob-pattern that is applied to the {@link TextDocument.fileName path}.
-//
-// Glob patterns can have the following syntax:
-//
-// - `*` to match one or more characters in a path segment
-// - `?` to match on one character in a path segment
-// - `**` to match any number of path segments, including none
-// - `{}` to group sub patterns into an OR expression. (e.g. `**/*.{ts,js}` matches all TypeScript and JavaScript files)
-// - `[]` to declare a range of characters to match in a path segment (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …)
-// - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`)
-//
-// @sample A language filter that applies to typescript files on disk: `{ language: 'typescript', scheme: 'file' }`
-// @sample A language filter that applies to all package.json paths: `{ language: 'json', pattern: '**package.json' }`
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentFilter
-type (
- TextDocumentFilter = Or_TextDocumentFilter // (alias)
- // A document filter where `language` is required field.
- //
- // @since 3.18.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentFilterLanguage
- TextDocumentFilterLanguage struct {
- // A language id, like `typescript`.
- Language string `json:"language"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme,omitempty"`
- // A glob pattern, like **/*.{ts,js}. See TextDocumentFilter for examples.
- //
- // @since 3.18.0 - support for relative patterns.
- Pattern *GlobPattern `json:"pattern,omitempty"`
- }
-)
-
-// A document filter where `pattern` is required field.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentFilterPattern
-type TextDocumentFilterPattern struct {
- // A language id, like `typescript`.
- Language string `json:"language,omitempty"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme,omitempty"`
- // A glob pattern, like **/*.{ts,js}. See TextDocumentFilter for examples.
- //
- // @since 3.18.0 - support for relative patterns.
- Pattern GlobPattern `json:"pattern"`
-}
-
-// A document filter where `scheme` is required field.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentFilterScheme
-type TextDocumentFilterScheme struct {
- // A language id, like `typescript`.
- Language string `json:"language,omitempty"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme"`
- // A glob pattern, like **/*.{ts,js}. See TextDocumentFilter for examples.
- //
- // @since 3.18.0 - support for relative patterns.
- Pattern *GlobPattern `json:"pattern,omitempty"`
-}
-
-// A literal to identify a text document in the client.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentIdentifier
-type TextDocumentIdentifier struct {
- // The text document's uri.
- URI DocumentURI `json:"uri"`
-}
-
-// An item to transfer a text document from the client to the
-// server.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentItem
-type TextDocumentItem struct {
- // The text document's uri.
- URI DocumentURI `json:"uri"`
- // The text document's language identifier.
- LanguageID LanguageKind `json:"languageId"`
- // The version number of this document (it will increase after each
- // change, including undo/redo).
- Version int32 `json:"version"`
- // The content of the opened text document.
- Text string `json:"text"`
-}
-
-// A parameter literal used in requests to pass a text document and a position inside that
-// document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentPositionParams
-type TextDocumentPositionParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The position inside the text document.
- Position Position `json:"position"`
-}
-
-// General text document registration options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentRegistrationOptions
-type TextDocumentRegistrationOptions struct {
- // A document selector to identify the scope of the registration. If set to null
- // the document selector provided on the client side will be used.
- DocumentSelector DocumentSelector `json:"documentSelector"`
-}
-
-// Represents reasons why a text document is saved.
-type TextDocumentSaveReason uint32
-
-// Save registration options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentSaveRegistrationOptions
-type TextDocumentSaveRegistrationOptions struct {
- TextDocumentRegistrationOptions
- SaveOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentSyncClientCapabilities
-type TextDocumentSyncClientCapabilities struct {
- // Whether text document synchronization supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports sending will save notifications.
- WillSave bool `json:"willSave,omitempty"`
- // The client supports sending a will save request and
- // waits for a response providing text edits which will
- // be applied to the document before it is saved.
- WillSaveWaitUntil bool `json:"willSaveWaitUntil,omitempty"`
- // The client supports did save notifications.
- DidSave bool `json:"didSave,omitempty"`
-}
-
-// Defines how the host (editor) should sync
-// document changes to the language server.
-type TextDocumentSyncKind uint32
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentSyncOptions
-type TextDocumentSyncOptions struct {
- // Open and close notifications are sent to the server. If omitted open close notification should not
- // be sent.
- OpenClose bool `json:"openClose,omitempty"`
- // Change notifications are sent to the server. See TextDocumentSyncKind.None, TextDocumentSyncKind.Full
- // and TextDocumentSyncKind.Incremental. If omitted it defaults to TextDocumentSyncKind.None.
- Change TextDocumentSyncKind `json:"change,omitempty"`
- // If present will save notifications are sent to the server. If omitted the notification should not be
- // sent.
- WillSave bool `json:"willSave,omitempty"`
- // If present will save wait until requests are sent to the server. If omitted the request should not be
- // sent.
- WillSaveWaitUntil bool `json:"willSaveWaitUntil,omitempty"`
- // If present save notifications are sent to the server. If omitted the notification should not be
- // sent.
- Save *SaveOptions `json:"save,omitempty"`
-}
-
-// A text edit applicable to a text document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textEdit
-type TextEdit struct {
- // The range of the text document to be manipulated. To insert
- // text into a document create a range where start === end.
- Range Range `json:"range"`
- // The string to be inserted. For delete operations use an
- // empty string.
- NewText string `json:"newText"`
-}
-type (
- TokenFormat string
- TraceValue string
-)
-
-// created for Tuple
-type Tuple_ParameterInformation_label_Item1 struct {
- Fld0 uint32 `json:"fld0"`
- Fld1 uint32 `json:"fld1"`
-}
-
-// Since 3.6.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeDefinitionClientCapabilities
-type TypeDefinitionClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `TypeDefinitionRegistrationOptions` return value
- // for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports additional metadata in the form of definition links.
- //
- // Since 3.14.0
- LinkSupport bool `json:"linkSupport,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeDefinitionOptions
-type TypeDefinitionOptions struct {
- WorkDoneProgressOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeDefinitionParams
-type TypeDefinitionParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeDefinitionRegistrationOptions
-type TypeDefinitionRegistrationOptions struct {
- TextDocumentRegistrationOptions
- TypeDefinitionOptions
- StaticRegistrationOptions
-}
-
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyClientCapabilities
-type TypeHierarchyClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyItem
-type TypeHierarchyItem struct {
- // The name of this item.
- Name string `json:"name"`
- // The kind of this item.
- Kind SymbolKind `json:"kind"`
- // Tags for this item.
- Tags []SymbolTag `json:"tags,omitempty"`
- // More detail for this item, e.g. the signature of a function.
- Detail string `json:"detail,omitempty"`
- // The resource identifier of this item.
- URI DocumentURI `json:"uri"`
- // The range enclosing this symbol not including leading/trailing whitespace
- // but everything else, e.g. comments and code.
- Range Range `json:"range"`
- // The range that should be selected and revealed when this symbol is being
- // picked, e.g. the name of a function. Must be contained by the
- // {@link TypeHierarchyItem.range `range`}.
- SelectionRange Range `json:"selectionRange"`
- // A data entry field that is preserved between a type hierarchy prepare and
- // supertypes or subtypes requests. It could also be used to identify the
- // type hierarchy in the server, helping improve the performance on
- // resolving supertypes and subtypes.
- Data interface{} `json:"data,omitempty"`
-}
-
-// Type hierarchy options used during static registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyOptions
-type TypeHierarchyOptions struct {
- WorkDoneProgressOptions
-}
-
-// The parameter of a `textDocument/prepareTypeHierarchy` request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyPrepareParams
-type TypeHierarchyPrepareParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// Type hierarchy options used during static or dynamic registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyRegistrationOptions
-type TypeHierarchyRegistrationOptions struct {
- TextDocumentRegistrationOptions
- TypeHierarchyOptions
- StaticRegistrationOptions
-}
-
-// The parameter of a `typeHierarchy/subtypes` request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchySubtypesParams
-type TypeHierarchySubtypesParams struct {
- Item TypeHierarchyItem `json:"item"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// The parameter of a `typeHierarchy/supertypes` request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchySupertypesParams
-type TypeHierarchySupertypesParams struct {
- Item TypeHierarchyItem `json:"item"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// A diagnostic report indicating that the last returned
-// report is still accurate.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#unchangedDocumentDiagnosticReport
-type UnchangedDocumentDiagnosticReport struct {
- // A document diagnostic report indicating
- // no changes to the last result. A server can
- // only return `unchanged` if result ids are
- // provided.
- Kind string `json:"kind"`
- // A result id which will be sent on the next
- // diagnostic request for the same document.
- ResultID string `json:"resultId"`
-}
-
-// Moniker uniqueness level to define scope of the moniker.
-//
-// @since 3.16.0
-type UniquenessLevel string
-
-// General parameters to unregister a request or notification.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#unregistration
-type Unregistration struct {
- // The id used to unregister the request or notification. Usually an id
- // provided during the register request.
- ID string `json:"id"`
- // The method to unregister for.
- Method string `json:"method"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#unregistrationParams
-type UnregistrationParams struct {
- Unregisterations []Unregistration `json:"unregisterations"`
-}
-
-// A versioned notebook document identifier.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#versionedNotebookDocumentIdentifier
-type VersionedNotebookDocumentIdentifier struct {
- // The version number of this notebook document.
- Version int32 `json:"version"`
- // The notebook document's uri.
- URI URI `json:"uri"`
-}
-
-// A text document identifier to denote a specific version of a text document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#versionedTextDocumentIdentifier
-type VersionedTextDocumentIdentifier struct {
- // The version number of this document.
- Version int32 `json:"version"`
- TextDocumentIdentifier
-}
-type (
- WatchKind = uint32 // The parameters sent in a will save text document notification.
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#willSaveTextDocumentParams
- WillSaveTextDocumentParams struct {
- // The document that will be saved.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The 'TextDocumentSaveReason'.
- Reason TextDocumentSaveReason `json:"reason"`
- }
-)
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#windowClientCapabilities
-type WindowClientCapabilities struct {
- // It indicates whether the client supports server initiated
- // progress using the `window/workDoneProgress/create` request.
- //
- // The capability also controls Whether client supports handling
- // of progress notifications. If set servers are allowed to report a
- // `workDoneProgress` property in the request specific server
- // capabilities.
- //
- // @since 3.15.0
- WorkDoneProgress bool `json:"workDoneProgress,omitempty"`
- // Capabilities specific to the showMessage request.
- //
- // @since 3.16.0
- ShowMessage *ShowMessageRequestClientCapabilities `json:"showMessage,omitempty"`
- // Capabilities specific to the showDocument request.
- //
- // @since 3.16.0
- ShowDocument *ShowDocumentClientCapabilities `json:"showDocument,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressBegin
-type WorkDoneProgressBegin struct {
- Kind string `json:"kind"`
- // Mandatory title of the progress operation. Used to briefly inform about
- // the kind of operation being performed.
- //
- // Examples: "Indexing" or "Linking dependencies".
- Title string `json:"title"`
- // Controls if a cancel button should show to allow the user to cancel the
- // long running operation. Clients that don't support cancellation are allowed
- // to ignore the setting.
- Cancellable bool `json:"cancellable,omitempty"`
- // Optional, more detailed associated progress message. Contains
- // complementary information to the `title`.
- //
- // Examples: "3/25 files", "project/src/module2", "node_modules/some_dep".
- // If unset, the previous progress message (if any) is still valid.
- Message string `json:"message,omitempty"`
- // Optional progress percentage to display (value 100 is considered 100%).
- // If not provided infinite progress is assumed and clients are allowed
- // to ignore the `percentage` value in subsequent in report notifications.
- //
- // The value should be steadily rising. Clients are free to ignore values
- // that are not following this rule. The value range is [0, 100].
- Percentage uint32 `json:"percentage,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressCancelParams
-type WorkDoneProgressCancelParams struct {
- // The token to be used to report progress.
- Token ProgressToken `json:"token"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressCreateParams
-type WorkDoneProgressCreateParams struct {
- // The token to be used to report progress.
- Token ProgressToken `json:"token"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressEnd
-type WorkDoneProgressEnd struct {
- Kind string `json:"kind"`
- // Optional, a final message indicating to for example indicate the outcome
- // of the operation.
- Message string `json:"message,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressOptions
-type WorkDoneProgressOptions struct {
- WorkDoneProgress bool `json:"workDoneProgress,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressParams
-type WorkDoneProgressParams struct {
- // An optional token that a server can use to report work done progress.
- WorkDoneToken ProgressToken `json:"workDoneToken,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressReport
-type WorkDoneProgressReport struct {
- Kind string `json:"kind"`
- // Controls enablement state of a cancel button.
- //
- // Clients that don't support cancellation or don't support controlling the button's
- // enablement state are allowed to ignore the property.
- Cancellable bool `json:"cancellable,omitempty"`
- // Optional, more detailed associated progress message. Contains
- // complementary information to the `title`.
- //
- // Examples: "3/25 files", "project/src/module2", "node_modules/some_dep".
- // If unset, the previous progress message (if any) is still valid.
- Message string `json:"message,omitempty"`
- // Optional progress percentage to display (value 100 is considered 100%).
- // If not provided infinite progress is assumed and clients are allowed
- // to ignore the `percentage` value in subsequent in report notifications.
- //
- // The value should be steadily rising. Clients are free to ignore values
- // that are not following this rule. The value range is [0, 100]
- Percentage uint32 `json:"percentage,omitempty"`
-}
-
-// Workspace specific client capabilities.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceClientCapabilities
-type WorkspaceClientCapabilities struct {
- // The client supports applying batch edits
- // to the workspace by supporting the request
- // 'workspace/applyEdit'
- ApplyEdit bool `json:"applyEdit,omitempty"`
- // Capabilities specific to `WorkspaceEdit`s.
- WorkspaceEdit *WorkspaceEditClientCapabilities `json:"workspaceEdit,omitempty"`
- // Capabilities specific to the `workspace/didChangeConfiguration` notification.
- DidChangeConfiguration DidChangeConfigurationClientCapabilities `json:"didChangeConfiguration,omitempty"`
- // Capabilities specific to the `workspace/didChangeWatchedFiles` notification.
- DidChangeWatchedFiles DidChangeWatchedFilesClientCapabilities `json:"didChangeWatchedFiles,omitempty"`
- // Capabilities specific to the `workspace/symbol` request.
- Symbol *WorkspaceSymbolClientCapabilities `json:"symbol,omitempty"`
- // Capabilities specific to the `workspace/executeCommand` request.
- ExecuteCommand *ExecuteCommandClientCapabilities `json:"executeCommand,omitempty"`
- // The client has support for workspace folders.
- //
- // @since 3.6.0
- WorkspaceFolders bool `json:"workspaceFolders,omitempty"`
- // The client supports `workspace/configuration` requests.
- //
- // @since 3.6.0
- Configuration bool `json:"configuration,omitempty"`
- // Capabilities specific to the semantic token requests scoped to the
- // workspace.
- //
- // @since 3.16.0.
- SemanticTokens *SemanticTokensWorkspaceClientCapabilities `json:"semanticTokens,omitempty"`
- // Capabilities specific to the code lens requests scoped to the
- // workspace.
- //
- // @since 3.16.0.
- CodeLens *CodeLensWorkspaceClientCapabilities `json:"codeLens,omitempty"`
- // The client has support for file notifications/requests for user operations on files.
- //
- // Since 3.16.0
- FileOperations *FileOperationClientCapabilities `json:"fileOperations,omitempty"`
- // Capabilities specific to the inline values requests scoped to the
- // workspace.
- //
- // @since 3.17.0.
- InlineValue *InlineValueWorkspaceClientCapabilities `json:"inlineValue,omitempty"`
- // Capabilities specific to the inlay hint requests scoped to the
- // workspace.
- //
- // @since 3.17.0.
- InlayHint *InlayHintWorkspaceClientCapabilities `json:"inlayHint,omitempty"`
- // Capabilities specific to the diagnostic requests scoped to the
- // workspace.
- //
- // @since 3.17.0.
- Diagnostics *DiagnosticWorkspaceClientCapabilities `json:"diagnostics,omitempty"`
- // Capabilities specific to the folding range requests scoped to the workspace.
- //
- // @since 3.18.0
- // @proposed
- FoldingRange *FoldingRangeWorkspaceClientCapabilities `json:"foldingRange,omitempty"`
- // Capabilities specific to the `workspace/textDocumentContent` request.
- //
- // @since 3.18.0
- // @proposed
- TextDocumentContent *TextDocumentContentClientCapabilities `json:"textDocumentContent,omitempty"`
-}
-
-// Parameters of the workspace diagnostic request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceDiagnosticParams
-type WorkspaceDiagnosticParams struct {
- // The additional identifier provided during registration.
- Identifier string `json:"identifier,omitempty"`
- // The currently known diagnostic reports with their
- // previous result ids.
- PreviousResultIds []PreviousResultId `json:"previousResultIds"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// A workspace diagnostic report.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceDiagnosticReport
-type WorkspaceDiagnosticReport struct {
- Items []WorkspaceDocumentDiagnosticReport `json:"items"`
-}
-
-// A partial result for a workspace diagnostic report.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceDiagnosticReportPartialResult
-type WorkspaceDiagnosticReportPartialResult struct {
- Items []WorkspaceDocumentDiagnosticReport `json:"items"`
-}
-
-// A workspace diagnostic document report.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceDocumentDiagnosticReport
-type (
- WorkspaceDocumentDiagnosticReport = Or_WorkspaceDocumentDiagnosticReport // (alias)
- // A workspace edit represents changes to many resources managed in the workspace. The edit
- // should either provide `changes` or `documentChanges`. If documentChanges are present
- // they are preferred over `changes` if the client can handle versioned document edits.
- //
- // Since version 3.13.0 a workspace edit can contain resource operations as well. If resource
- // operations are present clients need to execute the operations in the order in which they
- // are provided. So a workspace edit for example can consist of the following two changes:
- // (1) a create file a.txt and (2) a text document edit which insert text into file a.txt.
- //
- // An invalid sequence (e.g. (1) delete file a.txt and (2) insert text into file a.txt) will
- // cause failure of the operation. How the client recovers from the failure is described by
- // the client capability: `workspace.workspaceEdit.failureHandling`
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceEdit
- WorkspaceEdit struct {
- // Holds changes to existing resources.
- Changes map[DocumentURI][]TextEdit `json:"changes,omitempty"`
- // Depending on the client capability `workspace.workspaceEdit.resourceOperations` document changes
- // are either an array of `TextDocumentEdit`s to express changes to n different text documents
- // where each text document edit addresses a specific version of a text document. Or it can contain
- // above `TextDocumentEdit`s mixed with create, rename and delete file / folder operations.
- //
- // Whether a client supports versioned document edits is expressed via
- // `workspace.workspaceEdit.documentChanges` client capability.
- //
- // If a client neither supports `documentChanges` nor `workspace.workspaceEdit.resourceOperations` then
- // only plain `TextEdit`s using the `changes` property are supported.
- DocumentChanges []DocumentChange `json:"documentChanges,omitempty"`
- // A map of change annotations that can be referenced in `AnnotatedTextEdit`s or create, rename and
- // delete file / folder operations.
- //
- // Whether clients honor this property depends on the client capability `workspace.changeAnnotationSupport`.
- //
- // @since 3.16.0
- ChangeAnnotations map[ChangeAnnotationIdentifier]ChangeAnnotation `json:"changeAnnotations,omitempty"`
- }
-)
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceEditClientCapabilities
-type WorkspaceEditClientCapabilities struct {
- // The client supports versioned document changes in `WorkspaceEdit`s
- DocumentChanges bool `json:"documentChanges,omitempty"`
- // The resource operations the client supports. Clients should at least
- // support 'create', 'rename' and 'delete' files and folders.
- //
- // @since 3.13.0
- ResourceOperations []ResourceOperationKind `json:"resourceOperations,omitempty"`
- // The failure handling strategy of a client if applying the workspace edit
- // fails.
- //
- // @since 3.13.0
- FailureHandling *FailureHandlingKind `json:"failureHandling,omitempty"`
- // Whether the client normalizes line endings to the client specific
- // setting.
- // If set to `true` the client will normalize line ending characters
- // in a workspace edit to the client-specified new line
- // character.
- //
- // @since 3.16.0
- NormalizesLineEndings bool `json:"normalizesLineEndings,omitempty"`
- // Whether the client in general supports change annotations on text edits,
- // create file, rename file and delete file changes.
- //
- // @since 3.16.0
- ChangeAnnotationSupport *ChangeAnnotationsSupportOptions `json:"changeAnnotationSupport,omitempty"`
- // Whether the client supports `WorkspaceEditMetadata` in `WorkspaceEdit`s.
- //
- // @since 3.18.0
- // @proposed
- MetadataSupport bool `json:"metadataSupport,omitempty"`
- // Whether the client supports snippets as text edits.
- //
- // @since 3.18.0
- // @proposed
- SnippetEditSupport bool `json:"snippetEditSupport,omitempty"`
-}
-
-// Additional data about a workspace edit.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceEditMetadata
-type WorkspaceEditMetadata struct {
- // Signal to the editor that this edit is a refactoring.
- IsRefactoring bool `json:"isRefactoring,omitempty"`
-}
-
-// A workspace folder inside a client.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFolder
-type WorkspaceFolder struct {
- // The associated URI for this workspace folder.
- URI URI `json:"uri"`
- // The name of the workspace folder. Used to refer to this
- // workspace folder in the user interface.
- Name string `json:"name"`
-}
-
-// The workspace folder change event.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFoldersChangeEvent
-type WorkspaceFoldersChangeEvent struct {
- // The array of added workspace folders
- Added []WorkspaceFolder `json:"added"`
- // The array of the removed workspace folders
- Removed []WorkspaceFolder `json:"removed"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFoldersInitializeParams
-type WorkspaceFoldersInitializeParams struct {
- // The workspace folders configured in the client when the server starts.
- //
- // This property is only available if the client supports workspace folders.
- // It can be `null` if the client supports workspace folders but none are
- // configured.
- //
- // @since 3.6.0
- WorkspaceFolders []WorkspaceFolder `json:"workspaceFolders,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFoldersServerCapabilities
-type WorkspaceFoldersServerCapabilities struct {
- // The server has support for workspace folders
- Supported bool `json:"supported,omitempty"`
- // Whether the server wants to receive workspace folder
- // change notifications.
- //
- // If a string is provided the string is treated as an ID
- // under which the notification is registered on the client
- // side. The ID can be used to unregister for these events
- // using the `client/unregisterCapability` request.
- ChangeNotifications *Or_WorkspaceFoldersServerCapabilities_changeNotifications `json:"changeNotifications,omitempty"`
-}
-
-// A full document diagnostic report for a workspace diagnostic result.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFullDocumentDiagnosticReport
-type WorkspaceFullDocumentDiagnosticReport struct {
- // The URI for which diagnostic information is reported.
- URI DocumentURI `json:"uri"`
- // The version number for which the diagnostics are reported.
- // If the document is not marked as open `null` can be provided.
- Version int32 `json:"version"`
- FullDocumentDiagnosticReport
-}
-
-// Defines workspace specific capabilities of the server.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceOptions
-type WorkspaceOptions struct {
- // The server supports workspace folder.
- //
- // @since 3.6.0
- WorkspaceFolders *WorkspaceFoldersServerCapabilities `json:"workspaceFolders,omitempty"`
- // The server is interested in notifications/requests for operations on files.
- //
- // @since 3.16.0
- FileOperations *FileOperationOptions `json:"fileOperations,omitempty"`
- // The server supports the `workspace/textDocumentContent` request.
- //
- // @since 3.18.0
- // @proposed
- TextDocumentContent *Or_WorkspaceOptions_textDocumentContent `json:"textDocumentContent,omitempty"`
-}
-
-// A special workspace symbol that supports locations without a range.
-//
-// See also SymbolInformation.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbol
-type WorkspaceSymbol struct {
- // The location of the symbol. Whether a server is allowed to
- // return a location without a range depends on the client
- // capability `workspace.symbol.resolveSupport`.
- //
- // See SymbolInformation#location for more details.
- Location Or_WorkspaceSymbol_location `json:"location"`
- // A data entry field that is preserved on a workspace symbol between a
- // workspace symbol request and a workspace symbol resolve request.
- Data interface{} `json:"data,omitempty"`
- BaseSymbolInformation
-}
-
-// Client capabilities for a {@link WorkspaceSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbolClientCapabilities
-type WorkspaceSymbolClientCapabilities struct {
- // Symbol request supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Specific capabilities for the `SymbolKind` in the `workspace/symbol` request.
- SymbolKind *ClientSymbolKindOptions `json:"symbolKind,omitempty"`
- // The client supports tags on `SymbolInformation`.
- // Clients supporting tags have to handle unknown tags gracefully.
- //
- // @since 3.16.0
- TagSupport *ClientSymbolTagOptions `json:"tagSupport,omitempty"`
- // The client support partial workspace symbols. The client will send the
- // request `workspaceSymbol/resolve` to the server to resolve additional
- // properties.
- //
- // @since 3.17.0
- ResolveSupport *ClientSymbolResolveOptions `json:"resolveSupport,omitempty"`
-}
-
-// Server capabilities for a {@link WorkspaceSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbolOptions
-type WorkspaceSymbolOptions struct {
- // The server provides support to resolve additional
- // information for a workspace symbol.
- //
- // @since 3.17.0
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link WorkspaceSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbolParams
-type WorkspaceSymbolParams struct {
- // A query string to filter symbols by. Clients may send an empty
- // string here to request all symbols.
- //
- // The `query`-parameter should be interpreted in a *relaxed way* as editors
- // will apply their own highlighting and scoring on the results. A good rule
- // of thumb is to match case-insensitive and to simply check that the
- // characters of *query* appear in their order in a candidate symbol.
- // Servers shouldn't use prefix, substring, or similar strict matching.
- Query string `json:"query"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link WorkspaceSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbolRegistrationOptions
-type WorkspaceSymbolRegistrationOptions struct {
- WorkspaceSymbolOptions
-}
-
-// An unchanged document diagnostic report for a workspace diagnostic result.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceUnchangedDocumentDiagnosticReport
-type WorkspaceUnchangedDocumentDiagnosticReport struct {
- // The URI for which diagnostic information is reported.
- URI DocumentURI `json:"uri"`
- // The version number for which the diagnostics are reported.
- // If the document is not marked as open `null` can be provided.
- Version int32 `json:"version"`
- UnchangedDocumentDiagnosticReport
-}
-
-// The initialize parameters
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#_InitializeParams
-type XInitializeParams struct {
- // The process Id of the parent process that started
- // the server.
- //
- // Is `null` if the process has not been started by another process.
- // If the parent process is not alive then the server should exit.
- ProcessID int32 `json:"processId"`
- // Information about the client
- //
- // @since 3.15.0
- ClientInfo *ClientInfo `json:"clientInfo,omitempty"`
- // The locale the client is currently showing the user interface
- // in. This must not necessarily be the locale of the operating
- // system.
- //
- // Uses IETF language tags as the value's syntax
- // (See https://en.wikipedia.org/wiki/IETF_language_tag)
- //
- // @since 3.16.0
- Locale string `json:"locale,omitempty"`
- // The rootPath of the workspace. Is null
- // if no folder is open.
- //
- // @deprecated in favour of rootUri.
- RootPath string `json:"rootPath,omitempty"`
- // The rootUri of the workspace. Is null if no
- // folder is open. If both `rootPath` and `rootUri` are set
- // `rootUri` wins.
- //
- // @deprecated in favour of workspaceFolders.
- RootURI DocumentURI `json:"rootUri"`
- // The capabilities provided by the client (editor or tool)
- Capabilities ClientCapabilities `json:"capabilities"`
- // User provided initialization options.
- InitializationOptions interface{} `json:"initializationOptions,omitempty"`
- // The initial trace setting. If omitted trace is disabled ('off').
- Trace *TraceValue `json:"trace,omitempty"`
- WorkDoneProgressParams
-}
-
-// The initialize parameters
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#_InitializeParams
-type _InitializeParams struct {
- // The process Id of the parent process that started
- // the server.
- //
- // Is `null` if the process has not been started by another process.
- // If the parent process is not alive then the server should exit.
- ProcessID int32 `json:"processId"`
- // Information about the client
- //
- // @since 3.15.0
- ClientInfo *ClientInfo `json:"clientInfo,omitempty"`
- // The locale the client is currently showing the user interface
- // in. This must not necessarily be the locale of the operating
- // system.
- //
- // Uses IETF language tags as the value's syntax
- // (See https://en.wikipedia.org/wiki/IETF_language_tag)
- //
- // @since 3.16.0
- Locale string `json:"locale,omitempty"`
- // The rootPath of the workspace. Is null
- // if no folder is open.
- //
- // @deprecated in favour of rootUri.
- RootPath string `json:"rootPath,omitempty"`
- // The rootUri of the workspace. Is null if no
- // folder is open. If both `rootPath` and `rootUri` are set
- // `rootUri` wins.
- //
- // @deprecated in favour of workspaceFolders.
- RootURI DocumentURI `json:"rootUri"`
- // The capabilities provided by the client (editor or tool)
- Capabilities ClientCapabilities `json:"capabilities"`
- // User provided initialization options.
- InitializationOptions interface{} `json:"initializationOptions,omitempty"`
- // The initial trace setting. If omitted trace is disabled ('off').
- Trace *TraceValue `json:"trace,omitempty"`
- WorkDoneProgressParams
-}
-
-const (
- // A set of predefined code action kinds
- // Empty kind.
- Empty CodeActionKind = ""
- // Base kind for quickfix actions: 'quickfix'
- QuickFix CodeActionKind = "quickfix"
- // Base kind for refactoring actions: 'refactor'
- Refactor CodeActionKind = "refactor"
- // Base kind for refactoring extraction actions: 'refactor.extract'
- //
- // Example extract actions:
- //
- //
- // - Extract method
- // - Extract function
- // - Extract variable
- // - Extract interface from class
- // - ...
- RefactorExtract CodeActionKind = "refactor.extract"
- // Base kind for refactoring inline actions: 'refactor.inline'
- //
- // Example inline actions:
- //
- //
- // - Inline function
- // - Inline variable
- // - Inline constant
- // - ...
- RefactorInline CodeActionKind = "refactor.inline"
- // Base kind for refactoring move actions: `refactor.move`
- //
- // Example move actions:
- //
- //
- // - Move a function to a new file
- // - Move a property between classes
- // - Move method to base class
- // - ...
- //
- // @since 3.18.0
- // @proposed
- RefactorMove CodeActionKind = "refactor.move"
- // Base kind for refactoring rewrite actions: 'refactor.rewrite'
- //
- // Example rewrite actions:
- //
- //
- // - Convert JavaScript function to class
- // - Add or remove parameter
- // - Encapsulate field
- // - Make method static
- // - Move method to base class
- // - ...
- RefactorRewrite CodeActionKind = "refactor.rewrite"
- // Base kind for source actions: `source`
- //
- // Source code actions apply to the entire file.
- Source CodeActionKind = "source"
- // Base kind for an organize imports source action: `source.organizeImports`
- SourceOrganizeImports CodeActionKind = "source.organizeImports"
- // Base kind for auto-fix source actions: `source.fixAll`.
- //
- // Fix all actions automatically fix errors that have a clear fix that do not require user input.
- // They should not suppress errors or perform unsafe fixes such as generating new types or classes.
- //
- // @since 3.15.0
- SourceFixAll CodeActionKind = "source.fixAll"
- // Base kind for all code actions applying to the entire notebook's scope. CodeActionKinds using
- // this should always begin with `notebook.`
- //
- // @since 3.18.0
- Notebook CodeActionKind = "notebook"
- // The reason why code actions were requested.
- //
- // @since 3.17.0
- // Code actions were explicitly requested by the user or by an extension.
- CodeActionInvoked CodeActionTriggerKind = 1
- // Code actions were requested automatically.
- //
- // This typically happens when current selection in a file changes, but can
- // also be triggered when file content changes.
- CodeActionAutomatic CodeActionTriggerKind = 2
- // The kind of a completion entry.
- TextCompletion CompletionItemKind = 1
- MethodCompletion CompletionItemKind = 2
- FunctionCompletion CompletionItemKind = 3
- ConstructorCompletion CompletionItemKind = 4
- FieldCompletion CompletionItemKind = 5
- VariableCompletion CompletionItemKind = 6
- ClassCompletion CompletionItemKind = 7
- InterfaceCompletion CompletionItemKind = 8
- ModuleCompletion CompletionItemKind = 9
- PropertyCompletion CompletionItemKind = 10
- UnitCompletion CompletionItemKind = 11
- ValueCompletion CompletionItemKind = 12
- EnumCompletion CompletionItemKind = 13
- KeywordCompletion CompletionItemKind = 14
- SnippetCompletion CompletionItemKind = 15
- ColorCompletion CompletionItemKind = 16
- FileCompletion CompletionItemKind = 17
- ReferenceCompletion CompletionItemKind = 18
- FolderCompletion CompletionItemKind = 19
- EnumMemberCompletion CompletionItemKind = 20
- ConstantCompletion CompletionItemKind = 21
- StructCompletion CompletionItemKind = 22
- EventCompletion CompletionItemKind = 23
- OperatorCompletion CompletionItemKind = 24
- TypeParameterCompletion CompletionItemKind = 25
- // Completion item tags are extra annotations that tweak the rendering of a completion
- // item.
- //
- // @since 3.15.0
- // Render a completion as obsolete, usually using a strike-out.
- ComplDeprecated CompletionItemTag = 1
- // How a completion was triggered
- // Completion was triggered by typing an identifier (24x7 code
- // complete), manual invocation (e.g Ctrl+Space) or via API.
- Invoked CompletionTriggerKind = 1
- // Completion was triggered by a trigger character specified by
- // the `triggerCharacters` properties of the `CompletionRegistrationOptions`.
- TriggerCharacter CompletionTriggerKind = 2
- // Completion was re-triggered as current completion list is incomplete
- TriggerForIncompleteCompletions CompletionTriggerKind = 3
- // The diagnostic's severity.
- // Reports an error.
- SeverityError DiagnosticSeverity = 1
- // Reports a warning.
- SeverityWarning DiagnosticSeverity = 2
- // Reports an information.
- SeverityInformation DiagnosticSeverity = 3
- // Reports a hint.
- SeverityHint DiagnosticSeverity = 4
- // The diagnostic tags.
- //
- // @since 3.15.0
- // Unused or unnecessary code.
- //
- // Clients are allowed to render diagnostics with this tag faded out instead of having
- // an error squiggle.
- Unnecessary DiagnosticTag = 1
- // Deprecated or obsolete code.
- //
- // Clients are allowed to rendered diagnostics with this tag strike through.
- Deprecated DiagnosticTag = 2
- // The document diagnostic report kinds.
- //
- // @since 3.17.0
- // A diagnostic report with a full
- // set of problems.
- DiagnosticFull DocumentDiagnosticReportKind = "full"
- // A report indicating that the last
- // returned report is still accurate.
- DiagnosticUnchanged DocumentDiagnosticReportKind = "unchanged"
- // A document highlight kind.
- // A textual occurrence.
- Text DocumentHighlightKind = 1
- // Read-access of a symbol, like reading a variable.
- Read DocumentHighlightKind = 2
- // Write-access of a symbol, like writing to a variable.
- Write DocumentHighlightKind = 3
- // Predefined error codes.
- ParseError ErrorCodes = -32700
- InvalidRequest ErrorCodes = -32600
- MethodNotFound ErrorCodes = -32601
- InvalidParams ErrorCodes = -32602
- InternalError ErrorCodes = -32603
- // Error code indicating that a server received a notification or
- // request before the server has received the `initialize` request.
- ServerNotInitialized ErrorCodes = -32002
- UnknownErrorCode ErrorCodes = -32001
- // Applying the workspace change is simply aborted if one of the changes provided
- // fails. All operations executed before the failing operation stay executed.
- Abort FailureHandlingKind = "abort"
- // All operations are executed transactional. That means they either all
- // succeed or no changes at all are applied to the workspace.
- Transactional FailureHandlingKind = "transactional"
- // If the workspace edit contains only textual file changes they are executed transactional.
- // If resource changes (create, rename or delete file) are part of the change the failure
- // handling strategy is abort.
- TextOnlyTransactional FailureHandlingKind = "textOnlyTransactional"
- // The client tries to undo the operations already executed. But there is no
- // guarantee that this is succeeding.
- Undo FailureHandlingKind = "undo"
- // The file event type
- // The file got created.
- Created FileChangeType = 1
- // The file got changed.
- Changed FileChangeType = 2
- // The file got deleted.
- Deleted FileChangeType = 3
- // A pattern kind describing if a glob pattern matches a file a folder or
- // both.
- //
- // @since 3.16.0
- // The pattern matches a file only.
- FilePattern FileOperationPatternKind = "file"
- // The pattern matches a folder only.
- FolderPattern FileOperationPatternKind = "folder"
- // A set of predefined range kinds.
- // Folding range for a comment
- Comment FoldingRangeKind = "comment"
- // Folding range for an import or include
- Imports FoldingRangeKind = "imports"
- // Folding range for a region (e.g. `#region`)
- Region FoldingRangeKind = "region"
- // Inlay hint kinds.
- //
- // @since 3.17.0
- // An inlay hint that for a type annotation.
- Type InlayHintKind = 1
- // An inlay hint that is for a parameter.
- Parameter InlayHintKind = 2
- // Describes how an {@link InlineCompletionItemProvider inline completion provider} was triggered.
- //
- // @since 3.18.0
- // @proposed
- // Completion was triggered explicitly by a user gesture.
- InlineInvoked InlineCompletionTriggerKind = 1
- // Completion was triggered automatically while editing.
- InlineAutomatic InlineCompletionTriggerKind = 2
- // Defines whether the insert text in a completion item should be interpreted as
- // plain text or a snippet.
- // The primary text to be inserted is treated as a plain string.
- PlainTextTextFormat InsertTextFormat = 1
- // The primary text to be inserted is treated as a snippet.
- //
- // A snippet can define tab stops and placeholders with `$1`, `$2`
- // and `${3:foo}`. `$0` defines the final tab stop, it defaults to
- // the end of the snippet. Placeholders with equal identifiers are linked,
- // that is typing in one will update others too.
- //
- // See also: https://microsoft.github.io/language-server-protocol/specifications/specification-current/#snippet_syntax
- SnippetTextFormat InsertTextFormat = 2
- // How whitespace and indentation is handled during completion
- // item insertion.
- //
- // @since 3.16.0
- // The insertion or replace strings is taken as it is. If the
- // value is multi line the lines below the cursor will be
- // inserted using the indentation defined in the string value.
- // The client will not apply any kind of adjustments to the
- // string.
- AsIs InsertTextMode = 1
- // The editor adjusts leading whitespace of new lines so that
- // they match the indentation up to the cursor of the line for
- // which the item is accepted.
- //
- // Consider a line like this: <2tabs><cursor><3tabs>foo. Accepting a
- // multi line completion item is indented using 2 tabs and all
- // following lines inserted will be indented using 2 tabs as well.
- AdjustIndentation InsertTextMode = 2
- // A request failed but it was syntactically correct, e.g the
- // method name was known and the parameters were valid. The error
- // message should contain human readable information about why
- // the request failed.
- //
- // @since 3.17.0
- RequestFailed LSPErrorCodes = -32803
- // The server cancelled the request. This error code should
- // only be used for requests that explicitly support being
- // server cancellable.
- //
- // @since 3.17.0
- ServerCancelled LSPErrorCodes = -32802
- // The server detected that the content of a document got
- // modified outside normal conditions. A server should
- // NOT send this error code if it detects a content change
- // in it unprocessed messages. The result even computed
- // on an older state might still be useful for the client.
- //
- // If a client decides that a result is not of any use anymore
- // the client should cancel the request.
- ContentModified LSPErrorCodes = -32801
- // The client has canceled a request and a server has detected
- // the cancel.
- RequestCancelled LSPErrorCodes = -32800
- // Predefined Language kinds
- // @since 3.18.0
- // @proposed
- LangABAP LanguageKind = "abap"
- LangWindowsBat LanguageKind = "bat"
- LangBibTeX LanguageKind = "bibtex"
- LangClojure LanguageKind = "clojure"
- LangCoffeescript LanguageKind = "coffeescript"
- LangC LanguageKind = "c"
- LangCPP LanguageKind = "cpp"
- LangCSharp LanguageKind = "csharp"
- LangCSS LanguageKind = "css"
- // @since 3.18.0
- // @proposed
- LangD LanguageKind = "d"
- // @since 3.18.0
- // @proposed
- LangDelphi LanguageKind = "pascal"
- LangDiff LanguageKind = "diff"
- LangDart LanguageKind = "dart"
- LangDockerfile LanguageKind = "dockerfile"
- LangElixir LanguageKind = "elixir"
- LangErlang LanguageKind = "erlang"
- LangFSharp LanguageKind = "fsharp"
- LangGitCommit LanguageKind = "git-commit"
- LangGitRebase LanguageKind = "rebase"
- LangGo LanguageKind = "go"
- LangGroovy LanguageKind = "groovy"
- LangHandlebars LanguageKind = "handlebars"
- LangHaskell LanguageKind = "haskell"
- LangHTML LanguageKind = "html"
- LangIni LanguageKind = "ini"
- LangJava LanguageKind = "java"
- LangJavaScript LanguageKind = "javascript"
- LangJavaScriptReact LanguageKind = "javascriptreact"
- LangJSON LanguageKind = "json"
- LangLaTeX LanguageKind = "latex"
- LangLess LanguageKind = "less"
- LangLua LanguageKind = "lua"
- LangMakefile LanguageKind = "makefile"
- LangMarkdown LanguageKind = "markdown"
- LangObjectiveC LanguageKind = "objective-c"
- LangObjectiveCPP LanguageKind = "objective-cpp"
- // @since 3.18.0
- // @proposed
- LangPascal LanguageKind = "pascal"
- LangPerl LanguageKind = "perl"
- LangPerl6 LanguageKind = "perl6"
- LangPHP LanguageKind = "php"
- LangPowershell LanguageKind = "powershell"
- LangPug LanguageKind = "jade"
- LangPython LanguageKind = "python"
- LangR LanguageKind = "r"
- LangRazor LanguageKind = "razor"
- LangRuby LanguageKind = "ruby"
- LangRust LanguageKind = "rust"
- LangSCSS LanguageKind = "scss"
- LangSASS LanguageKind = "sass"
- LangScala LanguageKind = "scala"
- LangShaderLab LanguageKind = "shaderlab"
- LangShellScript LanguageKind = "shellscript"
- LangSQL LanguageKind = "sql"
- LangSwift LanguageKind = "swift"
- LangTypeScript LanguageKind = "typescript"
- LangTypeScriptReact LanguageKind = "typescriptreact"
- LangTeX LanguageKind = "tex"
- LangVisualBasic LanguageKind = "vb"
- LangXML LanguageKind = "xml"
- LangXSL LanguageKind = "xsl"
- LangYAML LanguageKind = "yaml"
- // Describes the content type that a client supports in various
- // result literals like `Hover`, `ParameterInfo` or `CompletionItem`.
- //
- // Please note that `MarkupKinds` must not start with a `$`. This kinds
- // are reserved for internal usage.
- // Plain text is supported as a content format
- PlainText MarkupKind = "plaintext"
- // Markdown is supported as a content format
- Markdown MarkupKind = "markdown"
- // The message type
- // An error message.
- Error MessageType = 1
- // A warning message.
- Warning MessageType = 2
- // An information message.
- Info MessageType = 3
- // A log message.
- Log MessageType = 4
- // A debug message.
- //
- // @since 3.18.0
- // @proposed
- Debug MessageType = 5
- // The moniker kind.
- //
- // @since 3.16.0
- // The moniker represent a symbol that is imported into a project
- Import MonikerKind = "import"
- // The moniker represents a symbol that is exported from a project
- Export MonikerKind = "export"
- // The moniker represents a symbol that is local to a project (e.g. a local
- // variable of a function, a class not visible outside the project, ...)
- Local MonikerKind = "local"
- // A notebook cell kind.
- //
- // @since 3.17.0
- // A markup-cell is formatted source that is used for display.
- Markup NotebookCellKind = 1
- // A code-cell is source code.
- Code NotebookCellKind = 2
- // A set of predefined position encoding kinds.
- //
- // @since 3.17.0
- // Character offsets count UTF-8 code units (e.g. bytes).
- UTF8 PositionEncodingKind = "utf-8"
- // Character offsets count UTF-16 code units.
- //
- // This is the default and must always be supported
- // by servers
- UTF16 PositionEncodingKind = "utf-16"
- // Character offsets count UTF-32 code units.
- //
- // Implementation note: these are the same as Unicode codepoints,
- // so this `PositionEncodingKind` may also be used for an
- // encoding-agnostic representation of character offsets.
- UTF32 PositionEncodingKind = "utf-32"
- // The client's default behavior is to select the identifier
- // according the to language's syntax rule.
- Identifier PrepareSupportDefaultBehavior = 1
- // Supports creating new files and folders.
- Create ResourceOperationKind = "create"
- // Supports renaming existing files and folders.
- Rename ResourceOperationKind = "rename"
- // Supports deleting existing files and folders.
- Delete ResourceOperationKind = "delete"
- // A set of predefined token modifiers. This set is not fixed
- // an clients can specify additional token types via the
- // corresponding client capabilities.
- //
- // @since 3.16.0
- ModDeclaration SemanticTokenModifiers = "declaration"
- ModDefinition SemanticTokenModifiers = "definition"
- ModReadonly SemanticTokenModifiers = "readonly"
- ModStatic SemanticTokenModifiers = "static"
- ModDeprecated SemanticTokenModifiers = "deprecated"
- ModAbstract SemanticTokenModifiers = "abstract"
- ModAsync SemanticTokenModifiers = "async"
- ModModification SemanticTokenModifiers = "modification"
- ModDocumentation SemanticTokenModifiers = "documentation"
- ModDefaultLibrary SemanticTokenModifiers = "defaultLibrary"
- // A set of predefined token types. This set is not fixed
- // an clients can specify additional token types via the
- // corresponding client capabilities.
- //
- // @since 3.16.0
- NamespaceType SemanticTokenTypes = "namespace"
- // Represents a generic type. Acts as a fallback for types which can't be mapped to
- // a specific type like class or enum.
- TypeType SemanticTokenTypes = "type"
- ClassType SemanticTokenTypes = "class"
- EnumType SemanticTokenTypes = "enum"
- InterfaceType SemanticTokenTypes = "interface"
- StructType SemanticTokenTypes = "struct"
- TypeParameterType SemanticTokenTypes = "typeParameter"
- ParameterType SemanticTokenTypes = "parameter"
- VariableType SemanticTokenTypes = "variable"
- PropertyType SemanticTokenTypes = "property"
- EnumMemberType SemanticTokenTypes = "enumMember"
- EventType SemanticTokenTypes = "event"
- FunctionType SemanticTokenTypes = "function"
- MethodType SemanticTokenTypes = "method"
- MacroType SemanticTokenTypes = "macro"
- KeywordType SemanticTokenTypes = "keyword"
- ModifierType SemanticTokenTypes = "modifier"
- CommentType SemanticTokenTypes = "comment"
- StringType SemanticTokenTypes = "string"
- NumberType SemanticTokenTypes = "number"
- RegexpType SemanticTokenTypes = "regexp"
- OperatorType SemanticTokenTypes = "operator"
- // @since 3.17.0
- DecoratorType SemanticTokenTypes = "decorator"
- // @since 3.18.0
- LabelType SemanticTokenTypes = "label"
- // How a signature help was triggered.
- //
- // @since 3.15.0
- // Signature help was invoked manually by the user or by a command.
- SigInvoked SignatureHelpTriggerKind = 1
- // Signature help was triggered by a trigger character.
- SigTriggerCharacter SignatureHelpTriggerKind = 2
- // Signature help was triggered by the cursor moving or by the document content changing.
- SigContentChange SignatureHelpTriggerKind = 3
- // A symbol kind.
- File SymbolKind = 1
- Module SymbolKind = 2
- Namespace SymbolKind = 3
- Package SymbolKind = 4
- Class SymbolKind = 5
- Method SymbolKind = 6
- Property SymbolKind = 7
- Field SymbolKind = 8
- Constructor SymbolKind = 9
- Enum SymbolKind = 10
- Interface SymbolKind = 11
- Function SymbolKind = 12
- Variable SymbolKind = 13
- Constant SymbolKind = 14
- String SymbolKind = 15
- Number SymbolKind = 16
- Boolean SymbolKind = 17
- Array SymbolKind = 18
- Object SymbolKind = 19
- Key SymbolKind = 20
- Null SymbolKind = 21
- EnumMember SymbolKind = 22
- Struct SymbolKind = 23
- Event SymbolKind = 24
- Operator SymbolKind = 25
- TypeParameter SymbolKind = 26
- // Symbol tags are extra annotations that tweak the rendering of a symbol.
- //
- // @since 3.16
- // Render a symbol as obsolete, usually using a strike-out.
- DeprecatedSymbol SymbolTag = 1
- // Represents reasons why a text document is saved.
- // Manually triggered, e.g. by the user pressing save, by starting debugging,
- // or by an API call.
- Manual TextDocumentSaveReason = 1
- // Automatic after a delay.
- AfterDelay TextDocumentSaveReason = 2
- // When the editor lost focus.
- FocusOut TextDocumentSaveReason = 3
- // Defines how the host (editor) should sync
- // document changes to the language server.
- // Documents should not be synced at all.
- None TextDocumentSyncKind = 0
- // Documents are synced by always sending the full content
- // of the document.
- Full TextDocumentSyncKind = 1
- // Documents are synced by sending the full content on open.
- // After that only incremental updates to the document are
- // send.
- Incremental TextDocumentSyncKind = 2
- Relative TokenFormat = "relative"
- // Turn tracing off.
- Off TraceValue = "off"
- // Trace messages only.
- Messages TraceValue = "messages"
- // Verbose message tracing.
- Verbose TraceValue = "verbose"
- // Moniker uniqueness level to define scope of the moniker.
- //
- // @since 3.16.0
- // The moniker is only unique inside a document
- Document UniquenessLevel = "document"
- // The moniker is unique inside a project for which a dump got created
- Project UniquenessLevel = "project"
- // The moniker is unique inside the group to which a project belongs
- Group UniquenessLevel = "group"
- // The moniker is unique inside the moniker scheme.
- Scheme UniquenessLevel = "scheme"
- // The moniker is globally unique
- Global UniquenessLevel = "global"
- // Interested in create events.
- WatchCreate WatchKind = 1
- // Interested in change events
- WatchChange WatchKind = 2
- // Interested in delete events
- WatchDelete WatchKind = 4
-)
@@ -1,229 +0,0 @@
-// Copyright 2023 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package protocol
-
-// This file declares URI, DocumentUri, and its methods.
-//
-// For the LSP definition of these types, see
-// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#uri
-
-import (
- "fmt"
- "net/url"
- "path/filepath"
- "strings"
- "unicode"
-)
-
-// A DocumentURI is the URI of a client editor document.
-//
-// According to the LSP specification:
-//
-// Care should be taken to handle encoding in URIs. For
-// example, some clients (such as VS Code) may encode colons
-// in drive letters while others do not. The URIs below are
-// both valid, but clients and servers should be consistent
-// with the form they use themselves to ensure the other party
-// doesn’t interpret them as distinct URIs. Clients and
-// servers should not assume that each other are encoding the
-// same way (for example a client encoding colons in drive
-// letters cannot assume server responses will have encoded
-// colons). The same applies to casing of drive letters - one
-// party should not assume the other party will return paths
-// with drive letters cased the same as it.
-//
-// file:///c:/project/readme.md
-// file:///C%3A/project/readme.md
-//
-// This is done during JSON unmarshalling;
-// see [DocumentURI.UnmarshalText] for details.
-type DocumentURI string
-
-// A URI is an arbitrary URL (e.g. https), not necessarily a file.
-type URI = string
-
-// UnmarshalText implements decoding of DocumentUri values.
-//
-// In particular, it implements a systematic correction of various odd
-// features of the definition of DocumentUri in the LSP spec that
-// appear to be workarounds for bugs in VS Code. For example, it may
-// URI-encode the URI itself, so that colon becomes %3A, and it may
-// send file://foo.go URIs that have two slashes (not three) and no
-// hostname.
-//
-// We use UnmarshalText, not UnmarshalJSON, because it is called even
-// for non-addressable values such as keys and values of map[K]V,
-// where there is no pointer of type *K or *V on which to call
-// UnmarshalJSON. (See Go issue #28189 for more detail.)
-//
-// Non-empty DocumentUris are valid "file"-scheme URIs.
-// The empty DocumentUri is valid.
-func (uri *DocumentURI) UnmarshalText(data []byte) (err error) {
- *uri, err = ParseDocumentURI(string(data))
- return
-}
-
-// Path returns the file path for the given URI.
-//
-// DocumentUri("").Path() returns the empty string.
-//
-// Path panics if called on a URI that is not a valid filename.
-func (uri DocumentURI) Path() (string, error) {
- filename, err := filename(uri)
- if err != nil {
- // e.g. ParseRequestURI failed.
- //
- // This can only affect DocumentUris created by
- // direct string manipulation; all DocumentUris
- // received from the client pass through
- // ParseRequestURI, which ensures validity.
- return "", fmt.Errorf("invalid URI %q: %w", uri, err)
- }
- return filepath.FromSlash(filename), nil
-}
-
-// Dir returns the URI for the directory containing the receiver.
-func (uri DocumentURI) Dir() (DocumentURI, error) {
- // XXX: Legacy comment:
- // This function could be more efficiently implemented by avoiding any call
- // to Path(), but at least consolidates URI manipulation.
-
- path, err := uri.DirPath()
- if err != nil {
- return "", fmt.Errorf("invalid URI %q: %w", uri, err)
- }
-
- return URIFromPath(path), nil
-}
-
-// DirPath returns the file path to the directory containing this URI, which
-// must be a file URI.
-func (uri DocumentURI) DirPath() (string, error) {
- path, err := uri.Path()
- if err != nil {
- return "", err
- }
- return filepath.Dir(path), nil
-}
-
-func filename(uri DocumentURI) (string, error) {
- if uri == "" {
- return "", nil
- }
-
- // This conservative check for the common case
- // of a simple non-empty absolute POSIX filename
- // avoids the allocation of a net.URL.
- if strings.HasPrefix(string(uri), "file:///") {
- rest := string(uri)[len("file://"):] // leave one slash
- for i := range len(rest) {
- b := rest[i]
- // Reject these cases:
- if b < ' ' || b == 0x7f || // control character
- b == '%' || b == '+' || // URI escape
- b == ':' || // Windows drive letter
- b == '@' || b == '&' || b == '?' { // authority or query
- goto slow
- }
- }
- return rest, nil
- }
-slow:
-
- u, err := url.ParseRequestURI(string(uri))
- if err != nil {
- return "", err
- }
- if u.Scheme != fileScheme {
- return "", fmt.Errorf("only file URIs are supported, got %q from %q", u.Scheme, uri)
- }
- // If the URI is a Windows URI, we trim the leading "/" and uppercase
- // the drive letter, which will never be case sensitive.
- if isWindowsDriveURIPath(u.Path) {
- u.Path = strings.ToUpper(string(u.Path[1])) + u.Path[2:]
- }
-
- return u.Path, nil
-}
-
-// ParseDocumentURI interprets a string as a DocumentUri, applying VS
-// Code workarounds; see [DocumentURI.UnmarshalText] for details.
-func ParseDocumentURI(s string) (DocumentURI, error) {
- if s == "" {
- return "", nil
- }
-
- if !strings.HasPrefix(s, "file://") {
- return "", fmt.Errorf("DocumentUri scheme is not 'file': %s", s)
- }
-
- // VS Code sends URLs with only two slashes,
- // which are invalid. golang/go#39789.
- if !strings.HasPrefix(s, "file:///") {
- s = "file:///" + s[len("file://"):]
- }
-
- // Even though the input is a URI, it may not be in canonical form. VS Code
- // in particular over-escapes :, @, etc. Unescape and re-encode to canonicalize.
- path, err := url.PathUnescape(s[len("file://"):])
- if err != nil {
- return "", err
- }
-
- // File URIs from Windows may have lowercase drive letters.
- // Since drive letters are guaranteed to be case insensitive,
- // we change them to uppercase to remain consistent.
- // For example, file:///c:/x/y/z becomes file:///C:/x/y/z.
- if isWindowsDriveURIPath(path) {
- path = path[:1] + strings.ToUpper(string(path[1])) + path[2:]
- }
- u := url.URL{Scheme: fileScheme, Path: path}
- return DocumentURI(u.String()), nil
-}
-
-// URIFromPath returns DocumentUri for the supplied file path.
-// Given "", it returns "".
-func URIFromPath(path string) DocumentURI {
- if path == "" {
- return ""
- }
- if !isWindowsDrivePath(path) {
- if abs, err := filepath.Abs(path); err == nil {
- path = abs
- }
- }
- // Check the file path again, in case it became absolute.
- if isWindowsDrivePath(path) {
- path = "/" + strings.ToUpper(string(path[0])) + path[1:]
- }
- path = filepath.ToSlash(path)
- u := url.URL{
- Scheme: fileScheme,
- Path: path,
- }
- return DocumentURI(u.String())
-}
-
-const fileScheme = "file"
-
-// isWindowsDrivePath returns true if the file path is of the form used by
-// Windows. We check if the path begins with a drive letter, followed by a ":".
-// For example: C:/x/y/z.
-func isWindowsDrivePath(path string) bool {
- if len(path) < 3 {
- return false
- }
- return unicode.IsLetter(rune(path[0])) && path[1] == ':'
-}
-
-// isWindowsDriveURIPath returns true if the file URI is of the format used by
-// Windows URIs. The url.Parse package does not specially handle Windows paths
-// (see golang/go#6027), so we check if the URI path has a drive prefix (e.g. "/C:").
-func isWindowsDriveURIPath(uri string) bool {
- if len(uri) < 4 {
- return false
- }
- return uri[0] == '/' && unicode.IsLetter(rune(uri[1])) && uri[2] == ':'
-}
@@ -0,0 +1,37 @@
+package lsp
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestHasRootMarkers(t *testing.T) {
+ t.Parallel()
+
+ // Create a temporary directory for testing
+ tmpDir := t.TempDir()
+
+ // Test with empty root markers (should return true)
+ require.True(t, HasRootMarkers(tmpDir, []string{}))
+
+ // Test with non-existent markers
+ require.False(t, HasRootMarkers(tmpDir, []string{"go.mod", "package.json"}))
+
+ // Create a go.mod file
+ goModPath := filepath.Join(tmpDir, "go.mod")
+ err := os.WriteFile(goModPath, []byte("module test"), 0o644)
+ require.NoError(t, err)
+
+ // Test with existing marker
+ require.True(t, HasRootMarkers(tmpDir, []string{"go.mod", "package.json"}))
+
+ // Test with only non-existent markers
+ require.False(t, HasRootMarkers(tmpDir, []string{"package.json", "Cargo.toml"}))
+
+ // Test with glob patterns
+ require.True(t, HasRootMarkers(tmpDir, []string{"*.mod"}))
+ require.False(t, HasRootMarkers(tmpDir, []string{"*.json"}))
+}
@@ -1,275 +0,0 @@
-package lsp
-
-import (
- "bufio"
- "context"
- "encoding/json"
- "fmt"
- "io"
- "log/slog"
- "strings"
-
- "github.com/charmbracelet/crush/internal/config"
-)
-
-// WriteMessage writes an LSP message to the given writer
-func WriteMessage(w io.Writer, msg *Message) error {
- data, err := json.Marshal(msg)
- if err != nil {
- return fmt.Errorf("failed to marshal message: %w", err)
- }
- cfg := config.Get()
-
- if cfg.Options.DebugLSP {
- slog.Debug("Sending message to server", "method", msg.Method, "id", msg.ID)
- }
-
- _, err = fmt.Fprintf(w, "Content-Length: %d\r\n\r\n", len(data))
- if err != nil {
- return fmt.Errorf("failed to write header: %w", err)
- }
-
- _, err = w.Write(data)
- if err != nil {
- return fmt.Errorf("failed to write message: %w", err)
- }
-
- return nil
-}
-
-// ReadMessage reads a single LSP message from the given reader
-func ReadMessage(r *bufio.Reader) (*Message, error) {
- cfg := config.Get()
- // Read headers
- var contentLength int
- for {
- line, err := r.ReadString('\n')
- if err != nil {
- return nil, fmt.Errorf("failed to read header: %w", err)
- }
- line = strings.TrimSpace(line)
-
- if cfg.Options.DebugLSP {
- slog.Debug("Received header", "line", line)
- }
-
- if line == "" {
- break // End of headers
- }
-
- if strings.HasPrefix(line, "Content-Length: ") {
- _, err := fmt.Sscanf(line, "Content-Length: %d", &contentLength)
- if err != nil {
- return nil, fmt.Errorf("invalid Content-Length: %w", err)
- }
- }
- }
-
- if cfg.Options.DebugLSP {
- slog.Debug("Content-Length", "length", contentLength)
- }
-
- // Read content
- content := make([]byte, contentLength)
- _, err := io.ReadFull(r, content)
- if err != nil {
- return nil, fmt.Errorf("failed to read content: %w", err)
- }
-
- if cfg.Options.DebugLSP {
- slog.Debug("Received content", "content", string(content))
- }
-
- // Parse message
- var msg Message
- if err := json.Unmarshal(content, &msg); err != nil {
- return nil, fmt.Errorf("failed to unmarshal message: %w", err)
- }
-
- return &msg, nil
-}
-
-// handleMessages reads and dispatches messages in a loop
-func (c *Client) handleMessages() {
- cfg := config.Get()
- for {
- msg, err := ReadMessage(c.stdout)
- if err != nil {
- if cfg.Options.DebugLSP {
- slog.Error("Error reading message", "error", err)
- }
- return
- }
-
- // Handle server->client request (has both Method and ID)
- if msg.Method != "" && msg.ID != 0 {
- if cfg.Options.DebugLSP {
- slog.Debug("Received request from server", "method", msg.Method, "id", msg.ID)
- }
-
- response := &Message{
- JSONRPC: "2.0",
- ID: msg.ID,
- }
-
- // Look up handler for this method
- c.serverHandlersMu.RLock()
- handler, ok := c.serverRequestHandlers[msg.Method]
- c.serverHandlersMu.RUnlock()
-
- if ok {
- result, err := handler(msg.Params)
- if err != nil {
- response.Error = &ResponseError{
- Code: -32603,
- Message: err.Error(),
- }
- } else {
- rawJSON, err := json.Marshal(result)
- if err != nil {
- response.Error = &ResponseError{
- Code: -32603,
- Message: fmt.Sprintf("failed to marshal response: %v", err),
- }
- } else {
- response.Result = rawJSON
- }
- }
- } else {
- response.Error = &ResponseError{
- Code: -32601,
- Message: fmt.Sprintf("method not found: %s", msg.Method),
- }
- }
-
- // Send response back to server
- if err := WriteMessage(c.stdin, response); err != nil {
- slog.Error("Error sending response to server", "error", err)
- }
-
- continue
- }
-
- // Handle notification (has Method but no ID)
- if msg.Method != "" && msg.ID == 0 {
- c.notificationMu.RLock()
- handler, ok := c.notificationHandlers[msg.Method]
- c.notificationMu.RUnlock()
-
- if ok {
- if cfg.Options.DebugLSP {
- slog.Debug("Handling notification", "method", msg.Method)
- }
- go handler(msg.Params)
- } else if cfg.Options.DebugLSP {
- slog.Debug("No handler for notification", "method", msg.Method)
- }
- continue
- }
-
- // Handle response to our request (has ID but no Method)
- if msg.ID != 0 && msg.Method == "" {
- c.handlersMu.RLock()
- ch, ok := c.handlers[msg.ID]
- c.handlersMu.RUnlock()
-
- if ok {
- if cfg.Options.DebugLSP {
- slog.Debug("Received response for request", "id", msg.ID)
- }
- ch <- msg
- close(ch)
- } else if cfg.Options.DebugLSP {
- slog.Debug("No handler for response", "id", msg.ID)
- }
- }
- }
-}
-
-// Call makes a request and waits for the response
-func (c *Client) Call(ctx context.Context, method string, params any, result any) error {
- cfg := config.Get()
- id := c.nextID.Add(1)
-
- if cfg.Options.DebugLSP {
- slog.Debug("Making call", "method", method, "id", id)
- }
-
- msg, err := NewRequest(id, method, params)
- if err != nil {
- return fmt.Errorf("failed to create request: %w", err)
- }
-
- // Create response channel
- ch := make(chan *Message, 1)
- c.handlersMu.Lock()
- c.handlers[id] = ch
- c.handlersMu.Unlock()
-
- defer func() {
- c.handlersMu.Lock()
- delete(c.handlers, id)
- c.handlersMu.Unlock()
- }()
-
- // Send request
- if err := WriteMessage(c.stdin, msg); err != nil {
- return fmt.Errorf("failed to send request: %w", err)
- }
-
- if cfg.Options.DebugLSP {
- slog.Debug("Request sent", "method", method, "id", id)
- }
-
- // Wait for response
- select {
- case <-ctx.Done():
- return ctx.Err()
- case resp := <-ch:
- if cfg.Options.DebugLSP {
- slog.Debug("Received response", "id", id)
- }
-
- if resp.Error != nil {
- return fmt.Errorf("request failed: %s (code: %d)", resp.Error.Message, resp.Error.Code)
- }
-
- if result != nil {
- // If result is a json.RawMessage, just copy the raw bytes
- if rawMsg, ok := result.(*json.RawMessage); ok {
- *rawMsg = resp.Result
- return nil
- }
- // Otherwise unmarshal into the provided type
- if err := json.Unmarshal(resp.Result, result); err != nil {
- return fmt.Errorf("failed to unmarshal result: %w", err)
- }
- }
-
- return nil
- }
-}
-
-// Notify sends a notification (a request without an ID that doesn't expect a response)
-func (c *Client) Notify(ctx context.Context, method string, params any) error {
- cfg := config.Get()
- if cfg.Options.DebugLSP {
- slog.Debug("Sending notification", "method", method)
- }
-
- msg, err := NewNotification(method, params)
- if err != nil {
- return fmt.Errorf("failed to create notification: %w", err)
- }
-
- if err := WriteMessage(c.stdin, msg); err != nil {
- return fmt.Errorf("failed to send notification: %w", err)
- }
-
- return nil
-}
-
-type (
- NotificationHandler func(params json.RawMessage)
- ServerRequestHandler func(params json.RawMessage) (any, error)
-)
@@ -7,7 +7,7 @@ import (
"sort"
"strings"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
func applyTextEdits(uri protocol.DocumentURI, edits []protocol.TextEdit) error {
@@ -1,25 +0,0 @@
-//go:build freebsd || openbsd || netbsd || dragonfly
-
-package watcher
-
-import "syscall"
-
-func Ulimit() (uint64, error) {
- var currentLimit uint64 = 0
- var rLimit syscall.Rlimit
- err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return 0, err
- }
- currentLimit = uint64(rLimit.Cur)
- rLimit.Cur = rLimit.Max / 10 * 8
- err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- err = syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- return uint64(rLimit.Cur), nil
-}
@@ -1,24 +0,0 @@
-//go:build darwin
-
-package watcher
-
-import "syscall"
-
-func Ulimit() (uint64, error) {
- var rLimit syscall.Rlimit
- err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return 0, err
- }
- currentLimit := rLimit.Cur
- rLimit.Cur = rLimit.Max / 10 * 8
- err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- err = syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- return rLimit.Cur, nil
-}
@@ -1,8 +0,0 @@
-//go:build !linux && !darwin && !freebsd && !openbsd && !netbsd && !dragonfly && !windows
-
-package watcher
-
-func Ulimit() (uint64, error) {
- // Fallback for exotic systems - return a reasonable default
- return 2048, nil
-}
@@ -1,25 +0,0 @@
-//go:build linux
-
-package watcher
-
-import "syscall"
-
-func Ulimit() (uint64, error) {
- var currentLimit uint64 = 0
- var rLimit syscall.Rlimit
- err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return 0, err
- }
- currentLimit = rLimit.Cur
- rLimit.Cur = rLimit.Max / 10 * 8
- err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- err = syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- return rLimit.Cur, nil
-}
@@ -1,38 +0,0 @@
-//go:build windows
-
-package watcher
-
-import (
- "syscall"
- "unsafe"
-
- "golang.org/x/sys/windows"
-)
-
-var (
- kernel32 = windows.NewLazyDLL("kernel32.dll")
- procGetProcessHandleCount = kernel32.NewProc("GetProcessHandleCount")
-)
-
-func Ulimit() (uint64, error) {
- // Windows doesn't have the same file descriptor limits as Unix systems
- // Instead, we can get the current handle count for monitoring purposes
- currentProcess := windows.CurrentProcess()
-
- var handleCount uint32
- ret, _, err := procGetProcessHandleCount.Call(
- uintptr(currentProcess),
- uintptr(unsafe.Pointer(&handleCount)),
- )
-
- if ret == 0 {
- // If the call failed, return a reasonable default
- if err != syscall.Errno(0) {
- return 2048, nil
- }
- }
-
- // Windows typically allows much higher handle counts than Unix file descriptors
- // Return the current count, which serves as a baseline for monitoring
- return uint64(handleCount), nil
-}
@@ -1,975 +0,0 @@
-package watcher
-
-import (
- "context"
- "fmt"
- "log/slog"
- "os"
- "path/filepath"
- "strings"
- "sync"
- "time"
-
- "github.com/bmatcuk/doublestar/v4"
- "github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/csync"
-
- "github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
- "github.com/fsnotify/fsnotify"
-)
-
-// WorkspaceWatcher manages LSP file watching
-type WorkspaceWatcher struct {
- client *lsp.Client
- name string
- workspacePath string
-
- debounceTime time.Duration
- debounceMap *csync.Map[string, *time.Timer]
-
- // File watchers registered by the server
- registrations []protocol.FileSystemWatcher
- registrationMu sync.RWMutex
-}
-
-func init() {
- // Ensure the watcher is initialized with a reasonable file limit
- if _, err := Ulimit(); err != nil {
- slog.Error("Error setting file limit", "error", err)
- }
-}
-
-// NewWorkspaceWatcher creates a new workspace watcher
-func NewWorkspaceWatcher(name string, client *lsp.Client) *WorkspaceWatcher {
- return &WorkspaceWatcher{
- name: name,
- client: client,
- debounceTime: 300 * time.Millisecond,
- debounceMap: csync.NewMap[string, *time.Timer](),
- registrations: []protocol.FileSystemWatcher{},
- }
-}
-
-// AddRegistrations adds file watchers to track
-func (w *WorkspaceWatcher) AddRegistrations(ctx context.Context, id string, watchers []protocol.FileSystemWatcher) {
- cfg := config.Get()
-
- slog.Debug("Adding file watcher registrations")
- w.registrationMu.Lock()
- defer w.registrationMu.Unlock()
-
- // Add new watchers
- w.registrations = append(w.registrations, watchers...)
-
- // Print detailed registration information for debugging
- if cfg.Options.DebugLSP {
- slog.Debug("Adding file watcher registrations",
- "id", id,
- "watchers", len(watchers),
- "total", len(w.registrations),
- )
-
- for i, watcher := range watchers {
- slog.Debug("Registration", "index", i+1)
-
- // Log the GlobPattern
- switch v := watcher.GlobPattern.Value.(type) {
- case string:
- slog.Debug("GlobPattern", "pattern", v)
- case protocol.RelativePattern:
- slog.Debug("GlobPattern", "pattern", v.Pattern)
-
- // Log BaseURI details
- switch u := v.BaseURI.Value.(type) {
- case string:
- slog.Debug("BaseURI", "baseURI", u)
- case protocol.DocumentURI:
- slog.Debug("BaseURI", "baseURI", u)
- default:
- slog.Debug("BaseURI", "baseURI", u)
- }
- default:
- slog.Debug("GlobPattern unknown type", "type", fmt.Sprintf("%T", v))
- }
-
- // Log WatchKind
- watchKind := protocol.WatchKind(protocol.WatchChange | protocol.WatchCreate | protocol.WatchDelete)
- if watcher.Kind != nil {
- watchKind = *watcher.Kind
- }
-
- slog.Debug("WatchKind", "kind", watchKind)
- }
- }
-
- // Determine server type for specialized handling
- serverName := w.name
- slog.Debug("Server type detected", "serverName", serverName)
-
- // Check if this server has sent file watchers
- hasFileWatchers := len(watchers) > 0
-
- // For servers that need file preloading, we'll use a smart approach
- if shouldPreloadFiles(serverName) || !hasFileWatchers {
- go func() {
- startTime := time.Now()
- filesOpened := 0
-
- // Determine max files to open based on server type
- maxFilesToOpen := 50 // Default conservative limit
-
- switch serverName {
- case "typescript", "typescript-language-server", "tsserver", "vtsls":
- // TypeScript servers benefit from seeing more files
- maxFilesToOpen = 100
- case "java", "jdtls":
- // Java servers need to see many files for project model
- maxFilesToOpen = 200
- }
-
- // First, open high-priority files
- highPriorityFilesOpened := w.openHighPriorityFiles(ctx, serverName)
- filesOpened += highPriorityFilesOpened
-
- if cfg.Options.DebugLSP {
- slog.Debug("Opened high-priority files",
- "count", highPriorityFilesOpened,
- "serverName", serverName)
- }
-
- // If we've already opened enough high-priority files, we might not need more
- if filesOpened >= maxFilesToOpen {
- if cfg.Options.DebugLSP {
- slog.Debug("Reached file limit with high-priority files",
- "filesOpened", filesOpened,
- "maxFiles", maxFilesToOpen)
- }
- return
- }
-
- // For the remaining slots, walk the directory and open matching files
-
- err := filepath.WalkDir(w.workspacePath, func(path string, d os.DirEntry, err error) error {
- if err != nil {
- return err
- }
-
- // Skip directories that should be excluded
- if d.IsDir() {
- if path != w.workspacePath && shouldExcludeDir(path) {
- if cfg.Options.DebugLSP {
- slog.Debug("Skipping excluded directory", "path", path)
- }
- return filepath.SkipDir
- }
- } else {
- // Process files, but limit the total number
- if filesOpened < maxFilesToOpen {
- // Only process if it's not already open (high-priority files were opened earlier)
- if !w.client.IsFileOpen(path) {
- w.openMatchingFile(ctx, path)
- filesOpened++
-
- // Add a small delay after every 10 files to prevent overwhelming the server
- if filesOpened%10 == 0 {
- time.Sleep(50 * time.Millisecond)
- }
- }
- } else {
- // We've reached our limit, stop walking
- return filepath.SkipAll
- }
- }
-
- return nil
- })
-
- elapsedTime := time.Since(startTime)
- if cfg.Options.DebugLSP {
- slog.Debug("Limited workspace scan complete",
- "filesOpened", filesOpened,
- "maxFiles", maxFilesToOpen,
- "elapsedTime", elapsedTime.Seconds(),
- "workspacePath", w.workspacePath,
- )
- }
-
- if err != nil && cfg.Options.DebugLSP {
- slog.Debug("Error scanning workspace for files to open", "error", err)
- }
- }()
- } else if cfg.Options.DebugLSP {
- slog.Debug("Using on-demand file loading for server", "server", serverName)
- }
-}
-
-// openHighPriorityFiles opens important files for the server type
-// Returns the number of files opened
-func (w *WorkspaceWatcher) openHighPriorityFiles(ctx context.Context, serverName string) int {
- cfg := config.Get()
- filesOpened := 0
-
- // Define patterns for high-priority files based on server type
- var patterns []string
-
- switch serverName {
- case "typescript", "typescript-language-server", "tsserver", "vtsls":
- patterns = []string{
- "**/tsconfig.json",
- "**/package.json",
- "**/jsconfig.json",
- "**/index.ts",
- "**/index.js",
- "**/main.ts",
- "**/main.js",
- }
- case "gopls":
- patterns = []string{
- "**/go.mod",
- "**/go.sum",
- "**/main.go",
- }
- case "rust-analyzer":
- patterns = []string{
- "**/Cargo.toml",
- "**/Cargo.lock",
- "**/src/lib.rs",
- "**/src/main.rs",
- }
- case "python", "pyright", "pylsp":
- patterns = []string{
- "**/pyproject.toml",
- "**/setup.py",
- "**/requirements.txt",
- "**/__init__.py",
- "**/__main__.py",
- }
- case "clangd":
- patterns = []string{
- "**/CMakeLists.txt",
- "**/Makefile",
- "**/compile_commands.json",
- }
- case "java", "jdtls":
- patterns = []string{
- "**/pom.xml",
- "**/build.gradle",
- "**/src/main/java/**/*.java",
- }
- default:
- // For unknown servers, use common configuration files
- patterns = []string{
- "**/package.json",
- "**/Makefile",
- "**/CMakeLists.txt",
- "**/.editorconfig",
- }
- }
-
- // Collect all files to open first
- var filesToOpen []string
-
- // For each pattern, find matching files
- for _, pattern := range patterns {
- // Use doublestar.Glob to find files matching the pattern (supports ** patterns)
- matches, err := doublestar.Glob(os.DirFS(w.workspacePath), pattern)
- if err != nil {
- if cfg.Options.DebugLSP {
- slog.Debug("Error finding high-priority files", "pattern", pattern, "error", err)
- }
- continue
- }
-
- for _, match := range matches {
- // Convert relative path to absolute
- fullPath := filepath.Join(w.workspacePath, match)
-
- // Skip directories and excluded files
- info, err := os.Stat(fullPath)
- if err != nil || info.IsDir() || shouldExcludeFile(fullPath) {
- continue
- }
-
- filesToOpen = append(filesToOpen, fullPath)
-
- // Limit the number of files per pattern
- if len(filesToOpen) >= 5 && (serverName != "java" && serverName != "jdtls") {
- break
- }
- }
- }
-
- // Open files in batches to reduce overhead
- batchSize := 3
- for i := 0; i < len(filesToOpen); i += batchSize {
- end := min(i+batchSize, len(filesToOpen))
-
- // Open batch of files
- for j := i; j < end; j++ {
- fullPath := filesToOpen[j]
- if err := w.client.OpenFile(ctx, fullPath); err != nil {
- if cfg.Options.DebugLSP {
- slog.Debug("Error opening high-priority file", "path", fullPath, "error", err)
- }
- } else {
- filesOpened++
- if cfg.Options.DebugLSP {
- slog.Debug("Opened high-priority file", "path", fullPath)
- }
- }
- }
-
- // Only add delay between batches, not individual files
- if end < len(filesToOpen) {
- time.Sleep(50 * time.Millisecond)
- }
- }
-
- return filesOpened
-}
-
-// WatchWorkspace sets up file watching for a workspace
-func (w *WorkspaceWatcher) WatchWorkspace(ctx context.Context, workspacePath string) {
- cfg := config.Get()
- w.workspacePath = workspacePath
-
- slog.Debug("Starting workspace watcher", "workspacePath", workspacePath, "serverName", w.name)
-
- // Register handler for file watcher registrations from the server
- lsp.RegisterFileWatchHandler(func(id string, watchers []protocol.FileSystemWatcher) {
- w.AddRegistrations(ctx, id, watchers)
- })
-
- watcher, err := fsnotify.NewWatcher()
- if err != nil {
- slog.Error("Error creating watcher", "error", err)
- }
- defer watcher.Close()
-
- // Watch the workspace recursively
- err = filepath.WalkDir(workspacePath, func(path string, d os.DirEntry, err error) error {
- if err != nil {
- return err
- }
-
- // Skip excluded directories (except workspace root)
- if d.IsDir() && path != workspacePath {
- if shouldExcludeDir(path) {
- if cfg.Options.DebugLSP {
- slog.Debug("Skipping excluded directory", "path", path)
- }
- return filepath.SkipDir
- }
- }
-
- // Add directories to watcher
- if d.IsDir() {
- err = watcher.Add(path)
- if err != nil {
- slog.Error("Error watching path", "path", path, "error", err)
- }
- }
-
- return nil
- })
- if err != nil {
- slog.Error("Error walking workspace", "error", err)
- }
-
- // Event loop
- for {
- select {
- case <-ctx.Done():
- return
- case event, ok := <-watcher.Events:
- if !ok {
- return
- }
-
- uri := string(protocol.URIFromPath(event.Name))
-
- // Add new directories to the watcher
- if event.Op&fsnotify.Create != 0 {
- if info, err := os.Stat(event.Name); err == nil {
- if info.IsDir() {
- // Skip excluded directories
- if !shouldExcludeDir(event.Name) {
- if err := watcher.Add(event.Name); err != nil {
- slog.Error("Error adding directory to watcher", "path", event.Name, "error", err)
- }
- }
- } else {
- // For newly created files
- if !shouldExcludeFile(event.Name) {
- w.openMatchingFile(ctx, event.Name)
- }
- }
- }
- }
-
- // Debug logging
- if cfg.Options.DebugLSP {
- matched, kind := w.isPathWatched(event.Name)
- slog.Debug("File event",
- "path", event.Name,
- "operation", event.Op.String(),
- "watched", matched,
- "kind", kind,
- )
- }
-
- // Check if this path should be watched according to server registrations
- if watched, watchKind := w.isPathWatched(event.Name); watched {
- switch {
- case event.Op&fsnotify.Write != 0:
- if watchKind&protocol.WatchChange != 0 {
- w.debounceHandleFileEvent(ctx, uri, protocol.FileChangeType(protocol.Changed))
- }
- case event.Op&fsnotify.Create != 0:
- // Already handled earlier in the event loop
- // Just send the notification if needed
- info, err := os.Stat(event.Name)
- if err != nil {
- slog.Error("Error getting file info", "path", event.Name, "error", err)
- return
- }
- if !info.IsDir() && watchKind&protocol.WatchCreate != 0 {
- w.debounceHandleFileEvent(ctx, uri, protocol.FileChangeType(protocol.Created))
- }
- case event.Op&fsnotify.Remove != 0:
- if watchKind&protocol.WatchDelete != 0 {
- w.handleFileEvent(ctx, uri, protocol.FileChangeType(protocol.Deleted))
- }
- case event.Op&fsnotify.Rename != 0:
- // For renames, first delete
- if watchKind&protocol.WatchDelete != 0 {
- w.handleFileEvent(ctx, uri, protocol.FileChangeType(protocol.Deleted))
- }
-
- // Then check if the new file exists and create an event
- if info, err := os.Stat(event.Name); err == nil && !info.IsDir() {
- if watchKind&protocol.WatchCreate != 0 {
- w.debounceHandleFileEvent(ctx, uri, protocol.FileChangeType(protocol.Created))
- }
- }
- }
- }
- case err, ok := <-watcher.Errors:
- if !ok {
- return
- }
- slog.Error("Error watching file", "error", err)
- }
- }
-}
-
-// isPathWatched checks if a path should be watched based on server registrations
-func (w *WorkspaceWatcher) isPathWatched(path string) (bool, protocol.WatchKind) {
- w.registrationMu.RLock()
- defer w.registrationMu.RUnlock()
-
- // If no explicit registrations, watch everything
- if len(w.registrations) == 0 {
- return true, protocol.WatchKind(protocol.WatchChange | protocol.WatchCreate | protocol.WatchDelete)
- }
-
- // Check each registration
- for _, reg := range w.registrations {
- isMatch := w.matchesPattern(path, reg.GlobPattern)
- if isMatch {
- kind := protocol.WatchKind(protocol.WatchChange | protocol.WatchCreate | protocol.WatchDelete)
- if reg.Kind != nil {
- kind = *reg.Kind
- }
- return true, kind
- }
- }
-
- return false, 0
-}
-
-// matchesGlob handles advanced glob patterns including ** and alternatives
-func matchesGlob(pattern, path string) bool {
- // Handle file extension patterns with braces like *.{go,mod,sum}
- if strings.Contains(pattern, "{") && strings.Contains(pattern, "}") {
- // Extract extensions from pattern like "*.{go,mod,sum}"
- parts := strings.SplitN(pattern, "{", 2)
- if len(parts) == 2 {
- prefix := parts[0]
- extPart := strings.SplitN(parts[1], "}", 2)
- if len(extPart) == 2 {
- extensions := strings.Split(extPart[0], ",")
- suffix := extPart[1]
-
- // Check if the path matches any of the extensions
- for _, ext := range extensions {
- extPattern := prefix + ext + suffix
- isMatch := matchesSimpleGlob(extPattern, path)
- if isMatch {
- return true
- }
- }
- return false
- }
- }
- }
-
- return matchesSimpleGlob(pattern, path)
-}
-
-// matchesSimpleGlob handles glob patterns with ** wildcards
-func matchesSimpleGlob(pattern, path string) bool {
- // Handle special case for **/*.ext pattern (common in LSP)
- if after, ok := strings.CutPrefix(pattern, "**/"); ok {
- rest := after
-
- // If the rest is a simple file extension pattern like *.go
- if strings.HasPrefix(rest, "*.") {
- ext := strings.TrimPrefix(rest, "*")
- isMatch := strings.HasSuffix(path, ext)
- return isMatch
- }
-
- // Otherwise, try to check if the path ends with the rest part
- isMatch := strings.HasSuffix(path, rest)
-
- // If it matches directly, great!
- if isMatch {
- return true
- }
-
- // Otherwise, check if any path component matches
- pathComponents := strings.Split(path, "/")
- for i := range pathComponents {
- subPath := strings.Join(pathComponents[i:], "/")
- if strings.HasSuffix(subPath, rest) {
- return true
- }
- }
-
- return false
- }
-
- // Handle other ** wildcard pattern cases
- if strings.Contains(pattern, "**") {
- parts := strings.Split(pattern, "**")
-
- // Validate the path starts with the first part
- if !strings.HasPrefix(path, parts[0]) && parts[0] != "" {
- return false
- }
-
- // For patterns like "**/*.go", just check the suffix
- if len(parts) == 2 && parts[0] == "" {
- isMatch := strings.HasSuffix(path, parts[1])
- return isMatch
- }
-
- // For other patterns, handle middle part
- remaining := strings.TrimPrefix(path, parts[0])
- if len(parts) == 2 {
- isMatch := strings.HasSuffix(remaining, parts[1])
- return isMatch
- }
- }
-
- // Handle simple * wildcard for file extension patterns (*.go, *.sum, etc)
- if strings.HasPrefix(pattern, "*.") {
- ext := strings.TrimPrefix(pattern, "*")
- isMatch := strings.HasSuffix(path, ext)
- return isMatch
- }
-
- // Fall back to simple matching for simpler patterns
- matched, err := filepath.Match(pattern, path)
- if err != nil {
- slog.Error("Error matching pattern", "pattern", pattern, "path", path, "error", err)
- return false
- }
-
- return matched
-}
-
-// matchesPattern checks if a path matches the glob pattern
-func (w *WorkspaceWatcher) matchesPattern(path string, pattern protocol.GlobPattern) bool {
- patternInfo, err := pattern.AsPattern()
- if err != nil {
- slog.Error("Error parsing pattern", "pattern", pattern, "error", err)
- return false
- }
-
- basePath := patternInfo.GetBasePath()
- patternText := patternInfo.GetPattern()
-
- path = filepath.ToSlash(path)
-
- // For simple patterns without base path
- if basePath == "" {
- // Check if the pattern matches the full path or just the file extension
- fullPathMatch := matchesGlob(patternText, path)
- baseNameMatch := matchesGlob(patternText, filepath.Base(path))
-
- return fullPathMatch || baseNameMatch
- }
-
- if basePath == "" {
- return false
- }
- // For relative patterns
- if basePath, err = protocol.DocumentURI(basePath).Path(); err != nil {
- // XXX: Do we want to return here, or send the error up the stack?
- slog.Error("Error converting base path to URI", "basePath", basePath, "error", err)
- }
-
- basePath = filepath.ToSlash(basePath)
-
- // Make path relative to basePath for matching
- relPath, err := filepath.Rel(basePath, path)
- if err != nil {
- slog.Error("Error getting relative path", "path", path, "basePath", basePath, "error", err)
- return false
- }
- relPath = filepath.ToSlash(relPath)
-
- isMatch := matchesGlob(patternText, relPath)
-
- return isMatch
-}
-
-// debounceHandleFileEvent handles file events with debouncing to reduce notifications
-func (w *WorkspaceWatcher) debounceHandleFileEvent(ctx context.Context, uri string, changeType protocol.FileChangeType) {
- // Create a unique key based on URI and change type
- key := fmt.Sprintf("%s:%d", uri, changeType)
-
- // Cancel existing timer if any
- if timer, exists := w.debounceMap.Get(key); exists {
- timer.Stop()
- }
-
- // Create new timer
- w.debounceMap.Set(key, time.AfterFunc(w.debounceTime, func() {
- w.handleFileEvent(ctx, uri, changeType)
-
- // Cleanup timer after execution
- w.debounceMap.Del(key)
- }))
-}
-
-// handleFileEvent sends file change notifications
-func (w *WorkspaceWatcher) handleFileEvent(ctx context.Context, uri string, changeType protocol.FileChangeType) {
- // If the file is open and it's a change event, use didChange notification
- filePath, err := protocol.DocumentURI(uri).Path()
- if err != nil {
- // XXX: Do we want to return here, or send the error up the stack?
- slog.Error("Error converting URI to path", "uri", uri, "error", err)
- return
- }
-
- if changeType == protocol.FileChangeType(protocol.Deleted) {
- w.client.ClearDiagnosticsForURI(protocol.DocumentURI(uri))
- } else if changeType == protocol.FileChangeType(protocol.Changed) && w.client.IsFileOpen(filePath) {
- err := w.client.NotifyChange(ctx, filePath)
- if err != nil {
- slog.Error("Error notifying change", "error", err)
- }
- return
- }
-
- // Notify LSP server about the file event using didChangeWatchedFiles
- if err := w.notifyFileEvent(ctx, uri, changeType); err != nil {
- slog.Error("Error notifying LSP server about file event", "error", err)
- }
-}
-
-// notifyFileEvent sends a didChangeWatchedFiles notification for a file event
-func (w *WorkspaceWatcher) notifyFileEvent(ctx context.Context, uri string, changeType protocol.FileChangeType) error {
- cfg := config.Get()
- if cfg.Options.DebugLSP {
- slog.Debug("Notifying file event",
- "uri", uri,
- "changeType", changeType,
- )
- }
-
- params := protocol.DidChangeWatchedFilesParams{
- Changes: []protocol.FileEvent{
- {
- URI: protocol.DocumentURI(uri),
- Type: changeType,
- },
- },
- }
-
- return w.client.DidChangeWatchedFiles(ctx, params)
-}
-
-// shouldPreloadFiles determines if we should preload files for a specific language server
-// Some servers work better with preloaded files, others don't need it
-func shouldPreloadFiles(serverName string) bool {
- // TypeScript/JavaScript servers typically need some files preloaded
- // to properly resolve imports and provide intellisense
- switch serverName {
- case "typescript", "typescript-language-server", "tsserver", "vtsls":
- return true
- case "java", "jdtls":
- // Java servers often need to see source files to build the project model
- return true
- default:
- // For most servers, we'll use lazy loading by default
- return false
- }
-}
-
-// Common patterns for directories and files to exclude
-// TODO: make configurable
-var (
- excludedDirNames = map[string]bool{
- ".git": true,
- "node_modules": true,
- "dist": true,
- "build": true,
- "out": true,
- "bin": true,
- ".idea": true,
- ".vscode": true,
- ".cache": true,
- "coverage": true,
- "target": true, // Rust build output
- "vendor": true, // Go vendor directory
- }
-
- excludedFileExtensions = map[string]bool{
- ".swp": true,
- ".swo": true,
- ".tmp": true,
- ".temp": true,
- ".bak": true,
- ".log": true,
- ".o": true, // Object files
- ".so": true, // Shared libraries
- ".dylib": true, // macOS shared libraries
- ".dll": true, // Windows shared libraries
- ".a": true, // Static libraries
- ".exe": true, // Windows executables
- ".lock": true, // Lock files
- }
-
- // Large binary files that shouldn't be opened
- largeBinaryExtensions = map[string]bool{
- ".png": true,
- ".jpg": true,
- ".jpeg": true,
- ".gif": true,
- ".bmp": true,
- ".ico": true,
- ".zip": true,
- ".tar": true,
- ".gz": true,
- ".rar": true,
- ".7z": true,
- ".pdf": true,
- ".mp3": true,
- ".mp4": true,
- ".mov": true,
- ".wav": true,
- ".wasm": true,
- }
-
- // Maximum file size to open (5MB)
- maxFileSize int64 = 5 * 1024 * 1024
-)
-
-// shouldExcludeDir returns true if the directory should be excluded from watching/opening
-func shouldExcludeDir(dirPath string) bool {
- dirName := filepath.Base(dirPath)
-
- // Skip dot directories
- if strings.HasPrefix(dirName, ".") {
- return true
- }
-
- // Skip common excluded directories
- if excludedDirNames[dirName] {
- return true
- }
-
- return false
-}
-
-// shouldExcludeFile returns true if the file should be excluded from opening
-func shouldExcludeFile(filePath string) bool {
- fileName := filepath.Base(filePath)
- cfg := config.Get()
- // Skip dot files
- if strings.HasPrefix(fileName, ".") {
- return true
- }
-
- // Check file extension
- ext := strings.ToLower(filepath.Ext(filePath))
- if excludedFileExtensions[ext] || largeBinaryExtensions[ext] {
- return true
- }
-
- // Skip temporary files
- if strings.HasSuffix(filePath, "~") {
- return true
- }
-
- // Check file size
- info, err := os.Stat(filePath)
- if err != nil {
- // If we can't stat the file, skip it
- return true
- }
-
- // Skip large files
- if info.Size() > maxFileSize {
- if cfg.Options.DebugLSP {
- slog.Debug("Skipping large file",
- "path", filePath,
- "size", info.Size(),
- "maxSize", maxFileSize,
- "debug", cfg.Options.Debug,
- "sizeMB", float64(info.Size())/(1024*1024),
- "maxSizeMB", float64(maxFileSize)/(1024*1024),
- )
- }
- return true
- }
-
- return false
-}
-
-// openMatchingFile opens a file if it matches any of the registered patterns
-func (w *WorkspaceWatcher) openMatchingFile(ctx context.Context, path string) {
- cfg := config.Get()
- // Skip directories
- info, err := os.Stat(path)
- if err != nil || info.IsDir() {
- return
- }
-
- // Skip excluded files
- if shouldExcludeFile(path) {
- return
- }
-
- // Check if this path should be watched according to server registrations
- if watched, _ := w.isPathWatched(path); !watched {
- return
- }
-
- serverName := w.name
-
- // Get server name for specialized handling
- // Check if the file is a high-priority file that should be opened immediately
- // This helps with project initialization for certain language servers
- if isHighPriorityFile(path, serverName) {
- if cfg.Options.DebugLSP {
- slog.Debug("Opening high-priority file", "path", path, "serverName", serverName)
- }
- if err := w.client.OpenFile(ctx, path); err != nil && cfg.Options.DebugLSP {
- slog.Error("Error opening high-priority file", "path", path, "error", err)
- }
- return
- }
-
- // For non-high-priority files, we'll use different strategies based on server type
- if !shouldPreloadFiles(serverName) {
- return
- }
- // For servers that benefit from preloading, open files but with limits
-
- // Check file size - for preloading we're more conservative
- if info.Size() > (1 * 1024 * 1024) { // 1MB limit for preloaded files
- if cfg.Options.DebugLSP {
- slog.Debug("Skipping large file for preloading", "path", path, "size", info.Size())
- }
- return
- }
-
- // Check file extension for common source files
- ext := strings.ToLower(filepath.Ext(path))
-
- // Only preload source files for the specific language
- var shouldOpen bool
- switch serverName {
- case "typescript", "typescript-language-server", "tsserver", "vtsls":
- shouldOpen = ext == ".ts" || ext == ".js" || ext == ".tsx" || ext == ".jsx"
- case "gopls":
- shouldOpen = ext == ".go"
- case "rust-analyzer":
- shouldOpen = ext == ".rs"
- case "python", "pyright", "pylsp":
- shouldOpen = ext == ".py"
- case "clangd":
- shouldOpen = ext == ".c" || ext == ".cpp" || ext == ".h" || ext == ".hpp"
- case "java", "jdtls":
- shouldOpen = ext == ".java"
- }
-
- if shouldOpen {
- // Don't need to check if it's already open - the client.OpenFile handles that
- if err := w.client.OpenFile(ctx, path); err != nil && cfg.Options.DebugLSP {
- slog.Error("Error opening file", "path", path, "error", err)
- }
- }
-}
-
-// isHighPriorityFile determines if a file should be opened immediately
-// regardless of the preloading strategy
-func isHighPriorityFile(path string, serverName string) bool {
- fileName := filepath.Base(path)
- ext := filepath.Ext(path)
-
- switch serverName {
- case "typescript", "typescript-language-server", "tsserver", "vtsls":
- // For TypeScript, we want to open configuration files immediately
- return fileName == "tsconfig.json" ||
- fileName == "package.json" ||
- fileName == "jsconfig.json" ||
- // Also open main entry points
- fileName == "index.ts" ||
- fileName == "index.js" ||
- fileName == "main.ts" ||
- fileName == "main.js"
- case "gopls":
- // For Go, we want to open go.mod files immediately
- return fileName == "go.mod" ||
- fileName == "go.sum" ||
- // Also open main.go files
- fileName == "main.go"
- case "rust-analyzer":
- // For Rust, we want to open Cargo.toml files immediately
- return fileName == "Cargo.toml" ||
- fileName == "Cargo.lock" ||
- // Also open lib.rs and main.rs
- fileName == "lib.rs" ||
- fileName == "main.rs"
- case "python", "pyright", "pylsp":
- // For Python, open key project files
- return fileName == "pyproject.toml" ||
- fileName == "setup.py" ||
- fileName == "requirements.txt" ||
- fileName == "__init__.py" ||
- fileName == "__main__.py"
- case "clangd":
- // For C/C++, open key project files
- return fileName == "CMakeLists.txt" ||
- fileName == "Makefile" ||
- fileName == "compile_commands.json"
- case "java", "jdtls":
- // For Java, open key project files
- return fileName == "pom.xml" ||
- fileName == "build.gradle" ||
- ext == ".java" // Java servers often need to see source files
- }
-
- // For unknown servers, prioritize common configuration files
- return fileName == "package.json" ||
- fileName == "Makefile" ||
- fileName == "CMakeLists.txt" ||
- fileName == ".editorconfig"
-}
@@ -49,6 +49,8 @@ type Service interface {
Deny(permission PermissionRequest)
Request(opts CreatePermissionRequest) bool
AutoApproveSession(sessionID string)
+ SetSkipRequests(skip bool)
+ SkipRequests() bool
SubscribeNotifications(ctx context.Context) <-chan pubsub.Event[PermissionNotification]
}
@@ -210,6 +212,14 @@ func (s *permissionService) SubscribeNotifications(ctx context.Context) <-chan p
return s.notificationBroker.Subscribe(ctx)
}
+func (s *permissionService) SetSkipRequests(skip bool) {
+ s.skip = skip
+}
+
+func (s *permissionService) SkipRequests() bool {
+ return s.skip
+}
+
func NewPermissionService(workingDir string, skip bool, allowedTools []string) Service {
return &permissionService{
Broker: pubsub.NewBroker[PermissionRequest](),
@@ -154,12 +154,10 @@ func TestPermissionService_SequentialProperties(t *testing.T) {
events := service.Subscribe(t.Context())
var result1 bool
var wg sync.WaitGroup
- wg.Add(1)
- go func() {
- defer wg.Done()
+ wg.Go(func() {
result1 = service.Request(req)
- }()
+ })
var permissionReq PermissionRequest
event := <-events
@@ -170,12 +168,10 @@ func TestPermissionService_SequentialProperties(t *testing.T) {
assert.True(t, result1, "First request should be granted")
var result2 bool
- wg.Add(1)
- go func() {
- defer wg.Done()
+ wg.Go(func() {
result2 = service.Request(req)
- }()
+ })
event = <-events
permissionReq = event.Payload
@@ -5,6 +5,7 @@ import (
"database/sql"
"github.com/charmbracelet/crush/internal/db"
+ "github.com/charmbracelet/crush/internal/event"
"github.com/charmbracelet/crush/internal/pubsub"
"github.com/google/uuid"
)
@@ -48,6 +49,7 @@ func (s *service) Create(ctx context.Context, title string) (Session, error) {
}
session := s.fromDBItem(dbSession)
s.Publish(pubsub.CreatedEvent, session)
+ event.SessionCreated()
return session, nil
}
@@ -89,6 +91,7 @@ func (s *service) Delete(ctx context.Context, id string) error {
return err
}
s.Publish(pubsub.DeletedEvent, session)
+ event.SessionDeleted()
return nil
}
@@ -1,9 +1,10 @@
package shell
import (
- "context"
"strings"
"testing"
+
+ "github.com/stretchr/testify/require"
)
func TestCommandBlocking(t *testing.T) {
@@ -56,10 +57,7 @@ func TestCommandBlocking(t *testing.T) {
{
name: "block npm global install with -g",
blockFuncs: []BlockFunc{
- ArgumentsBlocker([][]string{
- {"npm", "install", "-g"},
- {"npm", "install", "--global"},
- }),
+ ArgumentsBlocker("npm", []string{"install"}, []string{"-g"}),
},
command: "npm install -g typescript",
shouldBlock: true,
@@ -67,10 +65,7 @@ func TestCommandBlocking(t *testing.T) {
{
name: "block npm global install with --global",
blockFuncs: []BlockFunc{
- ArgumentsBlocker([][]string{
- {"npm", "install", "-g"},
- {"npm", "install", "--global"},
- }),
+ ArgumentsBlocker("npm", []string{"install"}, []string{"--global"}),
},
command: "npm install --global typescript",
shouldBlock: true,
@@ -78,10 +73,8 @@ func TestCommandBlocking(t *testing.T) {
{
name: "allow npm local install",
blockFuncs: []BlockFunc{
- ArgumentsBlocker([][]string{
- {"npm", "install", "-g"},
- {"npm", "install", "--global"},
- }),
+ ArgumentsBlocker("npm", []string{"install"}, []string{"-g"}),
+ ArgumentsBlocker("npm", []string{"install"}, []string{"--global"}),
},
command: "npm install typescript",
shouldBlock: false,
@@ -98,7 +91,7 @@ func TestCommandBlocking(t *testing.T) {
BlockFuncs: tt.blockFuncs,
})
- _, _, err := shell.Exec(context.Background(), tt.command)
+ _, _, err := shell.Exec(t.Context(), tt.command)
if tt.shouldBlock {
if err == nil {
@@ -116,3 +109,268 @@ func TestCommandBlocking(t *testing.T) {
})
}
}
+
+func TestArgumentsBlocker(t *testing.T) {
+ tests := []struct {
+ name string
+ cmd string
+ args []string
+ flags []string
+ input []string
+ shouldBlock bool
+ }{
+ // Basic command blocking
+ {
+ name: "block exact command match",
+ cmd: "npm",
+ args: []string{"install"},
+ flags: nil,
+ input: []string{"npm", "install", "package"},
+ shouldBlock: true,
+ },
+ {
+ name: "allow different command",
+ cmd: "npm",
+ args: []string{"install"},
+ flags: nil,
+ input: []string{"yarn", "install", "package"},
+ shouldBlock: false,
+ },
+ {
+ name: "allow different subcommand",
+ cmd: "npm",
+ args: []string{"install"},
+ flags: nil,
+ input: []string{"npm", "list"},
+ shouldBlock: false,
+ },
+
+ // Flag-based blocking
+ {
+ name: "block with single flag",
+ cmd: "npm",
+ args: []string{"install"},
+ flags: []string{"-g"},
+ input: []string{"npm", "install", "-g", "typescript"},
+ shouldBlock: true,
+ },
+ {
+ name: "block with flag in different position",
+ cmd: "npm",
+ args: []string{"install"},
+ flags: []string{"-g"},
+ input: []string{"npm", "install", "typescript", "-g"},
+ shouldBlock: true,
+ },
+ {
+ name: "allow without required flag",
+ cmd: "npm",
+ args: []string{"install"},
+ flags: []string{"-g"},
+ input: []string{"npm", "install", "typescript"},
+ shouldBlock: false,
+ },
+ {
+ name: "block with multiple flags",
+ cmd: "pip",
+ args: []string{"install"},
+ flags: []string{"--user"},
+ input: []string{"pip", "install", "--user", "--upgrade", "package"},
+ shouldBlock: true,
+ },
+
+ // Complex argument patterns
+ {
+ name: "block multi-arg subcommand",
+ cmd: "yarn",
+ args: []string{"global", "add"},
+ flags: nil,
+ input: []string{"yarn", "global", "add", "typescript"},
+ shouldBlock: true,
+ },
+ {
+ name: "allow partial multi-arg match",
+ cmd: "yarn",
+ args: []string{"global", "add"},
+ flags: nil,
+ input: []string{"yarn", "global", "list"},
+ shouldBlock: false,
+ },
+
+ // Edge cases
+ {
+ name: "handle empty input",
+ cmd: "npm",
+ args: []string{"install"},
+ flags: nil,
+ input: []string{},
+ shouldBlock: false,
+ },
+ {
+ name: "handle command only",
+ cmd: "npm",
+ args: []string{"install"},
+ flags: nil,
+ input: []string{"npm"},
+ shouldBlock: false,
+ },
+ {
+ name: "block pacman with -S flag",
+ cmd: "pacman",
+ args: nil,
+ flags: []string{"-S"},
+ input: []string{"pacman", "-S", "package"},
+ shouldBlock: true,
+ },
+ {
+ name: "allow pacman without -S flag",
+ cmd: "pacman",
+ args: nil,
+ flags: []string{"-S"},
+ input: []string{"pacman", "-Q", "package"},
+ shouldBlock: false,
+ },
+
+ // `go test -exec`
+ {
+ name: "go test exec",
+ cmd: "go",
+ args: []string{"test"},
+ flags: []string{"-exec"},
+ input: []string{"go", "test", "-exec", "bash -c 'echo hello'"},
+ shouldBlock: true,
+ },
+ {
+ name: "go test exec",
+ cmd: "go",
+ args: []string{"test"},
+ flags: []string{"-exec"},
+ input: []string{"go", "test", `-exec="bash -c 'echo hello'"`},
+ shouldBlock: true,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ blocker := ArgumentsBlocker(tt.cmd, tt.args, tt.flags)
+ result := blocker(tt.input)
+ require.Equal(t, tt.shouldBlock, result,
+ "Expected block=%v for input %v", tt.shouldBlock, tt.input)
+ })
+ }
+}
+
+func TestCommandsBlocker(t *testing.T) {
+ tests := []struct {
+ name string
+ banned []string
+ input []string
+ shouldBlock bool
+ }{
+ {
+ name: "block single banned command",
+ banned: []string{"curl"},
+ input: []string{"curl", "https://example.com"},
+ shouldBlock: true,
+ },
+ {
+ name: "allow non-banned command",
+ banned: []string{"curl", "wget"},
+ input: []string{"echo", "hello"},
+ shouldBlock: false,
+ },
+ {
+ name: "block from multiple banned",
+ banned: []string{"curl", "wget", "nc"},
+ input: []string{"wget", "https://example.com"},
+ shouldBlock: true,
+ },
+ {
+ name: "handle empty input",
+ banned: []string{"curl"},
+ input: []string{},
+ shouldBlock: false,
+ },
+ {
+ name: "case sensitive matching",
+ banned: []string{"curl"},
+ input: []string{"CURL", "https://example.com"},
+ shouldBlock: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ blocker := CommandsBlocker(tt.banned)
+ result := blocker(tt.input)
+ require.Equal(t, tt.shouldBlock, result,
+ "Expected block=%v for input %v", tt.shouldBlock, tt.input)
+ })
+ }
+}
+
+func TestSplitArgsFlags(t *testing.T) {
+ tests := []struct {
+ name string
+ input []string
+ wantArgs []string
+ wantFlags []string
+ }{
+ {
+ name: "only args",
+ input: []string{"install", "package", "another"},
+ wantArgs: []string{"install", "package", "another"},
+ wantFlags: []string{},
+ },
+ {
+ name: "only flags",
+ input: []string{"-g", "--verbose", "-f"},
+ wantArgs: []string{},
+ wantFlags: []string{"-g", "--verbose", "-f"},
+ },
+ {
+ name: "mixed args and flags",
+ input: []string{"install", "-g", "package", "--verbose"},
+ wantArgs: []string{"install", "package"},
+ wantFlags: []string{"-g", "--verbose"},
+ },
+ {
+ name: "empty input",
+ input: []string{},
+ wantArgs: []string{},
+ wantFlags: []string{},
+ },
+ {
+ name: "single dash flag",
+ input: []string{"-S", "package"},
+ wantArgs: []string{"package"},
+ wantFlags: []string{"-S"},
+ },
+ {
+ name: "flag with equals sign",
+ input: []string{"-exec=bash", "package"},
+ wantArgs: []string{"package"},
+ wantFlags: []string{"-exec"},
+ },
+ {
+ name: "long flag with equals sign",
+ input: []string{"--config=/path/to/config", "run"},
+ wantArgs: []string{"run"},
+ wantFlags: []string{"--config"},
+ },
+ {
+ name: "flag with complex value",
+ input: []string{`-exec="bash -c 'echo hello'"`, "test"},
+ wantArgs: []string{"test"},
+ wantFlags: []string{"-exec"},
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ args, flags := splitArgsFlags(tt.input)
+ require.Equal(t, tt.wantArgs, args, "args mismatch")
+ require.Equal(t, tt.wantFlags, flags, "flags mismatch")
+ })
+ }
+}
@@ -1,59 +0,0 @@
-package shell
-
-import (
- "context"
-
- "github.com/u-root/u-root/pkg/core"
- "github.com/u-root/u-root/pkg/core/cat"
- "github.com/u-root/u-root/pkg/core/chmod"
- "github.com/u-root/u-root/pkg/core/cp"
- "github.com/u-root/u-root/pkg/core/find"
- "github.com/u-root/u-root/pkg/core/ls"
- "github.com/u-root/u-root/pkg/core/mkdir"
- "github.com/u-root/u-root/pkg/core/mv"
- "github.com/u-root/u-root/pkg/core/rm"
- "github.com/u-root/u-root/pkg/core/touch"
- "github.com/u-root/u-root/pkg/core/xargs"
- "mvdan.cc/sh/v3/interp"
-)
-
-var coreUtils = map[string]func() core.Command{
- "cat": func() core.Command { return cat.New() },
- "chmod": func() core.Command { return chmod.New() },
- "cp": func() core.Command { return cp.New() },
- "find": func() core.Command { return find.New() },
- "ls": func() core.Command { return ls.New() },
- "mkdir": func() core.Command { return mkdir.New() },
- "mv": func() core.Command { return mv.New() },
- "rm": func() core.Command { return rm.New() },
- "touch": func() core.Command { return touch.New() },
- "xargs": func() core.Command { return xargs.New() },
-}
-
-func (s *Shell) coreUtilsHandler() func(next interp.ExecHandlerFunc) interp.ExecHandlerFunc {
- return func(next interp.ExecHandlerFunc) interp.ExecHandlerFunc {
- return func(ctx context.Context, args []string) error {
- if len(args) == 0 {
- return next(ctx, args)
- }
-
- program, programArgs := args[0], args[1:]
-
- newCoreUtil, ok := coreUtils[program]
- if !ok {
- return next(ctx, args)
- }
-
- c := interp.HandlerCtx(ctx)
-
- cmd := newCoreUtil()
- cmd.SetIO(c.Stdin, c.Stdout, c.Stderr)
- cmd.SetWorkingDir(c.Dir)
- cmd.SetLookupEnv(func(key string) (string, bool) {
- v := c.Env.Get(key)
- return v.Str, v.Set
- })
- return cmd.RunContext(ctx, programArgs...)
- }
- }
-}
@@ -16,9 +16,12 @@ import (
"errors"
"fmt"
"os"
+ "slices"
"strings"
"sync"
+ "github.com/charmbracelet/x/exp/slice"
+ "mvdan.cc/sh/moreinterp/coreutils"
"mvdan.cc/sh/v3/expand"
"mvdan.cc/sh/v3/interp"
"mvdan.cc/sh/v3/syntax"
@@ -155,39 +158,56 @@ func (s *Shell) SetBlockFuncs(blockFuncs []BlockFunc) {
}
// CommandsBlocker creates a BlockFunc that blocks exact command matches
-func CommandsBlocker(bannedCommands []string) BlockFunc {
- bannedSet := make(map[string]bool)
- for _, cmd := range bannedCommands {
- bannedSet[cmd] = true
+func CommandsBlocker(cmds []string) BlockFunc {
+ bannedSet := make(map[string]struct{})
+ for _, cmd := range cmds {
+ bannedSet[cmd] = struct{}{}
}
return func(args []string) bool {
if len(args) == 0 {
return false
}
- return bannedSet[args[0]]
+ _, ok := bannedSet[args[0]]
+ return ok
}
}
-// ArgumentsBlocker creates a BlockFunc that blocks specific subcommands
-func ArgumentsBlocker(blockedSubCommands [][]string) BlockFunc {
- return func(args []string) bool {
- for _, blocked := range blockedSubCommands {
- if len(args) >= len(blocked) {
- match := true
- for i, part := range blocked {
- if args[i] != part {
- match = false
- break
- }
- }
- if match {
- return true
- }
+// ArgumentsBlocker creates a BlockFunc that blocks specific subcommand
+func ArgumentsBlocker(cmd string, args []string, flags []string) BlockFunc {
+ return func(parts []string) bool {
+ if len(parts) == 0 || parts[0] != cmd {
+ return false
+ }
+
+ argParts, flagParts := splitArgsFlags(parts[1:])
+ if len(argParts) < len(args) || len(flagParts) < len(flags) {
+ return false
+ }
+
+ argsMatch := slices.Equal(argParts[:len(args)], args)
+ flagsMatch := slice.IsSubset(flags, flagParts)
+
+ return argsMatch && flagsMatch
+ }
+}
+
+func splitArgsFlags(parts []string) (args []string, flags []string) {
+ args = make([]string, 0, len(parts))
+ flags = make([]string, 0, len(parts))
+ for _, part := range parts {
+ if strings.HasPrefix(part, "-") {
+ // Extract flag name before '=' if present
+ flag := part
+ if idx := strings.IndexByte(part, '='); idx != -1 {
+ flag = part[:idx]
}
+ flags = append(flags, flag)
+ } else {
+ args = append(args, part)
}
- return false
}
+ return args, flags
}
func (s *Shell) blockHandler() func(next interp.ExecHandlerFunc) interp.ExecHandlerFunc {
@@ -221,7 +241,7 @@ func (s *Shell) execPOSIX(ctx context.Context, command string) (string, string,
interp.Interactive(false),
interp.Env(expand.ListEnviron(s.env...)),
interp.Dir(s.cwd),
- interp.ExecHandlers(s.blockHandler(), s.coreUtilsHandler()),
+ interp.ExecHandlers(s.blockHandler(), coreutils.ExecHandler),
)
if err != nil {
return "", "", fmt.Errorf("could not run command: %w", err)
@@ -16,7 +16,7 @@ func BenchmarkShellQuickCommands(b *testing.B) {
b.ReportAllocs()
for b.Loop() {
- _, _, err := shell.Exec(context.Background(), "echo test")
+ _, _, err := shell.Exec(b.Context(), "echo test")
exitCode := ExitCode(err)
if err != nil || exitCode != 0 {
b.Fatalf("Command failed: %v, exit code: %d", err, exitCode)
@@ -100,7 +100,7 @@ func TestRunContinuity(t *testing.T) {
func TestCrossPlatformExecution(t *testing.T) {
shell := NewShell(&Options{WorkingDir: "."})
- ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+ ctx, cancel := context.WithTimeout(t.Context(), 5*time.Second)
defer cancel()
// Test a simple command that should work on all platforms
@@ -1,90 +0,0 @@
-package main
-
-import (
- "fmt"
- "image/color"
- "os"
-
- tea "github.com/charmbracelet/bubbletea/v2"
- anim "github.com/charmbracelet/crush/internal/tui/components/anim"
- "github.com/charmbracelet/crush/internal/tui/styles"
- "github.com/charmbracelet/lipgloss/v2"
-)
-
-type model struct {
- anim tea.Model
- bgColor color.Color
- quitting bool
- w, h int
-}
-
-func (m model) Init() tea.Cmd {
- return m.anim.Init()
-}
-
-func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
- switch msg := msg.(type) {
- case tea.WindowSizeMsg:
- m.w, m.h = msg.Width, msg.Height
- return m, nil
- case tea.KeyMsg:
- switch msg.String() {
- case "q", "ctrl+c":
- m.quitting = true
- return m, tea.Quit
- default:
- return m, nil
- }
- case anim.StepMsg:
- var cmd tea.Cmd
- m.anim, cmd = m.anim.Update(msg)
- return m, cmd
- default:
- return m, nil
- }
-}
-
-func (m model) View() tea.View {
- if m.w == 0 || m.h == 0 {
- return tea.NewView("")
- }
-
- v := tea.NewView("")
- v.BackgroundColor = m.bgColor
-
- if m.quitting {
- return v
- }
-
- if a, ok := m.anim.(*anim.Anim); ok {
- l := lipgloss.NewLayer(a.View()).
- Width(a.Width()).
- X(m.w/2 - a.Width()/2).
- Y(m.h / 2)
-
- v = tea.NewView(lipgloss.NewCanvas(l))
- v.BackgroundColor = m.bgColor
- return v
- }
- return v
-}
-
-func main() {
- t := styles.CurrentTheme()
- p := tea.NewProgram(model{
- bgColor: t.BgBase,
- anim: anim.New(anim.Settings{
- Label: "Hello",
- Size: 50,
- LabelColor: t.FgBase,
- GradColorA: t.Primary,
- GradColorB: t.Secondary,
- CycleColors: true,
- }),
- }, tea.WithAltScreen())
-
- if _, err := p.Run(); err != nil {
- fmt.Fprintf(os.Stderr, "Uh oh: %v\n", err)
- os.Exit(1)
- }
-}
@@ -2,8 +2,10 @@ package chat
import (
"context"
+ "strings"
"time"
+ "github.com/atotto/clipboard"
"github.com/charmbracelet/bubbles/v2/key"
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/crush/internal/app"
@@ -28,6 +30,12 @@ type SessionSelectedMsg = session.Session
type SessionClearedMsg struct{}
+type SelectionCopyMsg struct {
+ clickCount int
+ endSelection bool
+ x, y int
+}
+
const (
NotFound = -1
)
@@ -42,6 +50,8 @@ type MessageListCmp interface {
SetSession(session.Session) tea.Cmd
GoToBottom() tea.Cmd
+ GetSelectedText() string
+ CopySelectedText(bool) tea.Cmd
}
// messageListCmp implements MessageListCmp, providing a virtualized list
@@ -56,6 +66,13 @@ type messageListCmp struct {
lastUserMessageTime int64
defaultListKeyMap list.KeyMap
+
+ // Click tracking for double/triple click detection
+ lastClickTime time.Time
+ lastClickX int
+ lastClickY int
+ clickCount int
+ promptQueue int
}
// New creates a new message list component with custom keybindings
@@ -85,46 +102,144 @@ func (m *messageListCmp) Init() tea.Cmd {
// Update handles incoming messages and updates the component state.
func (m *messageListCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
+ var cmds []tea.Cmd
+ if m.session.ID != "" && m.app.CoderAgent != nil {
+ queueSize := m.app.CoderAgent.QueuedPrompts(m.session.ID)
+ if queueSize != m.promptQueue {
+ m.promptQueue = queueSize
+ cmds = append(cmds, m.SetSize(m.width, m.height))
+ }
+ }
switch msg := msg.(type) {
+ case tea.KeyPressMsg:
+ if m.listCmp.IsFocused() && m.listCmp.HasSelection() {
+ switch {
+ case key.Matches(msg, messages.CopyKey):
+ cmds = append(cmds, m.CopySelectedText(true))
+ return m, tea.Batch(cmds...)
+ case key.Matches(msg, messages.ClearSelectionKey):
+ cmds = append(cmds, m.SelectionClear())
+ return m, tea.Batch(cmds...)
+ }
+ }
+ case tea.MouseClickMsg:
+ x := msg.X - 1 // Adjust for padding
+ y := msg.Y - 1 // Adjust for padding
+ if x < 0 || y < 0 || x >= m.width-2 || y >= m.height-1 {
+ return m, nil // Ignore clicks outside the component
+ }
+ if msg.Button == tea.MouseLeft {
+ cmds = append(cmds, m.handleMouseClick(x, y))
+ return m, tea.Batch(cmds...)
+ }
+ return m, tea.Batch(cmds...)
+ case tea.MouseMotionMsg:
+ x := msg.X - 1 // Adjust for padding
+ y := msg.Y - 1 // Adjust for padding
+ if x < 0 || y < 0 || x >= m.width-2 || y >= m.height-1 {
+ if y < 0 {
+ cmds = append(cmds, m.listCmp.MoveUp(1))
+ return m, tea.Batch(cmds...)
+ }
+ if y >= m.height-1 {
+ cmds = append(cmds, m.listCmp.MoveDown(1))
+ return m, tea.Batch(cmds...)
+ }
+ return m, nil // Ignore clicks outside the component
+ }
+ if msg.Button == tea.MouseLeft {
+ m.listCmp.EndSelection(x, y)
+ }
+ return m, tea.Batch(cmds...)
+ case tea.MouseReleaseMsg:
+ x := msg.X - 1 // Adjust for padding
+ y := msg.Y - 1 // Adjust for padding
+ if msg.Button == tea.MouseLeft {
+ clickCount := m.clickCount
+ if x < 0 || y < 0 || x >= m.width-2 || y >= m.height-1 {
+ tick := tea.Tick(doubleClickThreshold, func(time.Time) tea.Msg {
+ return SelectionCopyMsg{
+ clickCount: clickCount,
+ endSelection: false,
+ }
+ })
+
+ cmds = append(cmds, tick)
+ return m, tea.Batch(cmds...)
+ }
+ tick := tea.Tick(doubleClickThreshold, func(time.Time) tea.Msg {
+ return SelectionCopyMsg{
+ clickCount: clickCount,
+ endSelection: true,
+ x: x,
+ y: y,
+ }
+ })
+ cmds = append(cmds, tick)
+ return m, tea.Batch(cmds...)
+ }
+ return m, nil
+ case SelectionCopyMsg:
+ if msg.clickCount == m.clickCount && time.Since(m.lastClickTime) >= doubleClickThreshold {
+ // If the click count matches and within threshold, copy selected text
+ if msg.endSelection {
+ m.listCmp.EndSelection(msg.x, msg.y)
+ }
+ m.listCmp.SelectionStop()
+ cmds = append(cmds, m.CopySelectedText(true))
+ return m, tea.Batch(cmds...)
+ }
case pubsub.Event[permission.PermissionNotification]:
- return m, m.handlePermissionRequest(msg.Payload)
+ cmds = append(cmds, m.handlePermissionRequest(msg.Payload))
+ return m, tea.Batch(cmds...)
case SessionSelectedMsg:
if msg.ID != m.session.ID {
- cmd := m.SetSession(msg)
- return m, cmd
+ cmds = append(cmds, m.SetSession(msg))
}
- return m, nil
+ return m, tea.Batch(cmds...)
case SessionClearedMsg:
m.session = session.Session{}
- return m, m.listCmp.SetItems([]list.Item{})
+ cmds = append(cmds, m.listCmp.SetItems([]list.Item{}))
+ return m, tea.Batch(cmds...)
case pubsub.Event[message.Message]:
- cmd := m.handleMessageEvent(msg)
- return m, cmd
+ cmds = append(cmds, m.handleMessageEvent(msg))
+ return m, tea.Batch(cmds...)
case tea.MouseWheelMsg:
- u, cmd := m.listCmp.Update(msg)
- m.listCmp = u.(list.List[list.Item])
- return m, cmd
- default:
- var cmds []tea.Cmd
u, cmd := m.listCmp.Update(msg)
m.listCmp = u.(list.List[list.Item])
cmds = append(cmds, cmd)
return m, tea.Batch(cmds...)
}
+
+ u, cmd := m.listCmp.Update(msg)
+ m.listCmp = u.(list.List[list.Item])
+ cmds = append(cmds, cmd)
+ return m, tea.Batch(cmds...)
}
// View renders the message list or an initial screen if empty.
func (m *messageListCmp) View() string {
t := styles.CurrentTheme()
- return t.S().Base.
- Padding(1, 1, 0, 1).
- Width(m.width).
- Height(m.height).
- Render(
- m.listCmp.View(),
- )
+ height := m.height
+ if m.promptQueue > 0 {
+ height -= 4 // pill height and padding
+ }
+ view := []string{
+ t.S().Base.
+ Padding(1, 1, 0, 1).
+ Width(m.width).
+ Height(height).
+ Render(
+ m.listCmp.View(),
+ ),
+ }
+ if m.app.CoderAgent != nil && m.promptQueue > 0 {
+ queuePill := queuePill(m.promptQueue, t)
+ view = append(view, t.S().Base.PaddingLeft(4).PaddingTop(1).Render(queuePill))
+ }
+ return strings.Join(view, "\n")
}
func (m *messageListCmp) handlePermissionRequest(permission permission.PermissionNotification) tea.Cmd {
@@ -541,7 +656,12 @@ func (m *messageListCmp) GetSize() (int, int) {
func (m *messageListCmp) SetSize(width int, height int) tea.Cmd {
m.width = width
m.height = height
- return m.listCmp.SetSize(width-2, height-1) // for padding
+ if m.promptQueue > 0 {
+ queueHeight := 3 + 1 // 1 for padding top
+ lHight := max(0, height-(1+queueHeight))
+ return m.listCmp.SetSize(width-2, lHight)
+ }
+ return m.listCmp.SetSize(width-2, max(0, height-1)) // for padding
}
// Blur implements MessageListCmp.
@@ -566,3 +686,97 @@ func (m *messageListCmp) Bindings() []key.Binding {
func (m *messageListCmp) GoToBottom() tea.Cmd {
return m.listCmp.GoToBottom()
}
+
+const (
+ doubleClickThreshold = 500 * time.Millisecond
+ clickTolerance = 2 // pixels
+)
+
+// handleMouseClick handles mouse click events and detects double/triple clicks.
+func (m *messageListCmp) handleMouseClick(x, y int) tea.Cmd {
+ now := time.Now()
+
+ // Check if this is a potential multi-click
+ if now.Sub(m.lastClickTime) <= doubleClickThreshold &&
+ abs(x-m.lastClickX) <= clickTolerance &&
+ abs(y-m.lastClickY) <= clickTolerance {
+ m.clickCount++
+ } else {
+ m.clickCount = 1
+ }
+
+ m.lastClickTime = now
+ m.lastClickX = x
+ m.lastClickY = y
+
+ switch m.clickCount {
+ case 1:
+ // Single click - start selection
+ m.listCmp.StartSelection(x, y)
+ case 2:
+ // Double click - select word
+ m.listCmp.SelectWord(x, y)
+ case 3:
+ // Triple click - select paragraph
+ m.listCmp.SelectParagraph(x, y)
+ m.clickCount = 0 // Reset after triple click
+ }
+
+ return nil
+}
+
+// SelectionClear clears the current selection in the list component.
+func (m *messageListCmp) SelectionClear() tea.Cmd {
+ m.listCmp.SelectionClear()
+ m.previousSelected = ""
+ m.lastClickX, m.lastClickY = 0, 0
+ m.lastClickTime = time.Time{}
+ m.clickCount = 0
+ return nil
+}
+
+// HasSelection checks if there is a selection in the list component.
+func (m *messageListCmp) HasSelection() bool {
+ return m.listCmp.HasSelection()
+}
+
+// GetSelectedText returns the currently selected text from the list component.
+func (m *messageListCmp) GetSelectedText() string {
+ return m.listCmp.GetSelectedText(3) // 3 padding for the left border/padding
+}
+
+// CopySelectedText copies the currently selected text to the clipboard. When
+// clear is true, it clears the selection after copying.
+func (m *messageListCmp) CopySelectedText(clear bool) tea.Cmd {
+ if !m.listCmp.HasSelection() {
+ return nil
+ }
+
+ selectedText := m.GetSelectedText()
+ if selectedText == "" {
+ return util.ReportInfo("No text selected")
+ }
+
+ if clear {
+ defer func() { m.SelectionClear() }()
+ }
+
+ return tea.Sequence(
+ // We use both OSC 52 and native clipboard for compatibility with different
+ // terminal emulators and environments.
+ tea.SetClipboard(selectedText),
+ func() tea.Msg {
+ _ = clipboard.WriteAll(selectedText)
+ return nil
+ },
+ util.ReportInfo("Selected text copied to clipboard"),
+ )
+}
+
+// abs returns the absolute value of an integer.
+func abs(x int) int {
+ if x < 0 {
+ return -x
+ }
+ return x
+}
@@ -55,7 +55,7 @@ type editorCmp struct {
x, y int
app *app.App
session session.Session
- textarea textarea.Model
+ textarea *textarea.Model
attachments []message.Attachment
deleteMode bool
readyPlaceholder string
@@ -75,7 +75,7 @@ var DeleteKeyMaps = DeleteAttachmentKeyMaps{
key.WithHelp("ctrl+r+{i}", "delete attachment at index i"),
),
Escape: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel delete mode"),
),
DeleteAllAttachments: key.NewBinding(
@@ -138,13 +138,6 @@ func (m *editorCmp) Init() tea.Cmd {
}
func (m *editorCmp) send() tea.Cmd {
- if m.app.CoderAgent == nil {
- return util.ReportError(fmt.Errorf("coder agent is not initialized"))
- }
- if m.app.CoderAgent.IsSessionBusy(m.session.ID) {
- return util.ReportWarn("Agent is working, please wait...")
- }
-
value := m.textarea.Value()
value = strings.TrimSpace(value)
@@ -228,7 +221,7 @@ func (m *editorCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
case tea.PasteMsg:
path := strings.ReplaceAll(string(msg), "\\ ", " ")
// try to get an image
- path, err := filepath.Abs(path)
+ path, err := filepath.Abs(strings.TrimSpace(path))
if err != nil {
m.textarea, cmd = m.textarea.Update(msg)
return m, cmd
@@ -263,6 +256,9 @@ func (m *editorCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
Attachment: attachment,
})
+ case commands.ToggleYoloModeMsg:
+ m.setEditorPrompt()
+ return m, nil
case tea.KeyPressMsg:
cur := m.textarea.Cursor()
curIdx := m.textarea.Width()*cur.Y + cur.X
@@ -317,9 +313,9 @@ func (m *editorCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
// Handle Enter key
if m.textarea.Focused() && key.Matches(msg, m.keyMap.SendMessage) {
value := m.textarea.Value()
- if len(value) > 0 && value[len(value)-1] == '\\' {
- // If the last character is a backslash, remove it and add a newline
- m.textarea.SetValue(value[:len(value)-1])
+ if strings.HasSuffix(value, "\\") {
+ // If the last character is a backslash, remove it and add a newline.
+ m.textarea.SetValue(strings.TrimSuffix(value, "\\"))
} else {
// Otherwise, send the message
return m, m.send()
@@ -368,6 +364,14 @@ func (m *editorCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return m, tea.Batch(cmds...)
}
+func (m *editorCmp) setEditorPrompt() {
+ if m.app.Permissions.SkipRequests() {
+ m.textarea.SetPromptFunc(4, yoloPromptFunc)
+ return
+ }
+ m.textarea.SetPromptFunc(4, normalPromptFunc)
+}
+
func (m *editorCmp) completionsPosition() (int, int) {
cur := m.textarea.Cursor()
if cur == nil {
@@ -416,6 +420,9 @@ func (m *editorCmp) View() string {
} else {
m.textarea.Placeholder = m.readyPlaceholder
}
+ if m.app.Permissions.SkipRequests() {
+ m.textarea.Placeholder = "Yolo mode!"
+ }
if len(m.attachments) == 0 {
content := t.S().Base.Padding(1).Render(
m.textarea.View(),
@@ -473,7 +480,8 @@ func (m *editorCmp) SetPosition(x, y int) tea.Cmd {
}
func (m *editorCmp) startCompletions() tea.Msg {
- files, _, _ := fsext.ListDirectory(".", []string{}, 0)
+ files, _, _ := fsext.ListDirectory(".", nil, 0)
+ slices.Sort(files)
completionItems := make([]completions.Completion, 0, len(files))
for _, file := range files {
file = strings.TrimPrefix(file, "./")
@@ -529,31 +537,47 @@ func (c *editorCmp) HasAttachments() bool {
return len(c.attachments) > 0
}
-func New(app *app.App) Editor {
+func normalPromptFunc(info textarea.PromptInfo) string {
t := styles.CurrentTheme()
- ta := textarea.New()
- ta.SetStyles(t.S().TextArea)
- ta.SetPromptFunc(4, func(info textarea.PromptInfo) string {
- if info.LineNumber == 0 {
- return " > "
- }
+ if info.LineNumber == 0 {
+ return " > "
+ }
+ if info.Focused {
+ return t.S().Base.Foreground(t.GreenDark).Render("::: ")
+ }
+ return t.S().Muted.Render("::: ")
+}
+
+func yoloPromptFunc(info textarea.PromptInfo) string {
+ t := styles.CurrentTheme()
+ if info.LineNumber == 0 {
if info.Focused {
- return t.S().Base.Foreground(t.GreenDark).Render("::: ")
+ return fmt.Sprintf("%s ", t.YoloIconFocused)
} else {
- return t.S().Muted.Render("::: ")
+ return fmt.Sprintf("%s ", t.YoloIconBlurred)
}
- })
+ }
+ if info.Focused {
+ return fmt.Sprintf("%s ", t.YoloDotsFocused)
+ }
+ return fmt.Sprintf("%s ", t.YoloDotsBlurred)
+}
+
+func New(app *app.App) Editor {
+ t := styles.CurrentTheme()
+ ta := textarea.New()
+ ta.SetStyles(t.S().TextArea)
ta.ShowLineNumbers = false
ta.CharLimit = -1
ta.SetVirtualCursor(false)
ta.Focus()
-
e := &editorCmp{
// TODO: remove the app instance from here
app: app,
textarea: ta,
keyMap: DefaultEditorKeyMap(),
}
+ e.setEditorPrompt()
e.randomizePlaceholders()
e.textarea.Placeholder = e.readyPlaceholder
@@ -61,7 +61,7 @@ var AttachmentsKeyMaps = DeleteAttachmentKeyMaps{
key.WithHelp("ctrl+r+{i}", "delete attachment at index i"),
),
Escape: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel delete mode"),
),
DeleteAllAttachments: key.NewBinding(
@@ -6,14 +6,16 @@ import (
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
"github.com/charmbracelet/crush/internal/pubsub"
"github.com/charmbracelet/crush/internal/session"
"github.com/charmbracelet/crush/internal/tui/styles"
"github.com/charmbracelet/crush/internal/tui/util"
"github.com/charmbracelet/lipgloss/v2"
+ "github.com/charmbracelet/x/ansi"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
type Header interface {
@@ -27,11 +29,11 @@ type Header interface {
type header struct {
width int
session session.Session
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
detailsOpen bool
}
-func New(lspClients map[string]*lsp.Client) Header {
+func New(lspClients *csync.Map[string, *lsp.Client]) Header {
return &header{
lspClients: lspClients,
width: 0,
@@ -42,59 +44,68 @@ func (h *header) Init() tea.Cmd {
return nil
}
-func (p *header) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
+func (h *header) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
switch msg := msg.(type) {
case pubsub.Event[session.Session]:
if msg.Type == pubsub.UpdatedEvent {
- if p.session.ID == msg.Payload.ID {
- p.session = msg.Payload
+ if h.session.ID == msg.Payload.ID {
+ h.session = msg.Payload
}
}
}
- return p, nil
+ return h, nil
}
-func (p *header) View() string {
- if p.session.ID == "" {
+func (h *header) View() string {
+ if h.session.ID == "" {
return ""
}
+ const (
+ gap = " "
+ diag = "╱"
+ minDiags = 3
+ leftPadding = 1
+ rightPadding = 1
+ )
+
t := styles.CurrentTheme()
- details := p.details()
- parts := []string{
- t.S().Base.Foreground(t.Secondary).Render("Charm™"),
- " ",
- styles.ApplyBoldForegroundGrad("CRUSH", t.Secondary, t.Primary),
- " ",
- }
- remainingWidth := p.width - lipgloss.Width(strings.Join(parts, "")) - lipgloss.Width(details) - 2
+ var b strings.Builder
+
+ b.WriteString(t.S().Base.Foreground(t.Secondary).Render("Charm™"))
+ b.WriteString(gap)
+ b.WriteString(styles.ApplyBoldForegroundGrad("CRUSH", t.Secondary, t.Primary))
+ b.WriteString(gap)
+
+ availDetailWidth := h.width - leftPadding - rightPadding - lipgloss.Width(b.String()) - minDiags
+ details := h.details(availDetailWidth)
+
+ remainingWidth := h.width -
+ lipgloss.Width(b.String()) -
+ lipgloss.Width(details) -
+ leftPadding -
+ rightPadding
+
if remainingWidth > 0 {
- char := "╱"
- lines := strings.Repeat(char, remainingWidth)
- parts = append(parts, t.S().Base.Foreground(t.Primary).Render(lines), " ")
+ b.WriteString(t.S().Base.Foreground(t.Primary).Render(
+ strings.Repeat(diag, max(minDiags, remainingWidth)),
+ ))
+ b.WriteString(gap)
}
- parts = append(parts, details)
+ b.WriteString(details)
- content := t.S().Base.Padding(0, 1).Render(
- lipgloss.JoinHorizontal(
- lipgloss.Left,
- parts...,
- ),
- )
- return content
+ return t.S().Base.Padding(0, rightPadding, 0, leftPadding).Render(b.String())
}
-func (h *header) details() string {
- t := styles.CurrentTheme()
- cwd := fsext.DirTrim(fsext.PrettyPath(config.Get().WorkingDir()), 4)
- parts := []string{
- t.S().Muted.Render(cwd),
- }
+func (h *header) details(availWidth int) string {
+ s := styles.CurrentTheme().S()
+
+ var parts []string
errorCount := 0
- for _, l := range h.lspClients {
+ for l := range h.lspClients.Seq() {
for _, diagnostics := range l.GetDiagnostics() {
for _, diagnostic := range diagnostics {
if diagnostic.Severity == protocol.SeverityError {
@@ -105,22 +116,33 @@ func (h *header) details() string {
}
if errorCount > 0 {
- parts = append(parts, t.S().Error.Render(fmt.Sprintf("%s%d", styles.ErrorIcon, errorCount)))
+ parts = append(parts, s.Error.Render(fmt.Sprintf("%s%d", styles.ErrorIcon, errorCount)))
}
agentCfg := config.Get().Agents["coder"]
model := config.Get().GetModelByType(agentCfg.Model)
percentage := (float64(h.session.CompletionTokens+h.session.PromptTokens) / float64(model.ContextWindow)) * 100
- formattedPercentage := t.S().Muted.Render(fmt.Sprintf("%d%%", int(percentage)))
+ formattedPercentage := s.Muted.Render(fmt.Sprintf("%d%%", int(percentage)))
parts = append(parts, formattedPercentage)
+ const keystroke = "ctrl+d"
if h.detailsOpen {
- parts = append(parts, t.S().Muted.Render("ctrl+d")+t.S().Subtle.Render(" close"))
+ parts = append(parts, s.Muted.Render(keystroke)+s.Subtle.Render(" close"))
} else {
- parts = append(parts, t.S().Muted.Render("ctrl+d")+t.S().Subtle.Render(" open "))
+ parts = append(parts, s.Muted.Render(keystroke)+s.Subtle.Render(" open "))
}
- dot := t.S().Subtle.Render(" • ")
- return strings.Join(parts, dot)
+
+ dot := s.Subtle.Render(" • ")
+ metadata := strings.Join(parts, dot)
+ metadata = dot + metadata
+
+ // Truncate cwd if necessary, and insert it at the beginning.
+ const dirTrimLimit = 4
+ cwd := fsext.DirTrim(fsext.PrettyPath(config.Get().WorkingDir()), dirTrimLimit)
+ cwd = ansi.Truncate(cwd, max(0, availWidth-lipgloss.Width(metadata)), "…")
+ cwd = s.Muted.Render(cwd)
+
+ return cwd + metadata
}
func (h *header) SetDetailsOpen(open bool) {
@@ -25,7 +25,11 @@ import (
"github.com/charmbracelet/crush/internal/tui/util"
)
-var copyKey = key.NewBinding(key.WithKeys("c", "y", "C", "Y"), key.WithHelp("c/y", "copy"))
+// CopyKey is the key binding for copying message content to the clipboard.
+var CopyKey = key.NewBinding(key.WithKeys("c", "y", "C", "Y"), key.WithHelp("c/y", "copy"))
+
+// ClearSelectionKey is the key binding for clearing the current selection in the chat interface.
+var ClearSelectionKey = key.NewBinding(key.WithKeys("esc", "alt+esc"), key.WithHelp("esc", "clear selection"))
// MessageCmp defines the interface for message components in the chat interface.
// It combines standard UI model interfaces with message-specific functionality.
@@ -99,12 +103,15 @@ func (m *messageCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return m, cmd
}
case tea.KeyPressMsg:
- if key.Matches(msg, copyKey) {
- err := clipboard.WriteAll(m.message.Content().Text)
- if err != nil {
- return m, util.ReportError(fmt.Errorf("failed to copy message content to clipboard: %w", err))
- }
- return m, util.ReportInfo("Message copied to clipboard")
+ if key.Matches(msg, CopyKey) {
+ return m, tea.Sequence(
+ tea.SetClipboard(m.message.Content().Text),
+ func() tea.Msg {
+ _ = clipboard.WriteAll(m.message.Content().Text)
+ return nil
+ },
+ util.ReportInfo("Message copied to clipboard"),
+ )
}
}
return m, nil
@@ -278,9 +285,10 @@ func (m *messageCmp) renderThinkingContent() string {
opts := core.StatusOpts{
Title: "Thought for",
Description: duration.String(),
- NoIcon: true,
}
- return t.S().Base.PaddingLeft(1).Render(core.Status(opts, m.textWidth()-1))
+ if duration.String() != "0s" {
+ footer = t.S().Base.PaddingLeft(1).Render(core.Status(opts, m.textWidth()-1))
+ }
} else if finishReason != nil && finishReason.Reason == message.FinishReasonCanceled {
footer = t.S().Base.PaddingLeft(1).Render(m.toMarkdown("*Canceled*"))
} else {
@@ -165,7 +165,7 @@ func (m *toolCallCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}
return m, tea.Batch(cmds...)
case tea.KeyPressMsg:
- if key.Matches(msg, copyKey) {
+ if key.Matches(msg, CopyKey) {
return m, m.copyTool()
}
}
@@ -198,11 +198,14 @@ func (m *toolCallCmp) SetCancelled() {
func (m *toolCallCmp) copyTool() tea.Cmd {
content := m.formatToolForCopy()
- err := clipboard.WriteAll(content)
- if err != nil {
- return util.ReportError(fmt.Errorf("failed to copy tool content to clipboard: %w", err))
- }
- return util.ReportInfo("Tool content copied to clipboard")
+ return tea.Sequence(
+ tea.SetClipboard(content),
+ func() tea.Msg {
+ _ = clipboard.WriteAll(content)
+ return nil
+ },
+ util.ReportInfo("Tool content copied to clipboard"),
+ )
}
func (m *toolCallCmp) formatToolForCopy() string {
@@ -0,0 +1,28 @@
+package chat
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/charmbracelet/crush/internal/tui/styles"
+ "github.com/charmbracelet/lipgloss/v2"
+)
+
+func queuePill(queue int, t *styles.Theme) string {
+ if queue <= 0 {
+ return ""
+ }
+ triangles := styles.ForegroundGrad("▶▶▶▶▶▶▶▶▶", false, t.RedDark, t.Accent)
+ if queue < 10 {
+ triangles = triangles[:queue]
+ }
+
+ allTriangles := strings.Join(triangles, "")
+
+ return t.S().Base.
+ BorderStyle(lipgloss.RoundedBorder()).
+ BorderForeground(t.BgOverlay).
+ PaddingLeft(1).
+ PaddingRight(1).
+ Render(fmt.Sprintf("%s %d Queued", allTriangles, queue))
+}
@@ -3,9 +3,7 @@ package sidebar
import (
"context"
"fmt"
- "os"
"slices"
- "sort"
"strings"
tea "github.com/charmbracelet/bubbletea/v2"
@@ -15,19 +13,21 @@ import (
"github.com/charmbracelet/crush/internal/diff"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/history"
+ "github.com/charmbracelet/crush/internal/home"
"github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
"github.com/charmbracelet/crush/internal/pubsub"
"github.com/charmbracelet/crush/internal/session"
"github.com/charmbracelet/crush/internal/tui/components/chat"
"github.com/charmbracelet/crush/internal/tui/components/core"
"github.com/charmbracelet/crush/internal/tui/components/core/layout"
+ "github.com/charmbracelet/crush/internal/tui/components/files"
"github.com/charmbracelet/crush/internal/tui/components/logo"
+ lspcomponent "github.com/charmbracelet/crush/internal/tui/components/lsp"
+ "github.com/charmbracelet/crush/internal/tui/components/mcp"
"github.com/charmbracelet/crush/internal/tui/styles"
"github.com/charmbracelet/crush/internal/tui/util"
"github.com/charmbracelet/crush/internal/version"
"github.com/charmbracelet/lipgloss/v2"
- "github.com/charmbracelet/x/ansi"
"golang.org/x/text/cases"
"golang.org/x/text/language"
)
@@ -69,13 +69,13 @@ type sidebarCmp struct {
session session.Session
logo string
cwd string
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
compactMode bool
history history.Service
files *csync.Map[string, SessionFile]
}
-func New(history history.Service, lspClients map[string]*lsp.Client, compact bool) Sidebar {
+func New(history history.Service, lspClients *csync.Map[string, *lsp.Client], compact bool) Sidebar {
return &sidebarCmp{
lspClients: lspClients,
history: history,
@@ -191,8 +191,8 @@ func (m *sidebarCmp) handleFileHistoryEvent(event pubsub.Event[history.File]) te
// If the version is not greater than the latest, we ignore it
continue
}
- before := existing.History.initialVersion.Content
- after := existing.History.latestVersion.Content
+ before, _ := fsext.ToUnixLineEndings(existing.History.initialVersion.Content)
+ after, _ := fsext.ToUnixLineEndings(existing.History.latestVersion.Content)
path := existing.History.initialVersion.Path
cwd := config.Get().WorkingDir()
path = strings.TrimPrefix(path, cwd)
@@ -249,7 +249,9 @@ func (m *sidebarCmp) loadSessionFiles() tea.Msg {
for path, fh := range fileMap {
cwd := config.Get().WorkingDir()
path = strings.TrimPrefix(path, cwd)
- _, additions, deletions := diff.GenerateDiff(fh.initialVersion.Content, fh.latestVersion.Content, path)
+ before, _ := fsext.ToUnixLineEndings(fh.initialVersion.Content)
+ after, _ := fsext.ToUnixLineEndings(fh.latestVersion.Content)
+ _, additions, deletions := diff.GenerateDiff(before, after, path)
sessionFiles = append(sessionFiles, SessionFile{
History: fh,
FilePath: path,
@@ -382,459 +384,125 @@ func (m *sidebarCmp) renderSectionsHorizontal() string {
// filesBlockCompact renders the files block with limited width and height for horizontal layout
func (m *sidebarCmp) filesBlockCompact(maxWidth int) string {
- t := styles.CurrentTheme()
-
- section := t.S().Subtle.Render("Modified Files")
-
- files := slices.Collect(m.files.Seq())
-
- if len(files) == 0 {
- content := lipgloss.JoinVertical(
- lipgloss.Left,
- section,
- "",
- t.S().Base.Foreground(t.Border).Render("None"),
- )
- return lipgloss.NewStyle().Width(maxWidth).Render(content)
+ // Convert map to slice and handle type conversion
+ sessionFiles := slices.Collect(m.files.Seq())
+ fileSlice := make([]files.SessionFile, len(sessionFiles))
+ for i, sf := range sessionFiles {
+ fileSlice[i] = files.SessionFile{
+ History: files.FileHistory{
+ InitialVersion: sf.History.initialVersion,
+ LatestVersion: sf.History.latestVersion,
+ },
+ FilePath: sf.FilePath,
+ Additions: sf.Additions,
+ Deletions: sf.Deletions,
+ }
}
- fileList := []string{section, ""}
- sort.Slice(files, func(i, j int) bool {
- return files[i].History.latestVersion.CreatedAt > files[j].History.latestVersion.CreatedAt
- })
-
- // Limit items for horizontal layout - use less space
- maxItems := min(5, len(files))
+ // Limit items for horizontal layout
+ maxItems := min(5, len(fileSlice))
availableHeight := m.height - 8 // Reserve space for header and other content
if availableHeight > 0 {
maxItems = min(maxItems, availableHeight)
}
- filesShown := 0
- for _, file := range files {
- if file.Additions == 0 && file.Deletions == 0 {
- continue
- }
- if filesShown >= maxItems {
- break
- }
-
- var statusParts []string
- if file.Additions > 0 {
- statusParts = append(statusParts, t.S().Base.Foreground(t.Success).Render(fmt.Sprintf("+%d", file.Additions)))
- }
- if file.Deletions > 0 {
- statusParts = append(statusParts, t.S().Base.Foreground(t.Error).Render(fmt.Sprintf("-%d", file.Deletions)))
- }
-
- extraContent := strings.Join(statusParts, " ")
- cwd := config.Get().WorkingDir() + string(os.PathSeparator)
- filePath := file.FilePath
- filePath = strings.TrimPrefix(filePath, cwd)
- filePath = fsext.DirTrim(fsext.PrettyPath(filePath), 2)
- filePath = ansi.Truncate(filePath, maxWidth-lipgloss.Width(extraContent)-2, "…")
-
- fileList = append(fileList,
- core.Status(
- core.StatusOpts{
- IconColor: t.FgMuted,
- NoIcon: true,
- Title: filePath,
- ExtraContent: extraContent,
- },
- maxWidth,
- ),
- )
- filesShown++
- }
-
- // Add "..." indicator if there are more files
- totalFilesWithChanges := 0
- for _, file := range files {
- if file.Additions > 0 || file.Deletions > 0 {
- totalFilesWithChanges++
- }
- }
- if totalFilesWithChanges > maxItems {
- fileList = append(fileList, t.S().Base.Foreground(t.FgMuted).Render("…"))
- }
-
- content := lipgloss.JoinVertical(lipgloss.Left, fileList...)
- return lipgloss.NewStyle().Width(maxWidth).Render(content)
+ return files.RenderFileBlock(fileSlice, files.RenderOptions{
+ MaxWidth: maxWidth,
+ MaxItems: maxItems,
+ ShowSection: true,
+ SectionName: "Modified Files",
+ }, true)
}
// lspBlockCompact renders the LSP block with limited width and height for horizontal layout
func (m *sidebarCmp) lspBlockCompact(maxWidth int) string {
- t := styles.CurrentTheme()
-
- section := t.S().Subtle.Render("LSPs")
-
- lspList := []string{section, ""}
-
- lsp := config.Get().LSP.Sorted()
- if len(lsp) == 0 {
- content := lipgloss.JoinVertical(
- lipgloss.Left,
- section,
- "",
- t.S().Base.Foreground(t.Border).Render("None"),
- )
- return lipgloss.NewStyle().Width(maxWidth).Render(content)
- }
-
// Limit items for horizontal layout
- maxItems := min(5, len(lsp))
+ lspConfigs := config.Get().LSP.Sorted()
+ maxItems := min(5, len(lspConfigs))
availableHeight := m.height - 8
if availableHeight > 0 {
maxItems = min(maxItems, availableHeight)
}
- for i, l := range lsp {
- if i >= maxItems {
- break
- }
-
- iconColor := t.Success
- if l.LSP.Disabled {
- iconColor = t.FgMuted
- }
-
- lspErrs := map[protocol.DiagnosticSeverity]int{
- protocol.SeverityError: 0,
- protocol.SeverityWarning: 0,
- protocol.SeverityHint: 0,
- protocol.SeverityInformation: 0,
- }
- if client, ok := m.lspClients[l.Name]; ok {
- for _, diagnostics := range client.GetDiagnostics() {
- for _, diagnostic := range diagnostics {
- if severity, ok := lspErrs[diagnostic.Severity]; ok {
- lspErrs[diagnostic.Severity] = severity + 1
- }
- }
- }
- }
-
- errs := []string{}
- if lspErrs[protocol.SeverityError] > 0 {
- errs = append(errs, t.S().Base.Foreground(t.Error).Render(fmt.Sprintf("%s %d", styles.ErrorIcon, lspErrs[protocol.SeverityError])))
- }
- if lspErrs[protocol.SeverityWarning] > 0 {
- errs = append(errs, t.S().Base.Foreground(t.Warning).Render(fmt.Sprintf("%s %d", styles.WarningIcon, lspErrs[protocol.SeverityWarning])))
- }
- if lspErrs[protocol.SeverityHint] > 0 {
- errs = append(errs, t.S().Base.Foreground(t.FgHalfMuted).Render(fmt.Sprintf("%s %d", styles.HintIcon, lspErrs[protocol.SeverityHint])))
- }
- if lspErrs[protocol.SeverityInformation] > 0 {
- errs = append(errs, t.S().Base.Foreground(t.FgHalfMuted).Render(fmt.Sprintf("%s %d", styles.InfoIcon, lspErrs[protocol.SeverityInformation])))
- }
-
- lspList = append(lspList,
- core.Status(
- core.StatusOpts{
- IconColor: iconColor,
- Title: l.Name,
- Description: l.LSP.Command,
- ExtraContent: strings.Join(errs, " "),
- },
- maxWidth,
- ),
- )
- }
-
- // Add "..." indicator if there are more LSPs
- if len(lsp) > maxItems {
- lspList = append(lspList, t.S().Base.Foreground(t.FgMuted).Render("…"))
- }
-
- content := lipgloss.JoinVertical(lipgloss.Left, lspList...)
- return lipgloss.NewStyle().Width(maxWidth).Render(content)
+ return lspcomponent.RenderLSPBlock(m.lspClients, lspcomponent.RenderOptions{
+ MaxWidth: maxWidth,
+ MaxItems: maxItems,
+ ShowSection: true,
+ SectionName: "LSPs",
+ }, true)
}
// mcpBlockCompact renders the MCP block with limited width and height for horizontal layout
func (m *sidebarCmp) mcpBlockCompact(maxWidth int) string {
- t := styles.CurrentTheme()
-
- section := t.S().Subtle.Render("MCPs")
-
- mcpList := []string{section, ""}
-
- mcps := config.Get().MCP.Sorted()
- if len(mcps) == 0 {
- content := lipgloss.JoinVertical(
- lipgloss.Left,
- section,
- "",
- t.S().Base.Foreground(t.Border).Render("None"),
- )
- return lipgloss.NewStyle().Width(maxWidth).Render(content)
- }
-
// Limit items for horizontal layout
- maxItems := min(5, len(mcps))
+ maxItems := min(5, len(config.Get().MCP.Sorted()))
availableHeight := m.height - 8
if availableHeight > 0 {
maxItems = min(maxItems, availableHeight)
}
- for i, l := range mcps {
- if i >= maxItems {
- break
- }
-
- iconColor := t.Success
- if l.MCP.Disabled {
- iconColor = t.FgMuted
- }
-
- mcpList = append(mcpList,
- core.Status(
- core.StatusOpts{
- IconColor: iconColor,
- Title: l.Name,
- Description: l.MCP.Command,
- },
- maxWidth,
- ),
- )
- }
-
- // Add "..." indicator if there are more MCPs
- if len(mcps) > maxItems {
- mcpList = append(mcpList, t.S().Base.Foreground(t.FgMuted).Render("…"))
- }
-
- content := lipgloss.JoinVertical(lipgloss.Left, mcpList...)
- return lipgloss.NewStyle().Width(maxWidth).Render(content)
+ return mcp.RenderMCPBlock(mcp.RenderOptions{
+ MaxWidth: maxWidth,
+ MaxItems: maxItems,
+ ShowSection: true,
+ SectionName: "MCPs",
+ }, true)
}
func (m *sidebarCmp) filesBlock() string {
- t := styles.CurrentTheme()
-
- section := t.S().Subtle.Render(
- core.Section("Modified Files", m.getMaxWidth()),
- )
-
- files := slices.Collect(m.files.Seq())
- if len(files) == 0 {
- return lipgloss.JoinVertical(
- lipgloss.Left,
- section,
- "",
- t.S().Base.Foreground(t.Border).Render("None"),
- )
+ // Convert map to slice and handle type conversion
+ sessionFiles := slices.Collect(m.files.Seq())
+ fileSlice := make([]files.SessionFile, len(sessionFiles))
+ for i, sf := range sessionFiles {
+ fileSlice[i] = files.SessionFile{
+ History: files.FileHistory{
+ InitialVersion: sf.History.initialVersion,
+ LatestVersion: sf.History.latestVersion,
+ },
+ FilePath: sf.FilePath,
+ Additions: sf.Additions,
+ Deletions: sf.Deletions,
+ }
}
- fileList := []string{section, ""}
- // order files by the latest version's created time
- sort.Slice(files, func(i, j int) bool {
- return files[i].History.latestVersion.CreatedAt > files[j].History.latestVersion.CreatedAt
- })
-
// Limit the number of files shown
maxFiles, _, _ := m.getDynamicLimits()
- maxFiles = min(len(files), maxFiles)
- filesShown := 0
-
- for _, file := range files {
- if file.Additions == 0 && file.Deletions == 0 {
- continue // skip files with no changes
- }
- if filesShown >= maxFiles {
- break
- }
-
- var statusParts []string
- if file.Additions > 0 {
- statusParts = append(statusParts, t.S().Base.Foreground(t.Success).Render(fmt.Sprintf("+%d", file.Additions)))
- }
- if file.Deletions > 0 {
- statusParts = append(statusParts, t.S().Base.Foreground(t.Error).Render(fmt.Sprintf("-%d", file.Deletions)))
- }
-
- extraContent := strings.Join(statusParts, " ")
- cwd := config.Get().WorkingDir() + string(os.PathSeparator)
- filePath := file.FilePath
- filePath = strings.TrimPrefix(filePath, cwd)
- filePath = fsext.DirTrim(fsext.PrettyPath(filePath), 2)
- filePath = ansi.Truncate(filePath, m.getMaxWidth()-lipgloss.Width(extraContent)-2, "…")
- fileList = append(fileList,
- core.Status(
- core.StatusOpts{
- IconColor: t.FgMuted,
- NoIcon: true,
- Title: filePath,
- ExtraContent: extraContent,
- },
- m.getMaxWidth(),
- ),
- )
- filesShown++
- }
-
- // Add indicator if there are more files
- totalFilesWithChanges := 0
- for _, file := range files {
- if file.Additions > 0 || file.Deletions > 0 {
- totalFilesWithChanges++
- }
- }
- if totalFilesWithChanges > maxFiles {
- remaining := totalFilesWithChanges - maxFiles
- fileList = append(fileList,
- t.S().Base.Foreground(t.FgSubtle).Render(fmt.Sprintf("…and %d more", remaining)),
- )
- }
-
- return lipgloss.JoinVertical(
- lipgloss.Left,
- fileList...,
- )
+ maxFiles = min(len(fileSlice), maxFiles)
+
+ return files.RenderFileBlock(fileSlice, files.RenderOptions{
+ MaxWidth: m.getMaxWidth(),
+ MaxItems: maxFiles,
+ ShowSection: true,
+ SectionName: core.Section("Modified Files", m.getMaxWidth()),
+ }, true)
}
func (m *sidebarCmp) lspBlock() string {
- t := styles.CurrentTheme()
-
- section := t.S().Subtle.Render(
- core.Section("LSPs", m.getMaxWidth()),
- )
-
- lspList := []string{section, ""}
-
- lsp := config.Get().LSP.Sorted()
- if len(lsp) == 0 {
- return lipgloss.JoinVertical(
- lipgloss.Left,
- section,
- "",
- t.S().Base.Foreground(t.Border).Render("None"),
- )
- }
-
// Limit the number of LSPs shown
_, maxLSPs, _ := m.getDynamicLimits()
- maxLSPs = min(len(lsp), maxLSPs)
- for i, l := range lsp {
- if i >= maxLSPs {
- break
- }
-
- iconColor := t.Success
- if l.LSP.Disabled {
- iconColor = t.FgMuted
- }
- lspErrs := map[protocol.DiagnosticSeverity]int{
- protocol.SeverityError: 0,
- protocol.SeverityWarning: 0,
- protocol.SeverityHint: 0,
- protocol.SeverityInformation: 0,
- }
- if client, ok := m.lspClients[l.Name]; ok {
- for _, diagnostics := range client.GetDiagnostics() {
- for _, diagnostic := range diagnostics {
- if severity, ok := lspErrs[diagnostic.Severity]; ok {
- lspErrs[diagnostic.Severity] = severity + 1
- }
- }
- }
- }
-
- errs := []string{}
- if lspErrs[protocol.SeverityError] > 0 {
- errs = append(errs, t.S().Base.Foreground(t.Error).Render(fmt.Sprintf("%s %d", styles.ErrorIcon, lspErrs[protocol.SeverityError])))
- }
- if lspErrs[protocol.SeverityWarning] > 0 {
- errs = append(errs, t.S().Base.Foreground(t.Warning).Render(fmt.Sprintf("%s %d", styles.WarningIcon, lspErrs[protocol.SeverityWarning])))
- }
- if lspErrs[protocol.SeverityHint] > 0 {
- errs = append(errs, t.S().Base.Foreground(t.FgHalfMuted).Render(fmt.Sprintf("%s %d", styles.HintIcon, lspErrs[protocol.SeverityHint])))
- }
- if lspErrs[protocol.SeverityInformation] > 0 {
- errs = append(errs, t.S().Base.Foreground(t.FgHalfMuted).Render(fmt.Sprintf("%s %d", styles.InfoIcon, lspErrs[protocol.SeverityInformation])))
- }
-
- lspList = append(lspList,
- core.Status(
- core.StatusOpts{
- IconColor: iconColor,
- Title: l.Name,
- Description: l.LSP.Command,
- ExtraContent: strings.Join(errs, " "),
- },
- m.getMaxWidth(),
- ),
- )
- }
-
- // Add indicator if there are more LSPs
- if len(lsp) > maxLSPs {
- remaining := len(lsp) - maxLSPs
- lspList = append(lspList,
- t.S().Base.Foreground(t.FgSubtle).Render(fmt.Sprintf("…and %d more", remaining)),
- )
- }
-
- return lipgloss.JoinVertical(
- lipgloss.Left,
- lspList...,
- )
+ lspConfigs := config.Get().LSP.Sorted()
+ maxLSPs = min(len(lspConfigs), maxLSPs)
+
+ return lspcomponent.RenderLSPBlock(m.lspClients, lspcomponent.RenderOptions{
+ MaxWidth: m.getMaxWidth(),
+ MaxItems: maxLSPs,
+ ShowSection: true,
+ SectionName: core.Section("LSPs", m.getMaxWidth()),
+ }, true)
}
func (m *sidebarCmp) mcpBlock() string {
- t := styles.CurrentTheme()
-
- section := t.S().Subtle.Render(
- core.Section("MCPs", m.getMaxWidth()),
- )
-
- mcpList := []string{section, ""}
-
- mcps := config.Get().MCP.Sorted()
- if len(mcps) == 0 {
- return lipgloss.JoinVertical(
- lipgloss.Left,
- section,
- "",
- t.S().Base.Foreground(t.Border).Render("None"),
- )
- }
-
// Limit the number of MCPs shown
_, _, maxMCPs := m.getDynamicLimits()
+ mcps := config.Get().MCP.Sorted()
maxMCPs = min(len(mcps), maxMCPs)
- for i, l := range mcps {
- if i >= maxMCPs {
- break
- }
-
- iconColor := t.Success
- if l.MCP.Disabled {
- iconColor = t.FgMuted
- }
- mcpList = append(mcpList,
- core.Status(
- core.StatusOpts{
- IconColor: iconColor,
- Title: l.Name,
- Description: l.MCP.Command,
- },
- m.getMaxWidth(),
- ),
- )
- }
-
- // Add indicator if there are more MCPs
- if len(mcps) > maxMCPs {
- remaining := len(mcps) - maxMCPs
- mcpList = append(mcpList,
- t.S().Base.Foreground(t.FgSubtle).Render(fmt.Sprintf("…and %d more", remaining)),
- )
- }
- return lipgloss.JoinVertical(
- lipgloss.Left,
- mcpList...,
- )
+ return mcp.RenderMCPBlock(mcp.RenderOptions{
+ MaxWidth: m.getMaxWidth(),
+ MaxItems: maxMCPs,
+ ShowSection: true,
+ SectionName: core.Section("MCPs", m.getMaxWidth()),
+ }, true)
}
func formatTokensAndCost(tokens, contextWindow int64, cost float64) string {
@@ -941,11 +609,5 @@ func (m *sidebarCmp) SetCompactMode(compact bool) {
func cwd() string {
cwd := config.Get().WorkingDir()
t := styles.CurrentTheme()
- // Replace home directory with ~, unless we're at the top level of the
- // home directory).
- homeDir, err := os.UserHomeDir()
- if err == nil && cwd != homeDir {
- cwd = strings.ReplaceAll(cwd, homeDir, "~")
- }
- return t.S().Muted.Render(cwd)
+ return t.S().Muted.Render(home.Short(cwd))
}
@@ -46,7 +46,7 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("←/→", "switch"),
),
Back: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "back"),
),
}
@@ -2,8 +2,6 @@ package splash
import (
"fmt"
- "os"
- "slices"
"strings"
"time"
@@ -12,12 +10,15 @@ import (
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/catwalk/pkg/catwalk"
"github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/home"
"github.com/charmbracelet/crush/internal/llm/prompt"
"github.com/charmbracelet/crush/internal/tui/components/chat"
"github.com/charmbracelet/crush/internal/tui/components/core"
"github.com/charmbracelet/crush/internal/tui/components/core/layout"
"github.com/charmbracelet/crush/internal/tui/components/dialogs/models"
"github.com/charmbracelet/crush/internal/tui/components/logo"
+ lspcomponent "github.com/charmbracelet/crush/internal/tui/components/lsp"
+ "github.com/charmbracelet/crush/internal/tui/components/mcp"
"github.com/charmbracelet/crush/internal/tui/exp/list"
"github.com/charmbracelet/crush/internal/tui/styles"
"github.com/charmbracelet/crush/internal/tui/util"
@@ -101,27 +102,6 @@ func New() Splash {
func (s *splashCmp) SetOnboarding(onboarding bool) {
s.isOnboarding = onboarding
- if onboarding {
- providers, err := config.Providers()
- if err != nil {
- return
- }
- filteredProviders := []catwalk.Provider{}
- simpleProviders := []string{
- "anthropic",
- "openai",
- "gemini",
- "xai",
- "groq",
- "openrouter",
- }
- for _, p := range providers {
- if slices.Contains(simpleProviders, string(p.ID)) {
- filteredProviders = append(filteredProviders, p)
- }
- }
- s.modelList.SetProviders(filteredProviders)
- }
}
func (s *splashCmp) SetProjectInit(needsInit bool) {
@@ -273,6 +253,7 @@ func (s *splashCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return s, cmd
}
if s.needsProjectInit {
+ s.selectedNo = false
return s, s.initializeProject()
}
case key.Matches(msg, s.keyMap.No):
@@ -417,7 +398,8 @@ func (s *splashCmp) setPreferredModel(selectedItem models.ModelOption) tea.Cmd {
}
func (s *splashCmp) getProvider(providerID catwalk.InferenceProvider) (*catwalk.Provider, error) {
- providers, err := config.Providers()
+ cfg := config.Get()
+ providers, err := config.Providers(cfg)
if err != nil {
return nil, err
}
@@ -472,6 +454,7 @@ func (s *splashCmp) View() string {
)
} else if s.needsProjectInit {
titleStyle := t.S().Base.Foreground(t.FgBase)
+ pathStyle := t.S().Base.Foreground(t.Success).PaddingLeft(2)
bodyStyle := t.S().Base.Foreground(t.FgMuted)
shortcutStyle := t.S().Base.Foreground(t.Success)
@@ -479,6 +462,8 @@ func (s *splashCmp) View() string {
lipgloss.Left,
titleStyle.Render("Would you like to initialize this project?"),
"",
+ pathStyle.Render(s.cwd()),
+ "",
bodyStyle.Render("When I initialize your codebase I examine the project and put the"),
bodyStyle.Render("result into a CRUSH.md file which serves as general context."),
"",
@@ -565,7 +550,7 @@ func (s *splashCmp) infoSection() string {
return infoStyle.Render(
lipgloss.JoinVertical(
lipgloss.Left,
- s.cwd(),
+ s.cwdPart(),
"",
s.currentModelBlock(),
"",
@@ -655,44 +640,24 @@ func (s *splashCmp) Bindings() []key.Binding {
}
func (s *splashCmp) getMaxInfoWidth() int {
- return min(s.width-2, 40) // 2 for left padding
+ return min(s.width-2, 90) // 2 for left padding
}
-func (s *splashCmp) cwd() string {
- cwd := config.Get().WorkingDir()
+func (s *splashCmp) cwdPart() string {
t := styles.CurrentTheme()
- homeDir, err := os.UserHomeDir()
- if err == nil && cwd != homeDir {
- cwd = strings.ReplaceAll(cwd, homeDir, "~")
- }
maxWidth := s.getMaxInfoWidth()
- return t.S().Muted.Width(maxWidth).Render(cwd)
+ return t.S().Muted.Width(maxWidth).Render(s.cwd())
+}
+
+func (s *splashCmp) cwd() string {
+ return home.Short(config.Get().WorkingDir())
}
func LSPList(maxWidth int) []string {
- t := styles.CurrentTheme()
- lspList := []string{}
- lsp := config.Get().LSP.Sorted()
- if len(lsp) == 0 {
- return []string{t.S().Base.Foreground(t.Border).Render("None")}
- }
- for _, l := range lsp {
- iconColor := t.Success
- if l.LSP.Disabled {
- iconColor = t.FgMuted
- }
- lspList = append(lspList,
- core.Status(
- core.StatusOpts{
- IconColor: iconColor,
- Title: l.Name,
- Description: l.LSP.Command,
- },
- maxWidth,
- ),
- )
- }
- return lspList
+ return lspcomponent.RenderLSPList(nil, lspcomponent.RenderOptions{
+ MaxWidth: maxWidth,
+ ShowSection: false,
+ })
}
func (s *splashCmp) lspBlock() string {
@@ -709,29 +674,10 @@ func (s *splashCmp) lspBlock() string {
}
func MCPList(maxWidth int) []string {
- t := styles.CurrentTheme()
- mcpList := []string{}
- mcps := config.Get().MCP.Sorted()
- if len(mcps) == 0 {
- return []string{t.S().Base.Foreground(t.Border).Render("None")}
- }
- for _, l := range mcps {
- iconColor := t.Success
- if l.MCP.Disabled {
- iconColor = t.FgMuted
- }
- mcpList = append(mcpList,
- core.Status(
- core.StatusOpts{
- IconColor: iconColor,
- Title: l.Name,
- Description: l.MCP.Command,
- },
- maxWidth,
- ),
- )
- }
- return mcpList
+ return mcp.RenderMCPList(mcp.RenderOptions{
+ MaxWidth: maxWidth,
+ ShowSection: false,
+ })
}
func (s *splashCmp) mcpBlock() string {
@@ -28,7 +28,7 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("enter", "select"),
),
Cancel: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
DownInsert: key.NewBinding(
@@ -82,41 +82,30 @@ func Title(title string, width int) string {
}
type StatusOpts struct {
- Icon string
- IconColor color.Color
- NoIcon bool // If true, no icon will be displayed
+ Icon string // if empty no icon will be shown
Title string
TitleColor color.Color
Description string
DescriptionColor color.Color
- ExtraContent string // Additional content to append after the description
+ ExtraContent string // additional content to append after the description
}
-func Status(ops StatusOpts, width int) string {
+func Status(opts StatusOpts, width int) string {
t := styles.CurrentTheme()
- icon := "●"
- iconColor := t.Success
- if ops.Icon != "" {
- icon = ops.Icon
- } else if ops.NoIcon {
- icon = ""
- }
- if ops.IconColor != nil {
- iconColor = ops.IconColor
- }
- title := ops.Title
+ icon := opts.Icon
+ title := opts.Title
titleColor := t.FgMuted
- if ops.TitleColor != nil {
- titleColor = ops.TitleColor
+ if opts.TitleColor != nil {
+ titleColor = opts.TitleColor
}
- description := ops.Description
+ description := opts.Description
descriptionColor := t.FgSubtle
- if ops.DescriptionColor != nil {
- descriptionColor = ops.DescriptionColor
+ if opts.DescriptionColor != nil {
+ descriptionColor = opts.DescriptionColor
}
title = t.S().Base.Foreground(titleColor).Render(title)
if description != "" {
- extraContentWidth := lipgloss.Width(ops.ExtraContent)
+ extraContentWidth := lipgloss.Width(opts.ExtraContent)
if extraContentWidth > 0 {
extraContentWidth += 1
}
@@ -126,11 +115,11 @@ func Status(ops StatusOpts, width int) string {
content := []string{}
if icon != "" {
- content = append(content, t.S().Base.Foreground(iconColor).Render(icon))
+ content = append(content, icon)
}
content = append(content, title, description)
- if ops.ExtraContent != "" {
- content = append(content, ops.ExtraContent)
+ if opts.ExtraContent != "" {
+ content = append(content, opts.ExtraContent)
}
return strings.Join(content, " ")
@@ -37,7 +37,6 @@ func TestStatus(t *testing.T) {
{
name: "NoIcon",
opts: core.StatusOpts{
- NoIcon: true,
Title: "Info",
Description: "This status has no icon",
},
@@ -47,7 +46,6 @@ func TestStatus(t *testing.T) {
name: "WithColors",
opts: core.StatusOpts{
Icon: "⚠",
- IconColor: color.RGBA{255, 165, 0, 255}, // Orange
Title: "Warning",
TitleColor: color.RGBA{255, 255, 0, 255}, // Yellow
Description: "This is a warning message",
@@ -102,7 +100,6 @@ func TestStatus(t *testing.T) {
name: "AllFieldsWithExtraContent",
opts: core.StatusOpts{
Icon: "🚀",
- IconColor: color.RGBA{0, 255, 0, 255}, // Green
Title: "Deployment",
TitleColor: color.RGBA{0, 0, 255, 255}, // Blue
Description: "Deploying to production environment",
@@ -1 +1 @@
-[38;2;0;255;0m🚀[m [38;2;0;0;255mDeployment[m [38;2;128;128;128mDeploying to production environment[m v1.2.3
+🚀 [38;2;0;0;255mDeployment[m [38;2;128;128;128mDeploying to production environment[m v1.2.3
@@ -1 +1 @@
-[38;2;18;199;143m●[m [38;2;133;131;146mStatus[m [38;2;96;95;107mEverything is working fine[m
+[38;2;133;131;146mStatus[m [38;2;96;95;107mEverything is working fine[m
@@ -1 +1 @@
-[38;2;18;199;143m●[m [38;2;133;131;146mTitle Only[m [38;2;96;95;107m[m
+● [38;2;133;131;146mTitle Only[m [38;2;96;95;107m[m
@@ -1 +1 @@
-[38;2;18;199;143m●[m [38;2;133;131;146mProcessing[m [38;2;96;95;107mThis is a very long description that should be…[m
+[38;2;133;131;146mProcessing[m [38;2;96;95;107mThis is a very long description that should be …[m
@@ -1 +1 @@
-[38;2;18;199;143m●[m [38;2;133;131;146mStatus[m [38;2;96;95;107mShort message[m
+● [38;2;133;131;146mStatus[m [38;2;96;95;107mShort message[m
@@ -1 +1 @@
-[38;2;18;199;143m●[m [38;2;133;131;146mTest[m [38;2;96;95;107mThis will be…[m
+● [38;2;133;131;146mTest[m [38;2;96;95;107mThis will be…[m
@@ -1 +1 @@
-[38;2;255;165;0m⚠[m [38;2;255;255;0mWarning[m [38;2;255;0;0mThis is a warning message[m
+⚠ [38;2;255;255;0mWarning[m [38;2;255;0;0mThis is a warning message[m
@@ -1 +1 @@
-[38;2;18;199;143m✓[m [38;2;133;131;146mSuccess[m [38;2;96;95;107mOperation completed successfully[m
+✓ [38;2;133;131;146mSuccess[m [38;2;96;95;107mOperation completed successfully[m
@@ -1 +1 @@
-[38;2;18;199;143m●[m [38;2;133;131;146mBuild[m [38;2;96;95;107mBuilding project[m [2/5]
+[38;2;133;131;146mBuild[m [38;2;96;95;107mBuilding project[m [2/5]
@@ -1 +1 @@
-[38;2;18;199;143m●[m [38;2;133;131;146mVery Long Title[m [38;2;96;95;107m[m [extra]
+● [38;2;133;131;146mVery Long Title[m [38;2;96;95;107m[m [extra]
@@ -1 +1 @@
-[38;2;18;199;143m●[m [38;2;133;131;146mVery Long Title[m [38;2;96;95;107mThi…[m [extra]
+● [38;2;133;131;146mVery Long Title[m [38;2;96;95;107mThi…[m [extra]
@@ -1 +1 @@
-[38;2;18;199;143m●[m [38;2;133;131;146mVery Long Title[m [38;2;96;95;107mThis is an ex…[m [extra]
+● [38;2;133;131;146mVery Long Title[m [38;2;96;95;107mThis is an ex…[m [extra]
@@ -1 +1 @@
-[38;2;18;199;143m●[m [38;2;133;131;146mVery Long Title[m [38;2;96;95;107mThis is an extremely lo…[m [extra]
+● [38;2;133;131;146mVery Long Title[m [38;2;96;95;107mThis is an extremely lo…[m [extra]
@@ -1 +1 @@
-[38;2;18;199;143m●[m [38;2;133;131;146mVery Long Title[m [38;2;96;95;107mThis is an extremely long descrip…[m [extra]
+● [38;2;133;131;146mVery Long Title[m [38;2;96;95;107mThis is an extremely long descrip…[m [extra]
@@ -60,16 +60,18 @@ type commandDialogCmp struct {
}
type (
- SwitchSessionsMsg struct{}
- NewSessionsMsg struct{}
- SwitchModelMsg struct{}
- QuitMsg struct{}
- OpenFilePickerMsg struct{}
- ToggleHelpMsg struct{}
- ToggleCompactModeMsg struct{}
- ToggleThinkingMsg struct{}
- OpenExternalEditorMsg struct{}
- CompactMsg struct {
+ SwitchSessionsMsg struct{}
+ NewSessionsMsg struct{}
+ SwitchModelMsg struct{}
+ QuitMsg struct{}
+ OpenFilePickerMsg struct{}
+ ToggleHelpMsg struct{}
+ ToggleCompactModeMsg struct{}
+ ToggleThinkingMsg struct{}
+ OpenReasoningDialogMsg struct{}
+ OpenExternalEditorMsg struct{}
+ ToggleYoloModeMsg struct{}
+ CompactMsg struct {
SessionID string
}
)
@@ -119,7 +121,10 @@ func (c *commandDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
case tea.WindowSizeMsg:
c.wWidth = msg.Width
c.wHeight = msg.Height
- return c, c.commandList.SetSize(c.listWidth(), c.listHeight())
+ return c, tea.Batch(
+ c.SetCommandType(c.commandType),
+ c.commandList.SetSize(c.listWidth(), c.listHeight()),
+ )
case tea.KeyPressMsg:
switch {
case key.Matches(msg, c.keyMap.Select):
@@ -296,29 +301,43 @@ func (c *commandDialogCmp) defaultCommands() []Command {
})
}
- // Only show thinking toggle for Anthropic models that can reason
+ // Add reasoning toggle for models that support it
cfg := config.Get()
if agentCfg, ok := cfg.Agents["coder"]; ok {
providerCfg := cfg.GetProviderForModel(agentCfg.Model)
model := cfg.GetModelByType(agentCfg.Model)
- if providerCfg != nil && model != nil &&
- providerCfg.Type == catwalk.TypeAnthropic && model.CanReason {
+ if providerCfg != nil && model != nil && model.CanReason {
selectedModel := cfg.Models[agentCfg.Model]
- status := "Enable"
- if selectedModel.Think {
- status = "Disable"
+
+ // Anthropic models: thinking toggle
+ if providerCfg.Type == catwalk.TypeAnthropic {
+ status := "Enable"
+ if selectedModel.Think {
+ status = "Disable"
+ }
+ commands = append(commands, Command{
+ ID: "toggle_thinking",
+ Title: status + " Thinking Mode",
+ Description: "Toggle model thinking for reasoning-capable models",
+ Handler: func(cmd Command) tea.Cmd {
+ return util.CmdHandler(ToggleThinkingMsg{})
+ },
+ })
+ }
+
+ // OpenAI models: reasoning effort dialog
+ if providerCfg.Type == catwalk.TypeOpenAI && model.HasReasoningEffort {
+ commands = append(commands, Command{
+ ID: "select_reasoning_effort",
+ Title: "Select Reasoning Effort",
+ Description: "Choose reasoning effort level (low/medium/high)",
+ Handler: func(cmd Command) tea.Cmd {
+ return util.CmdHandler(OpenReasoningDialogMsg{})
+ },
+ })
}
- commands = append(commands, Command{
- ID: "toggle_thinking",
- Title: status + " Thinking Mode",
- Description: "Toggle model thinking for reasoning-capable models",
- Handler: func(cmd Command) tea.Cmd {
- return util.CmdHandler(ToggleThinkingMsg{})
- },
- })
}
}
-
// Only show toggle compact mode command if window width is larger than compact breakpoint (90)
if c.wWidth > 120 && c.sessionID != "" {
commands = append(commands, Command{
@@ -360,6 +379,14 @@ func (c *commandDialogCmp) defaultCommands() []Command {
}
return append(commands, []Command{
+ {
+ ID: "toggle_yolo",
+ Title: "Toggle Yolo Mode",
+ Description: "Toggle yolo mode",
+ Handler: func(cmd Command) tea.Cmd {
+ return util.CmdHandler(ToggleYoloModeMsg{})
+ },
+ },
{
ID: "toggle_help",
Title: "Toggle Help",
@@ -31,7 +31,7 @@ func DefaultCommandsDialogKeyMap() CommandsDialogKeyMap {
key.WithHelp("tab", "switch selection"),
),
Close: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
}
@@ -10,6 +10,7 @@ import (
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/home"
"github.com/charmbracelet/crush/internal/tui/util"
)
@@ -54,7 +55,7 @@ func buildCommandSources(cfg *config.Config) []commandSource {
}
// Home directory
- if home, err := os.UserHomeDir(); err == nil {
+ if home := home.Dir(); home != "" {
sources = append(sources, commandSource{
path: filepath.Join(home, ".crush", "commands"),
prefix: UserCommandPrefix,
@@ -73,7 +74,7 @@ func buildCommandSources(cfg *config.Config) []commandSource {
func getXDGCommandsDir() string {
xdgHome := os.Getenv("XDG_CONFIG_HOME")
if xdgHome == "" {
- if home, err := os.UserHomeDir(); err == nil {
+ if home := home.Dir(); home != "" {
xdgHome = filepath.Join(home, ".config")
}
}
@@ -104,17 +104,24 @@ func (c *compactDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}
case agent.AgentEvent:
- if msg.Type == agent.AgentEventTypeSummarize {
+ switch msg.Type {
+ case agent.AgentEventTypeSummarize:
if msg.Error != nil {
c.state = stateError
c.progress = "Error: " + msg.Error.Error()
} else if msg.Done {
- return c, util.CmdHandler(
- dialogs.CloseDialogMsg{},
- )
+ return c, util.CmdHandler(dialogs.CloseDialogMsg{})
} else {
c.progress = msg.Progress
}
+ case agent.AgentEventTypeError:
+ // Handle errors that occur during summarization but are sent as separate error events.
+ c.state = stateError
+ if msg.Error != nil {
+ c.progress = "Error: " + msg.Error.Error()
+ } else {
+ c.progress = "An unknown error occurred"
+ }
}
return c, nil
}
@@ -33,7 +33,7 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("n", "no"),
),
Close: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
}
@@ -11,6 +11,7 @@ import (
"github.com/charmbracelet/bubbles/v2/help"
"github.com/charmbracelet/bubbles/v2/key"
tea "github.com/charmbracelet/bubbletea/v2"
+ "github.com/charmbracelet/crush/internal/home"
"github.com/charmbracelet/crush/internal/message"
"github.com/charmbracelet/crush/internal/tui/components/core"
"github.com/charmbracelet/crush/internal/tui/components/dialogs"
@@ -21,9 +22,10 @@ import (
)
const (
- MaxAttachmentSize = int64(5 * 1024 * 1024) // 5MB
- FilePickerID = "filepicker"
- fileSelectionHight = 10
+ MaxAttachmentSize = int64(5 * 1024 * 1024) // 5MB
+ FilePickerID = "filepicker"
+ fileSelectionHeight = 10
+ previewHeight = 20
)
type FilePickedMsg struct {
@@ -59,7 +61,7 @@ func NewFilePickerCmp(workingDir string) FilePicker {
if cwd, err := os.Getwd(); err == nil {
fp.CurrentDirectory = cwd
} else {
- fp.CurrentDirectory, _ = os.UserHomeDir()
+ fp.CurrentDirectory = home.Dir()
}
}
@@ -68,7 +70,7 @@ func NewFilePickerCmp(workingDir string) FilePicker {
fp.AutoHeight = false
fp.Styles = t.S().FilePicker
fp.Cursor = ""
- fp.SetHeight(fileSelectionHight)
+ fp.SetHeight(fileSelectionHeight)
image := image.New(1, 1, "")
@@ -105,8 +107,7 @@ func (m *model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}
if key.Matches(msg, m.filePicker.KeyMap.Back) {
// make sure we don't go back if we are at the home directory
- homeDir, _ := os.UserHomeDir()
- if m.filePicker.CurrentDirectory == homeDir {
+ if m.filePicker.CurrentDirectory == home.Dir() {
return m, nil
}
}
@@ -160,13 +161,25 @@ func (m *model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
func (m *model) View() string {
t := styles.CurrentTheme()
- content := lipgloss.JoinVertical(
- lipgloss.Left,
+ strs := []string{
t.S().Base.Padding(0, 1, 1, 1).Render(core.Title("Add Image", m.width-4)),
- m.imagePreview(),
+ }
+
+ // hide image preview if the terminal is too small
+ if x, y := m.imagePreviewSize(); x > 0 && y > 0 {
+ strs = append(strs, m.imagePreview())
+ }
+
+ strs = append(
+ strs,
m.filePicker.View(),
t.S().Base.Width(m.width-2).PaddingLeft(1).AlignHorizontal(lipgloss.Left).Render(m.help.View(m.keyMap)),
)
+
+ content := lipgloss.JoinVertical(
+ lipgloss.Left,
+ strs...,
+ )
return m.style().Render(content)
}
@@ -180,12 +193,15 @@ func (m *model) currentImage() string {
}
func (m *model) imagePreview() string {
+ const padding = 2
+
t := styles.CurrentTheme()
w, h := m.imagePreviewSize()
+
if m.currentImage() == "" {
imgPreview := t.S().Base.
- Width(w).
- Height(h).
+ Width(w - padding).
+ Height(h - padding).
Background(t.BgOverlay)
return m.imagePreviewStyle().Render(imgPreview.Render())
@@ -200,7 +216,10 @@ func (m *model) imagePreviewStyle() lipgloss.Style {
}
func (m *model) imagePreviewSize() (int, int) {
- return m.width - 4, min(20, m.wHeight/2)
+ if m.wHeight-fileSelectionHeight-8 > previewHeight {
+ return m.width - 4, previewHeight
+ }
+ return 0, 0
}
func (m *model) style() lipgloss.Style {
@@ -218,7 +237,10 @@ func (m *model) ID() dialogs.DialogID {
// Position implements FilePicker.
func (m *model) Position() (int, int) {
- row := m.wHeight/4 - 2 // just a bit above the center
+ _, imageHeight := m.imagePreviewSize()
+ dialogHeight := fileSelectionHeight + imageHeight + 4
+ row := (m.wHeight - dialogHeight) / 2
+
col := m.wWidth / 2
col -= m.width / 2
return row, col
@@ -38,7 +38,7 @@ func DefaultKeyMap() KeyMap {
),
Close: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "close/exit"),
),
}
@@ -12,7 +12,7 @@ type KeyMap struct {
func DefaultKeyMap() KeyMap {
return KeyMap{
Close: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
),
}
}
@@ -2,12 +2,12 @@ package models
import (
"fmt"
- "strings"
"github.com/charmbracelet/bubbles/v2/spinner"
"github.com/charmbracelet/bubbles/v2/textinput"
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/home"
"github.com/charmbracelet/crush/internal/tui/styles"
"github.com/charmbracelet/lipgloss/v2"
)
@@ -144,7 +144,7 @@ func (a *APIKeyInput) View() string {
inputView := a.input.View()
dataPath := config.GlobalConfigData()
- dataPath = strings.Replace(dataPath, config.HomeDir(), "~", 1)
+ dataPath = home.Short(dataPath)
helpText := styles.CurrentTheme().S().Muted.
Render(fmt.Sprintf("This will be written to the global configuration: %s", dataPath))
@@ -19,7 +19,7 @@ func DefaultKeyMap() KeyMap {
return KeyMap{
Select: key.NewBinding(
key.WithKeys("enter", "ctrl+y"),
- key.WithHelp("enter", "confirm"),
+ key.WithHelp("enter", "choose"),
),
Next: key.NewBinding(
key.WithKeys("down", "ctrl+n"),
@@ -34,8 +34,8 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("tab", "toggle type"),
),
Close: key.NewBinding(
- key.WithKeys("esc"),
- key.WithHelp("esc", "cancel"),
+ key.WithKeys("esc", "alt+esc"),
+ key.WithHelp("esc", "exit"),
),
}
}
@@ -3,6 +3,7 @@ package models
import (
"fmt"
"slices"
+ "strings"
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/catwalk/pkg/catwalk"
@@ -48,8 +49,17 @@ func NewModelListComponent(keyMap list.KeyMap, inputPlaceholder string, shouldRe
func (m *ModelListComponent) Init() tea.Cmd {
var cmds []tea.Cmd
if len(m.providers) == 0 {
- providers, err := config.Providers()
- m.providers = providers
+ cfg := config.Get()
+ providers, err := config.Providers(cfg)
+ filteredProviders := []catwalk.Provider{}
+ for _, p := range providers {
+ hasAPIKeyEnv := strings.HasPrefix(p.APIKey, "$")
+ if hasAPIKeyEnv && p.ID != catwalk.InferenceProviderAzure {
+ filteredProviders = append(filteredProviders, p)
+ }
+ }
+
+ m.providers = filteredProviders
if err != nil {
cmds = append(cmds, util.ReportError(err))
}
@@ -110,7 +120,7 @@ func (m *ModelListComponent) SetModelType(modelType int) tea.Cmd {
// First, add any configured providers that are not in the known providers list
// These should appear at the top of the list
- knownProviders, err := config.Providers()
+ knownProviders, err := config.Providers(cfg)
if err != nil {
return util.ReportError(err)
}
@@ -242,7 +252,3 @@ func (m *ModelListComponent) GetModelType() int {
func (m *ModelListComponent) SetInputPlaceholder(placeholder string) {
m.list.SetInputPlaceholder(placeholder)
}
-
-func (m *ModelListComponent) SetProviders(providers []catwalk.Provider) {
- m.providers = providers
-}
@@ -2,7 +2,6 @@ package models
import (
"fmt"
- "slices"
"time"
"github.com/charmbracelet/bubbles/v2/help"
@@ -80,7 +79,7 @@ func NewModelDialogCmp() ModelDialog {
listKeyMap.UpOneItem = keyMap.Previous
t := styles.CurrentTheme()
- modelList := NewModelListComponent(listKeyMap, "Choose a model for large, complex tasks", true)
+ modelList := NewModelListComponent(listKeyMap, largeModelInputPlaceholder, true)
apiKeyInput := NewAPIKeyInput()
apiKeyInput.SetShowTitle(false)
help := help.New()
@@ -96,24 +95,6 @@ func NewModelDialogCmp() ModelDialog {
}
func (m *modelDialogCmp) Init() tea.Cmd {
- providers, err := config.Providers()
- if err == nil {
- filteredProviders := []catwalk.Provider{}
- simpleProviders := []string{
- "anthropic",
- "openai",
- "gemini",
- "xai",
- "groq",
- "openrouter",
- }
- for _, p := range providers {
- if slices.Contains(simpleProviders, string(p.ID)) {
- filteredProviders = append(filteredProviders, p)
- }
- }
- m.modelList.SetProviders(filteredProviders)
- }
return tea.Batch(m.modelList.Init(), m.apiKeyInput.Init())
}
@@ -189,8 +170,10 @@ func (m *modelDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
util.CmdHandler(dialogs.CloseDialogMsg{}),
util.CmdHandler(ModelSelectedMsg{
Model: config.SelectedModel{
- Model: selectedItem.Model.ID,
- Provider: string(selectedItem.Provider.ID),
+ Model: selectedItem.Model.ID,
+ Provider: string(selectedItem.Provider.ID),
+ ReasoningEffort: selectedItem.Model.DefaultReasoningEffort,
+ MaxTokens: selectedItem.Model.DefaultMaxTokens,
},
ModelType: modelType,
}),
@@ -369,7 +352,8 @@ func (m *modelDialogCmp) isProviderConfigured(providerID string) bool {
}
func (m *modelDialogCmp) getProvider(providerID catwalk.InferenceProvider) (*catwalk.Provider, error) {
- providers, err := config.Providers()
+ cfg := config.Get()
+ providers, err := config.Providers(cfg)
if err != nil {
return nil, err
}
@@ -398,8 +382,10 @@ func (m *modelDialogCmp) saveAPIKeyAndContinue(apiKey string) tea.Cmd {
util.CmdHandler(dialogs.CloseDialogMsg{}),
util.CmdHandler(ModelSelectedMsg{
Model: config.SelectedModel{
- Model: selectedModel.Model.ID,
- Provider: string(selectedModel.Provider.ID),
+ Model: selectedModel.Model.ID,
+ Provider: string(selectedModel.Provider.ID),
+ ReasoningEffort: selectedModel.Model.DefaultReasoningEffort,
+ MaxTokens: selectedModel.Model.DefaultMaxTokens,
},
ModelType: m.selectedModelType,
}),
@@ -42,7 +42,7 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("s", "allow session"),
),
Deny: key.NewBinding(
- key.WithKeys("d", "D", "ctrl+d"),
+ key.WithKeys("d", "D", "ctrl+d", "esc"),
key.WithHelp("d", "deny"),
),
Select: key.NewBinding(
@@ -1,6 +1,7 @@
package permissions
import (
+ "encoding/json"
"fmt"
"strings"
@@ -64,16 +65,23 @@ type permissionDialogCmp struct {
positionRow int // Row position for dialog
positionCol int // Column position for dialog
+ finalDialogHeight int
+
keyMap KeyMap
}
-func NewPermissionDialogCmp(permission permission.PermissionRequest) PermissionDialogCmp {
+func NewPermissionDialogCmp(permission permission.PermissionRequest, opts *Options) PermissionDialogCmp {
+ if opts == nil {
+ opts = &Options{}
+ }
+
// Create viewport for content
contentViewport := viewport.New()
return &permissionDialogCmp{
contentViewPort: contentViewport,
selectedOption: 0, // Default to "Allow"
permission: permission,
+ diffSplitMode: opts.isSplitMode(),
keyMap: DefaultKeyMap(),
contentDirty: true, // Mark as dirty initially
}
@@ -134,26 +142,22 @@ func (p *permissionDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}
case key.Matches(msg, p.keyMap.ScrollDown):
if p.supportsDiffView() {
- p.diffYOffset += 1
- p.contentDirty = true // Mark content as dirty when scrolling
+ p.scrollDown()
return p, nil
}
case key.Matches(msg, p.keyMap.ScrollUp):
if p.supportsDiffView() {
- p.diffYOffset = max(0, p.diffYOffset-1)
- p.contentDirty = true // Mark content as dirty when scrolling
+ p.scrollUp()
return p, nil
}
case key.Matches(msg, p.keyMap.ScrollLeft):
if p.supportsDiffView() {
- p.diffXOffset = max(0, p.diffXOffset-5)
- p.contentDirty = true // Mark content as dirty when scrolling
+ p.scrollLeft()
return p, nil
}
case key.Matches(msg, p.keyMap.ScrollRight):
if p.supportsDiffView() {
- p.diffXOffset += 5
- p.contentDirty = true // Mark content as dirty when scrolling
+ p.scrollRight()
return p, nil
}
default:
@@ -162,11 +166,59 @@ func (p *permissionDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
p.contentViewPort = viewPort
cmds = append(cmds, cmd)
}
+ case tea.MouseWheelMsg:
+ if p.supportsDiffView() && p.isMouseOverDialog(msg.Mouse().X, msg.Mouse().Y) {
+ switch msg.Button {
+ case tea.MouseWheelDown:
+ p.scrollDown()
+ case tea.MouseWheelUp:
+ p.scrollUp()
+ case tea.MouseWheelLeft:
+ p.scrollLeft()
+ case tea.MouseWheelRight:
+ p.scrollRight()
+ }
+ }
}
return p, tea.Batch(cmds...)
}
+func (p *permissionDialogCmp) scrollDown() {
+ p.diffYOffset += 1
+ p.contentDirty = true
+}
+
+func (p *permissionDialogCmp) scrollUp() {
+ p.diffYOffset = max(0, p.diffYOffset-1)
+ p.contentDirty = true
+}
+
+func (p *permissionDialogCmp) scrollLeft() {
+ p.diffXOffset = max(0, p.diffXOffset-5)
+ p.contentDirty = true
+}
+
+func (p *permissionDialogCmp) scrollRight() {
+ p.diffXOffset += 5
+ p.contentDirty = true
+}
+
+// isMouseOverDialog checks if the given mouse coordinates are within the dialog bounds.
+// Returns true if the mouse is over the dialog area, false otherwise.
+func (p *permissionDialogCmp) isMouseOverDialog(x, y int) bool {
+ if p.permission.ID == "" {
+ return false
+ }
+ var (
+ dialogX = p.positionCol
+ dialogY = p.positionRow
+ dialogWidth = p.width
+ dialogHeight = p.finalDialogHeight
+ )
+ return x >= dialogX && x < dialogX+dialogWidth && y >= dialogY && y < dialogY+dialogHeight
+}
+
func (p *permissionDialogCmp) selectCurrentOption() tea.Cmd {
var action PermissionAction
@@ -563,6 +615,35 @@ func (p *permissionDialogCmp) generateDefaultContent() string {
content := p.permission.Description
+ // Add pretty-printed JSON parameters for MCP tools
+ if p.permission.Params != nil {
+ var paramStr string
+
+ // Ensure params is a string
+ if str, ok := p.permission.Params.(string); ok {
+ paramStr = str
+ } else {
+ paramStr = fmt.Sprintf("%v", p.permission.Params)
+ }
+
+ // Try to parse as JSON for pretty printing
+ var parsed any
+ if err := json.Unmarshal([]byte(paramStr), &parsed); err == nil {
+ if b, err := json.MarshalIndent(parsed, "", " "); err == nil {
+ if content != "" {
+ content += "\n\n"
+ }
+ content += string(b)
+ }
+ } else {
+ // Not JSON, show as-is
+ if content != "" {
+ content += "\n\n"
+ }
+ content += paramStr
+ }
+ }
+
content = strings.TrimSpace(content)
content = "\n" + content + "\n"
lines := strings.Split(content, "\n")
@@ -597,9 +678,8 @@ func (p *permissionDialogCmp) generateDefaultContent() string {
func (p *permissionDialogCmp) useDiffSplitMode() bool {
if p.diffSplitMode != nil {
return *p.diffSplitMode
- } else {
- return p.defaultDiffSplitMode
}
+ return p.defaultDiffSplitMode
}
func (p *permissionDialogCmp) styleViewport() string {
@@ -654,7 +734,7 @@ func (p *permissionDialogCmp) render() string {
}
content := lipgloss.JoinVertical(lipgloss.Top, strs...)
- return baseStyle.
+ dialog := baseStyle.
Padding(0, 1).
Border(lipgloss.RoundedBorder()).
BorderForeground(t.BorderFocus).
@@ -662,6 +742,8 @@ func (p *permissionDialogCmp) render() string {
Render(
content,
)
+ p.finalDialogHeight = lipgloss.Height(dialog)
+ return dialog
}
func (p *permissionDialogCmp) View() string {
@@ -741,3 +823,24 @@ func (p *permissionDialogCmp) ID() dialogs.DialogID {
func (p *permissionDialogCmp) Position() (int, int) {
return p.positionRow, p.positionCol
}
+
+// Options holds settings for creating a new permission dialog.
+type Options struct {
+ DiffMode string // split or unified, empty means use defaultDiffSplitMode
+}
+
+// isSplitMode returns the internal representation of the diff-mode switch.
+func (o Options) isSplitMode() *bool {
+ var split bool
+
+ switch o.DiffMode {
+ case "split":
+ split = true
+ case "unified":
+ split = false
+ default:
+ return nil
+ }
+
+ return &split
+}
@@ -37,7 +37,7 @@ func DefaultKeymap() KeyMap {
key.WithHelp("tab", "switch options"),
),
Close: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
}
@@ -80,8 +80,10 @@ func (q *quitDialogCmp) View() string {
}
const horizontalPadding = 3
- yesButton := yesStyle.Padding(0, horizontalPadding).Render("Yep!")
- noButton := noStyle.Padding(0, horizontalPadding).Render("Nope")
+ yesButton := yesStyle.PaddingLeft(horizontalPadding).Underline(true).Render("Y") +
+ yesStyle.PaddingRight(horizontalPadding).Render("ep!")
+ noButton := noStyle.PaddingLeft(horizontalPadding).Underline(true).Render("N") +
+ noStyle.PaddingRight(horizontalPadding).Render("ope")
buttons := baseStyle.Width(lipgloss.Width(question)).Align(lipgloss.Right).Render(
lipgloss.JoinHorizontal(lipgloss.Center, yesButton, " ", noButton),
@@ -0,0 +1,268 @@
+package reasoning
+
+import (
+ "github.com/charmbracelet/bubbles/v2/help"
+ "github.com/charmbracelet/bubbles/v2/key"
+ tea "github.com/charmbracelet/bubbletea/v2"
+ "github.com/charmbracelet/lipgloss/v2"
+
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/tui/components/core"
+ "github.com/charmbracelet/crush/internal/tui/components/dialogs"
+ "github.com/charmbracelet/crush/internal/tui/exp/list"
+ "github.com/charmbracelet/crush/internal/tui/styles"
+ "github.com/charmbracelet/crush/internal/tui/util"
+)
+
+const (
+ ReasoningDialogID dialogs.DialogID = "reasoning"
+
+ defaultWidth int = 50
+)
+
+type listModel = list.FilterableList[list.CompletionItem[EffortOption]]
+
+type EffortOption struct {
+ Title string
+ Effort string
+}
+
+type ReasoningDialog interface {
+ dialogs.DialogModel
+}
+
+type reasoningDialogCmp struct {
+ width int
+ wWidth int // Width of the terminal window
+ wHeight int // Height of the terminal window
+
+ effortList listModel
+ keyMap ReasoningDialogKeyMap
+ help help.Model
+}
+
+type ReasoningEffortSelectedMsg struct {
+ Effort string
+}
+
+type ReasoningDialogKeyMap struct {
+ Next key.Binding
+ Previous key.Binding
+ Select key.Binding
+ Close key.Binding
+}
+
+func DefaultReasoningDialogKeyMap() ReasoningDialogKeyMap {
+ return ReasoningDialogKeyMap{
+ Next: key.NewBinding(
+ key.WithKeys("down", "j", "ctrl+n"),
+ key.WithHelp("↓/j/ctrl+n", "next"),
+ ),
+ Previous: key.NewBinding(
+ key.WithKeys("up", "k", "ctrl+p"),
+ key.WithHelp("↑/k/ctrl+p", "previous"),
+ ),
+ Select: key.NewBinding(
+ key.WithKeys("enter"),
+ key.WithHelp("enter", "select"),
+ ),
+ Close: key.NewBinding(
+ key.WithKeys("esc", "ctrl+c"),
+ key.WithHelp("esc/ctrl+c", "close"),
+ ),
+ }
+}
+
+func (k ReasoningDialogKeyMap) ShortHelp() []key.Binding {
+ return []key.Binding{k.Select, k.Close}
+}
+
+func (k ReasoningDialogKeyMap) FullHelp() [][]key.Binding {
+ return [][]key.Binding{
+ {k.Next, k.Previous},
+ {k.Select, k.Close},
+ }
+}
+
+func NewReasoningDialog() ReasoningDialog {
+ keyMap := DefaultReasoningDialogKeyMap()
+ listKeyMap := list.DefaultKeyMap()
+ listKeyMap.Down.SetEnabled(false)
+ listKeyMap.Up.SetEnabled(false)
+ listKeyMap.DownOneItem = keyMap.Next
+ listKeyMap.UpOneItem = keyMap.Previous
+
+ t := styles.CurrentTheme()
+ inputStyle := t.S().Base.PaddingLeft(1).PaddingBottom(1)
+ effortList := list.NewFilterableList(
+ []list.CompletionItem[EffortOption]{},
+ list.WithFilterInputStyle(inputStyle),
+ list.WithFilterListOptions(
+ list.WithKeyMap(listKeyMap),
+ list.WithWrapNavigation(),
+ list.WithResizeByList(),
+ ),
+ )
+ help := help.New()
+ help.Styles = t.S().Help
+
+ return &reasoningDialogCmp{
+ effortList: effortList,
+ width: defaultWidth,
+ keyMap: keyMap,
+ help: help,
+ }
+}
+
+func (r *reasoningDialogCmp) Init() tea.Cmd {
+ return r.populateEffortOptions()
+}
+
+func (r *reasoningDialogCmp) populateEffortOptions() tea.Cmd {
+ cfg := config.Get()
+ if agentCfg, ok := cfg.Agents["coder"]; ok {
+ selectedModel := cfg.Models[agentCfg.Model]
+ model := cfg.GetModelByType(agentCfg.Model)
+
+ // Get current reasoning effort
+ currentEffort := selectedModel.ReasoningEffort
+ if currentEffort == "" && model != nil {
+ currentEffort = model.DefaultReasoningEffort
+ }
+
+ efforts := []EffortOption{
+ {
+ Title: "Low",
+ Effort: "low",
+ },
+ {
+ Title: "Medium",
+ Effort: "medium",
+ },
+ {
+ Title: "High",
+ Effort: "high",
+ },
+ }
+
+ effortItems := []list.CompletionItem[EffortOption]{}
+ selectedID := ""
+ for _, effort := range efforts {
+ opts := []list.CompletionItemOption{
+ list.WithCompletionID(effort.Effort),
+ }
+ if effort.Effort == currentEffort {
+ opts = append(opts, list.WithCompletionShortcut("current"))
+ selectedID = effort.Effort
+ }
+ effortItems = append(effortItems, list.NewCompletionItem(
+ effort.Title,
+ effort,
+ opts...,
+ ))
+ }
+
+ cmd := r.effortList.SetItems(effortItems)
+ // Set the current effort as the selected item
+ if currentEffort != "" && selectedID != "" {
+ return tea.Sequence(cmd, r.effortList.SetSelected(selectedID))
+ }
+ return cmd
+ }
+ return nil
+}
+
+func (r *reasoningDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
+ switch msg := msg.(type) {
+ case tea.WindowSizeMsg:
+ r.wWidth = msg.Width
+ r.wHeight = msg.Height
+ return r, r.effortList.SetSize(r.listWidth(), r.listHeight())
+ case tea.KeyPressMsg:
+ switch {
+ case key.Matches(msg, r.keyMap.Select):
+ selectedItem := r.effortList.SelectedItem()
+ if selectedItem == nil {
+ return r, nil // No item selected, do nothing
+ }
+ effort := (*selectedItem).Value()
+ return r, tea.Sequence(
+ util.CmdHandler(dialogs.CloseDialogMsg{}),
+ func() tea.Msg {
+ return ReasoningEffortSelectedMsg{
+ Effort: effort.Effort,
+ }
+ },
+ )
+ case key.Matches(msg, r.keyMap.Close):
+ return r, util.CmdHandler(dialogs.CloseDialogMsg{})
+ default:
+ u, cmd := r.effortList.Update(msg)
+ r.effortList = u.(listModel)
+ return r, cmd
+ }
+ }
+ return r, nil
+}
+
+func (r *reasoningDialogCmp) View() string {
+ t := styles.CurrentTheme()
+ listView := r.effortList
+
+ header := t.S().Base.Padding(0, 1, 1, 1).Render(core.Title("Select Reasoning Effort", r.width-4))
+ content := lipgloss.JoinVertical(
+ lipgloss.Left,
+ header,
+ listView.View(),
+ "",
+ t.S().Base.Width(r.width-2).PaddingLeft(1).AlignHorizontal(lipgloss.Left).Render(r.help.View(r.keyMap)),
+ )
+ return r.style().Render(content)
+}
+
+func (r *reasoningDialogCmp) Cursor() *tea.Cursor {
+ if cursor, ok := r.effortList.(util.Cursor); ok {
+ cursor := cursor.Cursor()
+ if cursor != nil {
+ cursor = r.moveCursor(cursor)
+ }
+ return cursor
+ }
+ return nil
+}
+
+func (r *reasoningDialogCmp) listWidth() int {
+	return r.width - 2 // 2 for padding
+}
+
+func (r *reasoningDialogCmp) listHeight() int {
+ listHeight := len(r.effortList.Items()) + 2 + 4 // height based on items + 2 for the input + 4 for the sections
+ return min(listHeight, r.wHeight/2)
+}
+
+func (r *reasoningDialogCmp) moveCursor(cursor *tea.Cursor) *tea.Cursor {
+ row, col := r.Position()
+ offset := row + 3
+ cursor.Y += offset
+ cursor.X = cursor.X + col + 2
+ return cursor
+}
+
+func (r *reasoningDialogCmp) style() lipgloss.Style {
+ t := styles.CurrentTheme()
+ return t.S().Base.
+ Width(r.width).
+ Border(lipgloss.RoundedBorder()).
+ BorderForeground(t.BorderFocus)
+}
+
+func (r *reasoningDialogCmp) Position() (int, int) {
+ row := r.wHeight/4 - 2 // just a bit above the center
+ col := r.wWidth / 2
+ col -= r.width / 2
+ return row, col
+}
+
+func (r *reasoningDialogCmp) ID() dialogs.DialogID {
+ return ReasoningDialogID
+}
@@ -15,7 +15,7 @@ func DefaultKeyMap() KeyMap {
return KeyMap{
Select: key.NewBinding(
key.WithKeys("enter", "tab", "ctrl+y"),
- key.WithHelp("enter", "confirm"),
+ key.WithHelp("enter", "choose"),
),
Next: key.NewBinding(
key.WithKeys("down", "ctrl+n"),
@@ -26,8 +26,8 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("↑", "previous item"),
),
Close: key.NewBinding(
- key.WithKeys("esc"),
- key.WithHelp("esc", "cancel"),
+ key.WithKeys("esc", "alt+esc"),
+ key.WithHelp("esc", "exit"),
),
}
}
@@ -4,6 +4,7 @@ import (
"github.com/charmbracelet/bubbles/v2/help"
"github.com/charmbracelet/bubbles/v2/key"
tea "github.com/charmbracelet/bubbletea/v2"
+ "github.com/charmbracelet/crush/internal/event"
"github.com/charmbracelet/crush/internal/session"
"github.com/charmbracelet/crush/internal/tui/components/chat"
"github.com/charmbracelet/crush/internal/tui/components/core"
@@ -99,6 +100,7 @@ func (s *sessionDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
selectedItem := s.sessionsList.SelectedItem()
if selectedItem != nil {
selected := *selectedItem
+ event.SessionSwitched()
return s, tea.Sequence(
util.CmdHandler(dialogs.CloseDialogMsg{}),
util.CmdHandler(
@@ -0,0 +1,146 @@
+package files
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ "github.com/charmbracelet/lipgloss/v2"
+ "github.com/charmbracelet/x/ansi"
+
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/fsext"
+ "github.com/charmbracelet/crush/internal/history"
+ "github.com/charmbracelet/crush/internal/tui/components/core"
+ "github.com/charmbracelet/crush/internal/tui/styles"
+)
+
+// FileHistory represents a file history with initial and latest versions.
+type FileHistory struct {
+ InitialVersion history.File
+ LatestVersion history.File
+}
+
+// SessionFile represents a file with its history information.
+type SessionFile struct {
+ History FileHistory
+ FilePath string
+ Additions int
+ Deletions int
+}
+
+// RenderOptions contains options for rendering file lists.
+type RenderOptions struct {
+ MaxWidth int
+ MaxItems int
+ ShowSection bool
+ SectionName string
+}
+
+// RenderFileList renders a list of file status items with the given options.
+func RenderFileList(fileSlice []SessionFile, opts RenderOptions) []string {
+ t := styles.CurrentTheme()
+ fileList := []string{}
+
+ if opts.ShowSection {
+ sectionName := opts.SectionName
+ if sectionName == "" {
+ sectionName = "Modified Files"
+ }
+ section := t.S().Subtle.Render(sectionName)
+ fileList = append(fileList, section, "")
+ }
+
+ if len(fileSlice) == 0 {
+ fileList = append(fileList, t.S().Base.Foreground(t.Border).Render("None"))
+ return fileList
+ }
+
+ // Sort files by the latest version's created time
+ sort.Slice(fileSlice, func(i, j int) bool {
+ if fileSlice[i].History.LatestVersion.CreatedAt == fileSlice[j].History.LatestVersion.CreatedAt {
+ return strings.Compare(fileSlice[i].FilePath, fileSlice[j].FilePath) < 0
+ }
+ return fileSlice[i].History.LatestVersion.CreatedAt > fileSlice[j].History.LatestVersion.CreatedAt
+ })
+
+ // Determine how many items to show
+ maxItems := len(fileSlice)
+ if opts.MaxItems > 0 {
+ maxItems = min(opts.MaxItems, len(fileSlice))
+ }
+
+ filesShown := 0
+ for _, file := range fileSlice {
+ if file.Additions == 0 && file.Deletions == 0 {
+ continue // skip files with no changes
+ }
+ if filesShown >= maxItems {
+ break
+ }
+
+ var statusParts []string
+ if file.Additions > 0 {
+ statusParts = append(statusParts, t.S().Base.Foreground(t.Success).Render(fmt.Sprintf("+%d", file.Additions)))
+ }
+ if file.Deletions > 0 {
+ statusParts = append(statusParts, t.S().Base.Foreground(t.Error).Render(fmt.Sprintf("-%d", file.Deletions)))
+ }
+
+ extraContent := strings.Join(statusParts, " ")
+ cwd := config.Get().WorkingDir() + string(os.PathSeparator)
+ filePath := file.FilePath
+ if rel, err := filepath.Rel(cwd, filePath); err == nil {
+ filePath = rel
+ }
+ filePath = fsext.DirTrim(fsext.PrettyPath(filePath), 2)
+ filePath = ansi.Truncate(filePath, opts.MaxWidth-lipgloss.Width(extraContent)-2, "…")
+
+ fileList = append(fileList,
+ core.Status(
+ core.StatusOpts{
+ Title: filePath,
+ ExtraContent: extraContent,
+ },
+ opts.MaxWidth,
+ ),
+ )
+ filesShown++
+ }
+
+ return fileList
+}
+
+// RenderFileBlock renders a complete file block with optional truncation indicator.
+func RenderFileBlock(fileSlice []SessionFile, opts RenderOptions, showTruncationIndicator bool) string {
+ t := styles.CurrentTheme()
+ fileList := RenderFileList(fileSlice, opts)
+
+ // Add truncation indicator if needed
+ if showTruncationIndicator && opts.MaxItems > 0 {
+ totalFilesWithChanges := 0
+ for _, file := range fileSlice {
+ if file.Additions > 0 || file.Deletions > 0 {
+ totalFilesWithChanges++
+ }
+ }
+ if totalFilesWithChanges > opts.MaxItems {
+ remaining := totalFilesWithChanges - opts.MaxItems
+ if remaining == 1 {
+ fileList = append(fileList, t.S().Base.Foreground(t.FgMuted).Render("…"))
+ } else {
+ fileList = append(fileList,
+ t.S().Base.Foreground(t.FgSubtle).Render(fmt.Sprintf("…and %d more", remaining)),
+ )
+ }
+ }
+ }
+
+ content := lipgloss.JoinVertical(lipgloss.Left, fileList...)
+ if opts.MaxWidth > 0 {
+ return lipgloss.NewStyle().Width(opts.MaxWidth).Render(content)
+ }
+ return content
+}
@@ -0,0 +1,162 @@
+package lsp
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/charmbracelet/crush/internal/app"
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/csync"
+ "github.com/charmbracelet/crush/internal/lsp"
+ "github.com/charmbracelet/crush/internal/tui/components/core"
+ "github.com/charmbracelet/crush/internal/tui/styles"
+ "github.com/charmbracelet/lipgloss/v2"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
+)
+
+// RenderOptions contains options for rendering LSP lists.
+type RenderOptions struct {
+ MaxWidth int
+ MaxItems int
+ ShowSection bool
+ SectionName string
+}
+
+// RenderLSPList renders a list of LSP status items with the given options.
+func RenderLSPList(lspClients *csync.Map[string, *lsp.Client], opts RenderOptions) []string {
+ t := styles.CurrentTheme()
+ lspList := []string{}
+
+ if opts.ShowSection {
+ sectionName := opts.SectionName
+ if sectionName == "" {
+ sectionName = "LSPs"
+ }
+ section := t.S().Subtle.Render(sectionName)
+ lspList = append(lspList, section, "")
+ }
+
+ lspConfigs := config.Get().LSP.Sorted()
+ if len(lspConfigs) == 0 {
+ lspList = append(lspList, t.S().Base.Foreground(t.Border).Render("None"))
+ return lspList
+ }
+
+ // Get LSP states
+ lspStates := app.GetLSPStates()
+
+ // Determine how many items to show
+ maxItems := len(lspConfigs)
+ if opts.MaxItems > 0 {
+ maxItems = min(opts.MaxItems, len(lspConfigs))
+ }
+
+ for i, l := range lspConfigs {
+ if i >= maxItems {
+ break
+ }
+
+ // Determine icon color and description based on state
+ icon := t.ItemOfflineIcon
+ description := l.LSP.Command
+
+ if l.LSP.Disabled {
+ description = t.S().Subtle.Render("disabled")
+ } else if state, exists := lspStates[l.Name]; exists {
+ switch state.State {
+ case lsp.StateStarting:
+ icon = t.ItemBusyIcon
+ description = t.S().Subtle.Render("starting...")
+ case lsp.StateReady:
+ icon = t.ItemOnlineIcon
+ description = l.LSP.Command
+ case lsp.StateError:
+ icon = t.ItemErrorIcon
+ if state.Error != nil {
+ description = t.S().Subtle.Render(fmt.Sprintf("error: %s", state.Error.Error()))
+ } else {
+ description = t.S().Subtle.Render("error")
+ }
+ case lsp.StateDisabled:
+ icon = t.ItemOfflineIcon.Foreground(t.FgMuted)
+ description = t.S().Base.Foreground(t.FgMuted).Render("no root markers found")
+ }
+ }
+
+ // Calculate diagnostic counts if we have LSP clients
+ var extraContent string
+ if lspClients != nil {
+ lspErrs := map[protocol.DiagnosticSeverity]int{
+ protocol.SeverityError: 0,
+ protocol.SeverityWarning: 0,
+ protocol.SeverityHint: 0,
+ protocol.SeverityInformation: 0,
+ }
+ if client, ok := lspClients.Get(l.Name); ok {
+ for _, diagnostics := range client.GetDiagnostics() {
+ for _, diagnostic := range diagnostics {
+ if severity, ok := lspErrs[diagnostic.Severity]; ok {
+ lspErrs[diagnostic.Severity] = severity + 1
+ }
+ }
+ }
+ }
+
+ errs := []string{}
+ if lspErrs[protocol.SeverityError] > 0 {
+ errs = append(errs, t.S().Base.Foreground(t.Error).Render(fmt.Sprintf("%s %d", styles.ErrorIcon, lspErrs[protocol.SeverityError])))
+ }
+ if lspErrs[protocol.SeverityWarning] > 0 {
+ errs = append(errs, t.S().Base.Foreground(t.Warning).Render(fmt.Sprintf("%s %d", styles.WarningIcon, lspErrs[protocol.SeverityWarning])))
+ }
+ if lspErrs[protocol.SeverityHint] > 0 {
+ errs = append(errs, t.S().Base.Foreground(t.FgHalfMuted).Render(fmt.Sprintf("%s %d", styles.HintIcon, lspErrs[protocol.SeverityHint])))
+ }
+ if lspErrs[protocol.SeverityInformation] > 0 {
+ errs = append(errs, t.S().Base.Foreground(t.FgHalfMuted).Render(fmt.Sprintf("%s %d", styles.InfoIcon, lspErrs[protocol.SeverityInformation])))
+ }
+ extraContent = strings.Join(errs, " ")
+ }
+
+ lspList = append(lspList,
+ core.Status(
+ core.StatusOpts{
+ Icon: icon.String(),
+ Title: l.Name,
+ Description: description,
+ ExtraContent: extraContent,
+ },
+ opts.MaxWidth,
+ ),
+ )
+ }
+
+ return lspList
+}
+
+// RenderLSPBlock renders a complete LSP block with optional truncation indicator.
+func RenderLSPBlock(lspClients *csync.Map[string, *lsp.Client], opts RenderOptions, showTruncationIndicator bool) string {
+ t := styles.CurrentTheme()
+ lspList := RenderLSPList(lspClients, opts)
+
+ // Add truncation indicator if needed
+ if showTruncationIndicator && opts.MaxItems > 0 {
+ lspConfigs := config.Get().LSP.Sorted()
+ if len(lspConfigs) > opts.MaxItems {
+ remaining := len(lspConfigs) - opts.MaxItems
+ if remaining == 1 {
+ lspList = append(lspList, t.S().Base.Foreground(t.FgMuted).Render("…"))
+ } else {
+ lspList = append(lspList,
+ t.S().Base.Foreground(t.FgSubtle).Render(fmt.Sprintf("…and %d more", remaining)),
+ )
+ }
+ }
+ }
+
+ content := lipgloss.JoinVertical(lipgloss.Left, lspList...)
+ if opts.MaxWidth > 0 {
+ return lipgloss.NewStyle().Width(opts.MaxWidth).Render(content)
+ }
+ return content
+}
@@ -0,0 +1,126 @@
+package mcp
+
+import (
+ "fmt"
+
+ "github.com/charmbracelet/lipgloss/v2"
+
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/llm/agent"
+ "github.com/charmbracelet/crush/internal/tui/components/core"
+ "github.com/charmbracelet/crush/internal/tui/styles"
+)
+
+// RenderOptions contains options for rendering MCP lists.
+type RenderOptions struct {
+ MaxWidth int
+ MaxItems int
+ ShowSection bool
+ SectionName string
+}
+
+// RenderMCPList renders a list of MCP status items with the given options.
+func RenderMCPList(opts RenderOptions) []string {
+ t := styles.CurrentTheme()
+ mcpList := []string{}
+
+ if opts.ShowSection {
+ sectionName := opts.SectionName
+ if sectionName == "" {
+ sectionName = "MCPs"
+ }
+ section := t.S().Subtle.Render(sectionName)
+ mcpList = append(mcpList, section, "")
+ }
+
+ mcps := config.Get().MCP.Sorted()
+ if len(mcps) == 0 {
+ mcpList = append(mcpList, t.S().Base.Foreground(t.Border).Render("None"))
+ return mcpList
+ }
+
+ // Get MCP states
+ mcpStates := agent.GetMCPStates()
+
+ // Determine how many items to show
+ maxItems := len(mcps)
+ if opts.MaxItems > 0 {
+ maxItems = min(opts.MaxItems, len(mcps))
+ }
+
+ for i, l := range mcps {
+ if i >= maxItems {
+ break
+ }
+
+ // Determine icon and color based on state
+ icon := t.ItemOfflineIcon
+ description := l.MCP.Command
+ extraContent := ""
+
+ if state, exists := mcpStates[l.Name]; exists {
+ switch state.State {
+ case agent.MCPStateDisabled:
+ description = t.S().Subtle.Render("disabled")
+ case agent.MCPStateStarting:
+ icon = t.ItemBusyIcon
+ description = t.S().Subtle.Render("starting...")
+ case agent.MCPStateConnected:
+ icon = t.ItemOnlineIcon
+ if state.ToolCount > 0 {
+ extraContent = t.S().Subtle.Render(fmt.Sprintf("%d tools", state.ToolCount))
+ }
+ case agent.MCPStateError:
+ icon = t.ItemErrorIcon
+ if state.Error != nil {
+ description = t.S().Subtle.Render(fmt.Sprintf("error: %s", state.Error.Error()))
+ } else {
+ description = t.S().Subtle.Render("error")
+ }
+ }
+ } else if l.MCP.Disabled {
+ description = t.S().Subtle.Render("disabled")
+ }
+
+ mcpList = append(mcpList,
+ core.Status(
+ core.StatusOpts{
+ Icon: icon.String(),
+ Title: l.Name,
+ Description: description,
+ ExtraContent: extraContent,
+ },
+ opts.MaxWidth,
+ ),
+ )
+ }
+
+ return mcpList
+}
+
+// RenderMCPBlock renders a complete MCP block with optional truncation indicator.
+func RenderMCPBlock(opts RenderOptions, showTruncationIndicator bool) string {
+ t := styles.CurrentTheme()
+ mcpList := RenderMCPList(opts)
+
+ // Add truncation indicator if needed
+ if showTruncationIndicator && opts.MaxItems > 0 {
+ mcps := config.Get().MCP.Sorted()
+ if len(mcps) > opts.MaxItems {
+ remaining := len(mcps) - opts.MaxItems
+ if remaining == 1 {
+ mcpList = append(mcpList, t.S().Base.Foreground(t.FgMuted).Render("…"))
+ } else {
+ mcpList = append(mcpList,
+ t.S().Base.Foreground(t.FgSubtle).Render(fmt.Sprintf("…and %d more", remaining)),
+ )
+ }
+ }
+ }
+
+ content := lipgloss.JoinVertical(lipgloss.Left, mcpList...)
+ if opts.MaxWidth > 0 {
+ return lipgloss.NewStyle().Width(opts.MaxWidth).Render(content)
+ }
+ return content
+}
@@ -408,7 +408,7 @@ func (dv *DiffView) renderUnified() string {
content = ansi.GraphemeWidth.Cut(content, dv.xOffset, len(content))
content = ansi.Truncate(content, dv.codeWidth, "…")
leadingEllipsis = dv.xOffset > 0 && strings.TrimSpace(content) != ""
- return
+ return content, leadingEllipsis
}
outer:
@@ -531,7 +531,7 @@ func (dv *DiffView) renderSplit() string {
content = ansi.GraphemeWidth.Cut(content, dv.xOffset, len(content))
content = ansi.Truncate(content, dv.codeWidth, "…")
leadingEllipsis = dv.xOffset > 0 && strings.TrimSpace(content) != ""
- return
+ return content, leadingEllipsis
}
outer:
@@ -716,7 +716,7 @@ func (dv *DiffView) hunkShownLines(h *udiff.Hunk) (before, after int) {
before++
}
}
- return
+ return before, after
}
func (dv *DiffView) lineStyleForType(t udiff.OpKind) LineStyle {
@@ -69,5 +69,5 @@ func hunkToSplit(h *udiff.Hunk) (sh splitHunk) {
sh.lines = append(sh.lines, &sl)
}
- return
+ return sh
}
@@ -15,12 +15,8 @@ import (
"github.com/sahilm/fuzzy"
)
-var (
- // Pre-compiled regex for checking if a string contains alphabetic characters.
- alphaRegex = regexp.MustCompile(`[a-zA-Z]`)
- // Pre-compiled regex for checking if a string is alphanumeric.
- alphanumericRegex = regexp.MustCompile(`^[a-zA-Z0-9]*$`)
-)
+// Pre-compiled regex for checking if a string is alphanumeric.
+var alphanumericRegex = regexp.MustCompile(`^[a-zA-Z0-9]*$`)
type FilterableItem interface {
Item
@@ -246,7 +242,7 @@ func (f *filterableList[T]) Filter(query string) tea.Cmd {
}
f.selectedItem = ""
- if query == "" {
+ if query == "" || len(f.items) == 0 {
return f.list.SetItems(f.items)
}
@@ -180,7 +180,7 @@ func (f *filterableGroupList[T]) inputHeight() int {
return lipgloss.Height(f.inputStyle.Render(f.input.View()))
}
-func (f *filterableGroupList[T]) Filter(query string) tea.Cmd {
+func (f *filterableGroupList[T]) clearItemState() []tea.Cmd {
var cmds []tea.Cmd
for _, item := range slices.Collect(f.items.Seq()) {
if i, ok := any(item).(layout.Focusable); ok {
@@ -190,41 +190,92 @@ func (f *filterableGroupList[T]) Filter(query string) tea.Cmd {
i.MatchIndexes(make([]int, 0))
}
}
+ return cmds
+}
- f.selectedItem = ""
- if query == "" {
- return f.groupedList.SetGroups(f.groups)
+func (f *filterableGroupList[T]) getGroupName(g Group[T]) string {
+ if section, ok := g.Section.(*itemSectionModel); ok {
+ return strings.ToLower(section.title)
}
+ return strings.ToLower(g.Section.ID())
+}
- var newGroups []Group[T]
- for _, g := range f.groups {
- words := make([]string, len(g.Items))
- for i, item := range g.Items {
- words[i] = strings.ToLower(item.FilterValue())
+func (f *filterableGroupList[T]) setMatchIndexes(item T, indexes []int) {
+ if i, ok := any(item).(HasMatchIndexes); ok {
+ i.MatchIndexes(indexes)
+ }
+}
+
+func (f *filterableGroupList[T]) filterItemsInGroup(group Group[T], query string) []T {
+ if query == "" {
+ // No query, return all items with cleared match indexes
+ var items []T
+ for _, item := range group.Items {
+ f.setMatchIndexes(item, make([]int, 0))
+ items = append(items, item)
}
+ return items
+ }
- matches := fuzzy.Find(query, words)
+ name := f.getGroupName(group) + " "
- sort.SliceStable(matches, func(i, j int) bool {
- return matches[i].Score > matches[j].Score
- })
+ names := make([]string, len(group.Items))
+ for i, item := range group.Items {
+ names[i] = strings.ToLower(name + item.FilterValue())
+ }
+
+ matches := fuzzy.Find(query, names)
+ sort.SliceStable(matches, func(i, j int) bool {
+ return matches[i].Score > matches[j].Score
+ })
+ if len(matches) > 0 {
var matchedItems []T
for _, match := range matches {
- item := g.Items[match.Index]
- if i, ok := any(item).(HasMatchIndexes); ok {
- i.MatchIndexes(match.MatchedIndexes)
+ item := group.Items[match.Index]
+ var idxs []int
+ for _, idx := range match.MatchedIndexes {
+						// skip match indexes that fall inside the group-name prefix and re-base the rest onto the item text
+ if idx < len(name) {
+ continue
+ }
+ idxs = append(idxs, idx-len(name))
}
+ f.setMatchIndexes(item, idxs)
matchedItems = append(matchedItems, item)
}
+ return matchedItems
+ }
+
+ return []T{}
+}
+
+func (f *filterableGroupList[T]) Filter(query string) tea.Cmd {
+ cmds := f.clearItemState()
+ f.selectedItem = ""
+
+ if query == "" {
+ return f.groupedList.SetGroups(f.groups)
+ }
+
+ query = strings.ToLower(strings.ReplaceAll(query, " ", ""))
+
+ var result []Group[T]
+ for _, g := range f.groups {
+ if matches := fuzzy.Find(query, []string{f.getGroupName(g)}); len(matches) > 0 && matches[0].Score > 0 {
+ result = append(result, g)
+ continue
+ }
+ matchedItems := f.filterItemsInGroup(g, query)
if len(matchedItems) > 0 {
- newGroups = append(newGroups, Group[T]{
+ result = append(result, Group[T]{
Section: g.Section,
Items: matchedItems,
})
}
}
- cmds = append(cmds, f.groupedList.SetGroups(newGroups))
+
+ cmds = append(cmds, f.groupedList.SetGroups(result))
return tea.Batch(cmds...)
}
@@ -259,5 +310,6 @@ func (f *filterableGroupList[T]) SetInputWidth(w int) {
}
func (f *filterableGroupList[T]) SetInputPlaceholder(ph string) {
+ f.input.Placeholder = ph
f.placeholder = ph
}
@@ -327,18 +327,20 @@ type itemSectionModel struct {
width int
title string
inx int
+ id string
info string
}
// ID implements ItemSection.
func (m *itemSectionModel) ID() string {
- return uuid.NewString()
+ return m.id
}
func NewItemSection(title string) ItemSection {
return &itemSectionModel{
title: title,
inx: -1,
+ id: uuid.NewString(),
}
}
@@ -3,6 +3,7 @@ package list
import (
"slices"
"strings"
+ "sync"
"github.com/charmbracelet/bubbles/v2/key"
tea "github.com/charmbracelet/bubbletea/v2"
@@ -12,6 +13,9 @@ import (
"github.com/charmbracelet/crush/internal/tui/styles"
"github.com/charmbracelet/crush/internal/tui/util"
"github.com/charmbracelet/lipgloss/v2"
+ uv "github.com/charmbracelet/ultraviolet"
+ "github.com/charmbracelet/x/ansi"
+ "github.com/rivo/uniseg"
)
type Item interface {
@@ -45,6 +49,14 @@ type List[T Item] interface {
DeleteItem(string) tea.Cmd
PrependItem(T) tea.Cmd
AppendItem(T) tea.Cmd
+ StartSelection(col, line int)
+ EndSelection(col, line int)
+ SelectionStop()
+ SelectionClear()
+ SelectWord(col, line int)
+ SelectParagraph(col, line int)
+ GetSelectedText(paddingLeft int) string
+ HasSelection() bool
}
type direction int
@@ -90,9 +102,16 @@ type list[T Item] struct {
renderedItems *csync.Map[string, renderedItem]
+ renderMu sync.Mutex
rendered string
- movingByItem bool
+ movingByItem bool
+ selectionStartCol int
+ selectionStartLine int
+ selectionEndCol int
+ selectionEndLine int
+
+ selectionActive bool
}
type ListOption func(*confOptions)
@@ -170,9 +189,13 @@ func New[T Item](items []T, opts ...ListOption) List[T] {
keyMap: DefaultKeyMap(),
focused: true,
},
- items: csync.NewSliceFrom(items),
- indexMap: csync.NewMap[string, int](),
- renderedItems: csync.NewMap[string, renderedItem](),
+ items: csync.NewSliceFrom(items),
+ indexMap: csync.NewMap[string, int](),
+ renderedItems: csync.NewMap[string, renderedItem](),
+ selectionStartCol: -1,
+ selectionStartLine: -1,
+ selectionEndLine: -1,
+ selectionEndCol: -1,
}
for _, opt := range opts {
opt(list.confOptions)
@@ -264,6 +287,157 @@ func (l *list[T]) handleMouseWheel(msg tea.MouseWheelMsg) (tea.Model, tea.Cmd) {
return l, cmd
}
+// selectionView renders the highlighted selection in the view and returns it
+// as a string. If textOnly is true, it won't render any styles.
+func (l *list[T]) selectionView(view string, textOnly bool) string {
+ t := styles.CurrentTheme()
+ area := uv.Rect(0, 0, l.width, l.height)
+ scr := uv.NewScreenBuffer(area.Dx(), area.Dy())
+ uv.NewStyledString(view).Draw(scr, area)
+
+ selArea := uv.Rectangle{
+ Min: uv.Pos(l.selectionStartCol, l.selectionStartLine),
+ Max: uv.Pos(l.selectionEndCol, l.selectionEndLine),
+ }
+ selArea = selArea.Canon()
+
+ specialChars := make(map[string]bool, len(styles.SelectionIgnoreIcons))
+ for _, icon := range styles.SelectionIgnoreIcons {
+ specialChars[icon] = true
+ }
+
+ isNonWhitespace := func(r rune) bool {
+ return r != ' ' && r != '\t' && r != 0 && r != '\n' && r != '\r'
+ }
+
+ type selectionBounds struct {
+ startX, endX int
+ inSelection bool
+ }
+ lineSelections := make([]selectionBounds, scr.Height())
+
+ for y := range scr.Height() {
+ bounds := selectionBounds{startX: -1, endX: -1, inSelection: false}
+
+ if y >= selArea.Min.Y && y <= selArea.Max.Y {
+ bounds.inSelection = true
+ if selArea.Min.Y == selArea.Max.Y {
+ // Single line selection
+ bounds.startX = selArea.Min.X
+ bounds.endX = selArea.Max.X
+ } else if y == selArea.Min.Y {
+ // First line of multi-line selection
+ bounds.startX = selArea.Min.X
+ bounds.endX = scr.Width()
+ } else if y == selArea.Max.Y {
+ // Last line of multi-line selection
+ bounds.startX = 0
+ bounds.endX = selArea.Max.X
+ } else {
+ // Middle lines
+ bounds.startX = 0
+ bounds.endX = scr.Width()
+ }
+ }
+ lineSelections[y] = bounds
+ }
+
+ type lineBounds struct {
+ start, end int
+ }
+ lineTextBounds := make([]lineBounds, scr.Height())
+
+ // First pass: find text bounds for lines that have selections
+ for y := range scr.Height() {
+ bounds := lineBounds{start: -1, end: -1}
+
+ // Only process lines that might have selections
+ if lineSelections[y].inSelection {
+ for x := range scr.Width() {
+ cell := scr.CellAt(x, y)
+ if cell == nil {
+ continue
+ }
+
+ cellStr := cell.String()
+ if len(cellStr) == 0 {
+ continue
+ }
+
+ char := rune(cellStr[0])
+ isSpecial := specialChars[cellStr]
+
+ if (isNonWhitespace(char) && !isSpecial) || cell.Style.Bg != nil {
+ if bounds.start == -1 {
+ bounds.start = x
+ }
+ bounds.end = x + 1 // Position after last character
+ }
+ }
+ }
+ lineTextBounds[y] = bounds
+ }
+
+ var selectedText strings.Builder
+
+ // Second pass: apply selection highlighting
+ for y := range scr.Height() {
+ selBounds := lineSelections[y]
+ if !selBounds.inSelection {
+ continue
+ }
+
+ textBounds := lineTextBounds[y]
+ if textBounds.start < 0 {
+ if textOnly {
+ // We don't want to get rid of all empty lines in text-only mode
+ selectedText.WriteByte('\n')
+ }
+
+ continue // No text on this line
+ }
+
+ // Only scan within the intersection of text bounds and selection bounds
+ scanStart := max(textBounds.start, selBounds.startX)
+ scanEnd := min(textBounds.end, selBounds.endX)
+
+ for x := scanStart; x < scanEnd; x++ {
+ cell := scr.CellAt(x, y)
+ if cell == nil {
+ continue
+ }
+
+ cellStr := cell.String()
+ if len(cellStr) > 0 && !specialChars[cellStr] {
+ if textOnly {
+ // Collect selected text without styles
+ selectedText.WriteString(cell.String())
+ continue
+ }
+
+ // Text selection styling, which is a Lip Gloss style. We must
+ // extract the values to use in a UV style, below.
+ ts := t.TextSelection
+
+ cell = cell.Clone()
+ cell.Style = cell.Style.Background(ts.GetBackground()).Foreground(ts.GetForeground())
+ scr.SetCell(x, y, cell)
+ }
+ }
+
+ if textOnly {
+ // Make sure we add a newline after each line of selected text
+ selectedText.WriteByte('\n')
+ }
+ }
+
+ if textOnly {
+ return strings.TrimSpace(selectedText.String())
+ }
+
+ return scr.Render()
+}
+
// View implements List.
func (l *list[T]) View() string {
if l.height <= 0 || l.width <= 0 {
@@ -276,14 +450,25 @@ func (l *list[T]) View() string {
start, end := l.viewPosition()
viewStart := max(0, start)
viewEnd := min(len(lines), end+1)
+
+ if viewStart > viewEnd {
+ viewStart = viewEnd
+ }
lines = lines[viewStart:viewEnd]
+
if l.resize {
return strings.Join(lines, "\n")
}
- return t.S().Base.
+ view = t.S().Base.
Height(l.height).
Width(l.width).
Render(strings.Join(lines, "\n"))
+
+ if !l.hasSelection() {
+ return view
+ }
+
+ return l.selectionView(view, false)
}
func (l *list[T]) viewPosition() (int, int) {
@@ -296,6 +481,7 @@ func (l *list[T]) viewPosition() (int, int) {
start = max(0, renderedLines-l.offset-l.height+1)
end = max(0, renderedLines-l.offset)
}
+ start = min(start, end)
return start, end
}
@@ -328,7 +514,9 @@ func (l *list[T]) render() tea.Cmd {
// we are not rendering the first time
if l.rendered != "" {
// rerender everything will mostly hit cache
+ l.renderMu.Lock()
l.rendered, _ = l.renderIterator(0, false, "")
+ l.renderMu.Unlock()
if l.direction == DirectionBackward {
l.recalculateItemPositions()
}
@@ -338,9 +526,10 @@ func (l *list[T]) render() tea.Cmd {
}
return focusChangeCmd
}
+ l.renderMu.Lock()
rendered, finishIndex := l.renderIterator(0, true, "")
l.rendered = rendered
-
+ l.renderMu.Unlock()
// recalculate for the initial items
if l.direction == DirectionBackward {
l.recalculateItemPositions()
@@ -348,7 +537,10 @@ func (l *list[T]) render() tea.Cmd {
renderCmd := func() tea.Msg {
l.offset = 0
// render the rest
+
+ l.renderMu.Lock()
l.rendered, _ = l.renderIterator(finishIndex, false, l.rendered)
+ l.renderMu.Unlock()
// needed for backwards
if l.direction == DirectionBackward {
l.recalculateItemPositions()
@@ -357,7 +549,6 @@ func (l *list[T]) render() tea.Cmd {
if l.focused {
l.scrollToSelection()
}
-
return nil
}
return tea.Batch(focusChangeCmd, renderCmd)
@@ -604,15 +795,28 @@ func (l *list[T]) blurSelectedItem() tea.Cmd {
return tea.Batch(cmds...)
}
-// render iterator renders items starting from the specific index and limits hight if limitHeight != -1
+// renderFragment holds one item's rendered view and the number of blank
+type renderFragment struct {
+ view string
+ gap int
+}
+
+// renderIterator renders items starting from the specific index and limits height if limitHeight != -1
// returns the last index and the rendered content so far
// we pass the rendered content around and don't use l.rendered to prevent jumping of the content
func (l *list[T]) renderIterator(startInx int, limitHeight bool, rendered string) (string, int) {
+ var fragments []renderFragment
+
currentContentHeight := lipgloss.Height(rendered) - 1
itemsLen := l.items.Len()
+ finalIndex := itemsLen
+
+ // first pass: accumulate all fragments to render until the height limit is
+ // reached
for i := startInx; i < itemsLen; i++ {
- if currentContentHeight >= l.height && limitHeight {
- return rendered, i
+ if limitHeight && currentContentHeight >= l.height {
+ finalIndex = i
+ break
}
// cool way to go through the list in both directions
inx := i
@@ -625,6 +829,7 @@ func (l *list[T]) renderIterator(startInx int, limitHeight bool, rendered string
if !ok {
continue
}
+
var rItem renderedItem
if cache, ok := l.renderedItems.Get(item.ID()); ok {
rItem = cache
@@ -634,19 +839,42 @@ func (l *list[T]) renderIterator(startInx int, limitHeight bool, rendered string
rItem.end = currentContentHeight + rItem.height - 1
l.renderedItems.Set(item.ID(), rItem)
}
+
gap := l.gap + 1
if inx == itemsLen-1 {
gap = 0
}
- if l.direction == DirectionForward {
- rendered += rItem.view + strings.Repeat("\n", gap)
- } else {
- rendered = rItem.view + strings.Repeat("\n", gap) + rendered
- }
+ fragments = append(fragments, renderFragment{view: rItem.view, gap: gap})
+
currentContentHeight = rItem.end + 1 + l.gap
}
- return rendered, itemsLen
+
+ // second pass: build rendered string efficiently
+ var b strings.Builder
+ if l.direction == DirectionForward {
+ b.WriteString(rendered)
+ for _, f := range fragments {
+ b.WriteString(f.view)
+ for range f.gap {
+ b.WriteByte('\n')
+ }
+ }
+
+ return b.String(), finalIndex
+ }
+
+ // iterate backwards as fragments are in reversed order
+ for i := len(fragments) - 1; i >= 0; i-- {
+ f := fragments[i]
+ b.WriteString(f.view)
+ for range f.gap {
+ b.WriteByte('\n')
+ }
+ }
+ b.WriteString(rendered)
+
+ return b.String(), finalIndex
}
func (l *list[T]) renderItem(item Item) renderedItem {
@@ -810,21 +1038,67 @@ func (l *list[T]) decrementOffset(n int) {
// MoveDown implements List.
func (l *list[T]) MoveDown(n int) tea.Cmd {
+ oldOffset := l.offset
if l.direction == DirectionForward {
l.incrementOffset(n)
} else {
l.decrementOffset(n)
}
+
+ if oldOffset == l.offset {
+ // no change in offset, so no need to change selection
+ return nil
+ }
+	// if we are not actively selecting, move the whole selection down
+ if l.hasSelection() && !l.selectionActive {
+ if l.selectionStartLine < l.selectionEndLine {
+ l.selectionStartLine -= n
+ l.selectionEndLine -= n
+ } else {
+ l.selectionStartLine -= n
+ l.selectionEndLine -= n
+ }
+ }
+ if l.selectionActive {
+ if l.selectionStartLine < l.selectionEndLine {
+ l.selectionStartLine -= n
+ } else {
+ l.selectionEndLine -= n
+ }
+ }
return l.changeSelectionWhenScrolling()
}
// MoveUp implements List.
func (l *list[T]) MoveUp(n int) tea.Cmd {
+ oldOffset := l.offset
if l.direction == DirectionForward {
l.decrementOffset(n)
} else {
l.incrementOffset(n)
}
+
+ if oldOffset == l.offset {
+ // no change in offset, so no need to change selection
+ return nil
+ }
+
+ if l.hasSelection() && !l.selectionActive {
+ if l.selectionStartLine > l.selectionEndLine {
+ l.selectionStartLine += n
+ l.selectionEndLine += n
+ } else {
+ l.selectionStartLine += n
+ l.selectionEndLine += n
+ }
+ }
+ if l.selectionActive {
+ if l.selectionStartLine > l.selectionEndLine {
+ l.selectionStartLine += n
+ } else {
+ l.selectionEndLine += n
+ }
+ }
return l.changeSelectionWhenScrolling()
}
@@ -1022,3 +1296,176 @@ func (l *list[T]) UpdateItem(id string, item T) tea.Cmd {
}
return tea.Sequence(cmds...)
}
+
+func (l *list[T]) hasSelection() bool {
+ return l.selectionEndCol != l.selectionStartCol || l.selectionEndLine != l.selectionStartLine
+}
+
+// StartSelection implements List.
+func (l *list[T]) StartSelection(col, line int) {
+ l.selectionStartCol = col
+ l.selectionStartLine = line
+ l.selectionEndCol = col
+ l.selectionEndLine = line
+ l.selectionActive = true
+}
+
+// EndSelection implements List.
+func (l *list[T]) EndSelection(col, line int) {
+ if !l.selectionActive {
+ return
+ }
+ l.selectionEndCol = col
+ l.selectionEndLine = line
+}
+
+func (l *list[T]) SelectionStop() {
+ l.selectionActive = false
+}
+
+func (l *list[T]) SelectionClear() {
+ l.selectionStartCol = -1
+ l.selectionStartLine = -1
+ l.selectionEndCol = -1
+ l.selectionEndLine = -1
+ l.selectionActive = false
+}
+
+func (l *list[T]) findWordBoundaries(col, line int) (startCol, endCol int) {
+ lines := strings.Split(l.rendered, "\n")
+ for i, l := range lines {
+ lines[i] = ansi.Strip(l)
+ }
+
+ if l.direction == DirectionBackward && len(lines) > l.height {
+ line = ((len(lines) - 1) - l.height) + line + 1
+ }
+
+ if l.offset > 0 {
+ if l.direction == DirectionBackward {
+ line -= l.offset
+ } else {
+ line += l.offset
+ }
+ }
+
+ if line < 0 || line >= len(lines) {
+ return 0, 0
+ }
+
+ currentLine := lines[line]
+ gr := uniseg.NewGraphemes(currentLine)
+ startCol = -1
+ upTo := col
+ for gr.Next() {
+ if gr.IsWordBoundary() && upTo > 0 {
+ startCol = col - upTo + 1
+ } else if gr.IsWordBoundary() && upTo < 0 {
+ endCol = col - upTo + 1
+ break
+ }
+ if upTo == 0 && gr.Str() == " " {
+ return 0, 0
+ }
+ upTo -= 1
+ }
+ if startCol == -1 {
+ return 0, 0
+ }
+ return startCol, endCol
+}
+
+func (l *list[T]) findParagraphBoundaries(line int) (startLine, endLine int, found bool) {
+ lines := strings.Split(l.rendered, "\n")
+ for i, l := range lines {
+ lines[i] = ansi.Strip(l)
+ for _, icon := range styles.SelectionIgnoreIcons {
+ lines[i] = strings.ReplaceAll(lines[i], icon, " ")
+ }
+ }
+ if l.direction == DirectionBackward && len(lines) > l.height {
+ line = (len(lines) - 1) - l.height + line + 1
+ }
+
+ if l.offset > 0 {
+ if l.direction == DirectionBackward {
+ line -= l.offset
+ } else {
+ line += l.offset
+ }
+ }
+
+ // Ensure line is within bounds
+ if line < 0 || line >= len(lines) {
+ return 0, 0, false
+ }
+
+ if strings.TrimSpace(lines[line]) == "" {
+ return 0, 0, false
+ }
+
+ // Find start of paragraph (search backwards for empty line or start of text)
+ startLine = line
+ for startLine > 0 && strings.TrimSpace(lines[startLine-1]) != "" {
+ startLine--
+ }
+
+ // Find end of paragraph (search forwards for empty line or end of text)
+ endLine = line
+ for endLine < len(lines)-1 && strings.TrimSpace(lines[endLine+1]) != "" {
+ endLine++
+ }
+
+ // revert the line numbers if we are in backward direction
+ if l.direction == DirectionBackward && len(lines) > l.height {
+ startLine = startLine - (len(lines) - 1) + l.height - 1
+ endLine = endLine - (len(lines) - 1) + l.height - 1
+ }
+ if l.offset > 0 {
+ if l.direction == DirectionBackward {
+ startLine += l.offset
+ endLine += l.offset
+ } else {
+ startLine -= l.offset
+ endLine -= l.offset
+ }
+ }
+ return startLine, endLine, true
+}
+
+// SelectWord selects the word at the given position.
+func (l *list[T]) SelectWord(col, line int) {
+ startCol, endCol := l.findWordBoundaries(col, line)
+ l.selectionStartCol = startCol
+ l.selectionStartLine = line
+ l.selectionEndCol = endCol
+ l.selectionEndLine = line
+ l.selectionActive = false // Not actively selecting, just selected
+}
+
+// SelectParagraph selects the paragraph at the given position.
+func (l *list[T]) SelectParagraph(col, line int) {
+ startLine, endLine, found := l.findParagraphBoundaries(line)
+ if !found {
+ return
+ }
+ l.selectionStartCol = 0
+ l.selectionStartLine = startLine
+ l.selectionEndCol = l.width - 1
+ l.selectionEndLine = endLine
+ l.selectionActive = false // Not actively selecting, just selected
+}
+
+// HasSelection returns whether there is an active selection.
+func (l *list[T]) HasSelection() bool {
+ return l.hasSelection()
+}
+
+// GetSelectedText returns the currently selected text; paddingLeft is currently unused.
+func (l *list[T]) GetSelectedText(paddingLeft int) string {
+ if !l.hasSelection() {
+ return ""
+ }
+
+ return l.selectionView(l.View(), true)
+}
@@ -2,12 +2,14 @@ package chat
import (
"context"
+ "fmt"
"time"
"github.com/charmbracelet/bubbles/v2/help"
"github.com/charmbracelet/bubbles/v2/key"
"github.com/charmbracelet/bubbles/v2/spinner"
tea "github.com/charmbracelet/bubbletea/v2"
+ "github.com/charmbracelet/catwalk/pkg/catwalk"
"github.com/charmbracelet/crush/internal/app"
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/history"
@@ -19,14 +21,17 @@ import (
"github.com/charmbracelet/crush/internal/tui/components/chat"
"github.com/charmbracelet/crush/internal/tui/components/chat/editor"
"github.com/charmbracelet/crush/internal/tui/components/chat/header"
+ "github.com/charmbracelet/crush/internal/tui/components/chat/messages"
"github.com/charmbracelet/crush/internal/tui/components/chat/sidebar"
"github.com/charmbracelet/crush/internal/tui/components/chat/splash"
"github.com/charmbracelet/crush/internal/tui/components/completions"
"github.com/charmbracelet/crush/internal/tui/components/core"
"github.com/charmbracelet/crush/internal/tui/components/core/layout"
+ "github.com/charmbracelet/crush/internal/tui/components/dialogs"
"github.com/charmbracelet/crush/internal/tui/components/dialogs/commands"
"github.com/charmbracelet/crush/internal/tui/components/dialogs/filepicker"
"github.com/charmbracelet/crush/internal/tui/components/dialogs/models"
+ "github.com/charmbracelet/crush/internal/tui/components/dialogs/reasoning"
"github.com/charmbracelet/crush/internal/tui/page"
"github.com/charmbracelet/crush/internal/tui/styles"
"github.com/charmbracelet/crush/internal/tui/util"
@@ -165,12 +170,61 @@ func (p *chatPage) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
p.keyboardEnhancements = msg
return p, nil
case tea.MouseWheelMsg:
- if p.isMouseOverChat(msg.Mouse().X, msg.Mouse().Y) {
+ if p.compact {
+ msg.Y -= 1
+ }
+ if p.isMouseOverChat(msg.X, msg.Y) {
u, cmd := p.chat.Update(msg)
p.chat = u.(chat.MessageListCmp)
return p, cmd
}
return p, nil
+ case tea.MouseClickMsg:
+ if p.isOnboarding {
+ return p, nil
+ }
+ if p.compact {
+ msg.Y -= 1
+ }
+ if p.isMouseOverChat(msg.X, msg.Y) {
+ p.focusedPane = PanelTypeChat
+ p.chat.Focus()
+ p.editor.Blur()
+ } else {
+ p.focusedPane = PanelTypeEditor
+ p.editor.Focus()
+ p.chat.Blur()
+ }
+ u, cmd := p.chat.Update(msg)
+ p.chat = u.(chat.MessageListCmp)
+ return p, cmd
+ case tea.MouseMotionMsg:
+ if p.compact {
+ msg.Y -= 1
+ }
+ if msg.Button == tea.MouseLeft {
+ u, cmd := p.chat.Update(msg)
+ p.chat = u.(chat.MessageListCmp)
+ return p, cmd
+ }
+ return p, nil
+ case tea.MouseReleaseMsg:
+ if p.isOnboarding {
+ return p, nil
+ }
+ if p.compact {
+ msg.Y -= 1
+ }
+ if msg.Button == tea.MouseLeft {
+ u, cmd := p.chat.Update(msg)
+ p.chat = u.(chat.MessageListCmp)
+ return p, cmd
+ }
+ return p, nil
+ case chat.SelectionCopyMsg:
+ u, cmd := p.chat.Update(msg)
+ p.chat = u.(chat.MessageListCmp)
+ return p, cmd
case tea.WindowSizeMsg:
u, cmd := p.editor.Update(msg)
p.editor = u.(editor.Editor)
@@ -204,6 +258,10 @@ func (p *chatPage) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return p, tea.Batch(p.SetSize(p.width, p.height), cmd)
case commands.ToggleThinkingMsg:
return p, p.toggleThinking()
+ case commands.OpenReasoningDialogMsg:
+ return p, p.openReasoningDialog()
+ case reasoning.ReasoningEffortSelectedMsg:
+ return p, p.handleReasoningEffortSelected(msg.Effort)
case commands.OpenExternalEditorMsg:
u, cmd := p.editor.Update(msg)
p.editor = u.(editor.Editor)
@@ -256,7 +314,11 @@ func (p *chatPage) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}
return p, tea.Batch(cmds...)
-
+ case commands.ToggleYoloModeMsg:
+ // update the editor style
+ u, cmd := p.editor.Update(msg)
+ p.editor = u.(editor.Editor)
+ return p, cmd
case pubsub.Event[history.File], sidebar.SessionFilesMsg:
u, cmd := p.sidebar.Update(msg)
p.sidebar = u.(sidebar.Sidebar)
@@ -494,6 +556,49 @@ func (p *chatPage) toggleThinking() tea.Cmd {
}
}
+func (p *chatPage) openReasoningDialog() tea.Cmd {
+ return func() tea.Msg {
+ cfg := config.Get()
+ agentCfg := cfg.Agents["coder"]
+ model := cfg.GetModelByType(agentCfg.Model)
+ providerCfg := cfg.GetProviderForModel(agentCfg.Model)
+
+ if providerCfg != nil && model != nil &&
+ providerCfg.Type == catwalk.TypeOpenAI && model.HasReasoningEffort {
+ // Return the OpenDialogMsg directly so it bubbles up to the main TUI
+ return dialogs.OpenDialogMsg{
+ Model: reasoning.NewReasoningDialog(),
+ }
+ }
+ return nil
+ }
+}
+
+func (p *chatPage) handleReasoningEffortSelected(effort string) tea.Cmd {
+ return func() tea.Msg {
+ cfg := config.Get()
+ agentCfg := cfg.Agents["coder"]
+ currentModel := cfg.Models[agentCfg.Model]
+
+ // Update the model configuration
+ currentModel.ReasoningEffort = effort
+ cfg.Models[agentCfg.Model] = currentModel
+
+ // Update the agent with the new configuration
+ if err := p.app.UpdateAgentModel(); err != nil {
+ return util.InfoMsg{
+ Type: util.InfoTypeError,
+ Msg: "Failed to update reasoning effort: " + err.Error(),
+ }
+ }
+
+ return util.InfoMsg{
+ Type: util.InfoTypeInfo,
+ Msg: "Reasoning effort set to " + effort,
+ }
+ }
+}
+
func (p *chatPage) setCompactMode(compact bool) {
if p.compact == compact {
return
@@ -601,10 +706,16 @@ func (p *chatPage) changeFocus() {
func (p *chatPage) cancel() tea.Cmd {
if p.isCanceling {
p.isCanceling = false
- p.app.CoderAgent.Cancel(p.session.ID)
+ if p.app.CoderAgent != nil {
+ p.app.CoderAgent.Cancel(p.session.ID)
+ }
return nil
}
+ if p.app.CoderAgent != nil && p.app.CoderAgent.QueuedPrompts(p.session.ID) > 0 {
+ p.app.CoderAgent.ClearQueue(p.session.ID)
+ return nil
+ }
p.isCanceling = true
return cancelTimerCmd()
}
@@ -635,6 +746,9 @@ func (p *chatPage) sendMessage(text string, attachments []message.Attachment) te
session = newSession
cmds = append(cmds, util.CmdHandler(chat.SessionSelectedMsg(session)))
}
+ if p.app.CoderAgent == nil {
+ return util.ReportError(fmt.Errorf("coder agent is not initialized"))
+ }
_, err := p.app.CoderAgent.Run(context.Background(), session.ID, text, attachments...)
if err != nil {
return util.ReportError(err)
@@ -652,7 +766,7 @@ func (p *chatPage) Bindings() []key.Binding {
cancelBinding := p.keyMap.Cancel
if p.isCanceling {
cancelBinding = key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "press again to cancel"),
)
}
@@ -721,7 +835,7 @@ func (p *chatPage) Help() help.KeyMap {
shortList = append(shortList,
// Go back
key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "back"),
),
)
@@ -756,7 +870,7 @@ func (p *chatPage) Help() help.KeyMap {
key.WithHelp("tab/enter", "complete"),
),
key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
key.NewBinding(
@@ -771,15 +885,21 @@ func (p *chatPage) Help() help.KeyMap {
}
if p.app.CoderAgent != nil && p.app.CoderAgent.IsBusy() {
cancelBinding := key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
)
if p.isCanceling {
cancelBinding = key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "press again to cancel"),
)
}
+ if p.app.CoderAgent != nil && p.app.CoderAgent.QueuedPrompts(p.session.ID) > 0 {
+ cancelBinding = key.NewBinding(
+ key.WithKeys("esc", "alt+esc"),
+ key.WithHelp("esc", "clear queue"),
+ )
+ }
shortList = append(shortList, cancelBinding)
fullList = append(fullList,
[]key.Binding{
@@ -838,10 +958,7 @@ func (p *chatPage) Help() help.KeyMap {
key.WithKeys("up", "down"),
key.WithHelp("↑↓", "scroll"),
),
- key.NewBinding(
- key.WithKeys("c", "y"),
- key.WithHelp("c/y", "copy"),
- ),
+ messages.CopyKey,
)
fullList = append(fullList,
[]key.Binding{
@@ -880,6 +997,10 @@ func (p *chatPage) Help() help.KeyMap {
key.WithHelp("G", "end"),
),
},
+ []key.Binding{
+ messages.CopyKey,
+ messages.ClearSelectionKey,
+ },
)
case PanelTypeEditor:
newLineBinding := key.NewBinding(
@@ -921,7 +1042,7 @@ func (p *chatPage) Help() help.KeyMap {
key.WithHelp("ctrl+r+r", "delete all attachments"),
),
key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel delete mode"),
),
})
@@ -23,7 +23,7 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("ctrl+f", "add attachment"),
),
Cancel: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
Tab: key.NewBinding(
@@ -1,12 +1,13 @@
package styles
import (
+ "github.com/charmbracelet/lipgloss/v2"
"github.com/charmbracelet/x/exp/charmtone"
)
-func NewCrushTheme() *Theme {
- return &Theme{
- Name: "crush",
+func NewCharmtoneTheme() *Theme {
+ t := &Theme{
+ Name: "charmtone",
IsDark: true,
Primary: charmtone.Charple,
@@ -44,6 +45,7 @@ func NewCrushTheme() *Theme {
Blue: charmtone.Malibu,
Yellow: charmtone.Mustard,
+ Citron: charmtone.Citron,
Green: charmtone.Julep,
GreenDark: charmtone.Guac,
@@ -54,4 +56,20 @@ func NewCrushTheme() *Theme {
RedLight: charmtone.Salmon,
Cherry: charmtone.Cherry,
}
+
+ // Text selection.
+ t.TextSelection = lipgloss.NewStyle().Foreground(charmtone.Salt).Background(charmtone.Charple)
+
+ // LSP and MCP status.
+ t.ItemOfflineIcon = lipgloss.NewStyle().Foreground(charmtone.Squid).SetString("●")
+ t.ItemBusyIcon = t.ItemOfflineIcon.Foreground(charmtone.Citron)
+ t.ItemErrorIcon = t.ItemOfflineIcon.Foreground(charmtone.Coral)
+ t.ItemOnlineIcon = t.ItemOfflineIcon.Foreground(charmtone.Guac)
+
+ t.YoloIconFocused = lipgloss.NewStyle().Foreground(charmtone.Oyster).Background(charmtone.Citron).Bold(true).SetString(" ! ")
+ t.YoloIconBlurred = t.YoloIconFocused.Foreground(charmtone.Pepper).Background(charmtone.Squid)
+ t.YoloDotsFocused = lipgloss.NewStyle().Foreground(charmtone.Zest).SetString(":::")
+ t.YoloDotsBlurred = t.YoloDotsFocused.Foreground(charmtone.Squid)
+
+ return t
}
@@ -15,4 +15,27 @@ const (
ToolPending string = "●"
ToolSuccess string = "✓"
ToolError string = "×"
+
+ BorderThin string = "│"
+ BorderThick string = "▌"
)
+
+var SelectionIgnoreIcons = []string{
+ // CheckIcon,
+ // ErrorIcon,
+ // WarningIcon,
+ // InfoIcon,
+ // HintIcon,
+ // SpinnerIcon,
+ // LoadingIcon,
+ // DocumentIcon,
+ // ModelIcon,
+ //
+ // // Tool call icons
+ // ToolPending,
+ // ToolSuccess,
+ // ToolError,
+
+ BorderThin,
+ BorderThick,
+}
@@ -62,6 +62,7 @@ type Theme struct {
// Yellows
Yellow color.Color
+ Citron color.Color
// Greens
Green color.Color
@@ -74,6 +75,21 @@ type Theme struct {
RedLight color.Color
Cherry color.Color
+ // Text selection.
+ TextSelection lipgloss.Style
+
+ // LSP and MCP status indicators.
+ ItemOfflineIcon lipgloss.Style
+ ItemBusyIcon lipgloss.Style
+ ItemErrorIcon lipgloss.Style
+ ItemOnlineIcon lipgloss.Style
+
+ // Editor: Yolo Mode
+ YoloIconFocused lipgloss.Style
+ YoloIconBlurred lipgloss.Style
+ YoloDotsFocused lipgloss.Style
+ YoloDotsBlurred lipgloss.Style
+
styles *Styles
}
@@ -485,26 +501,26 @@ func SetDefaultManager(m *Manager) {
func DefaultManager() *Manager {
if defaultManager == nil {
- defaultManager = NewManager("crush")
+ defaultManager = NewManager()
}
return defaultManager
}
func CurrentTheme() *Theme {
if defaultManager == nil {
- defaultManager = NewManager("crush")
+ defaultManager = NewManager()
}
return defaultManager.Current()
}
-func NewManager(defaultTheme string) *Manager {
+func NewManager() *Manager {
m := &Manager{
themes: make(map[string]*Theme),
}
- m.Register(NewCrushTheme())
-
- m.current = m.themes[defaultTheme]
+ t := NewCharmtoneTheme() // default theme
+ m.Register(t)
+ m.current = m.themes[t.Name]
return m
}
@@ -575,18 +591,18 @@ func Lighten(c color.Color, percent float64) color.Color {
}
}
-// ApplyForegroundGrad renders a given string with a horizontal gradient
-// foreground.
-func ApplyForegroundGrad(input string, color1, color2 color.Color) string {
+func ForegroundGrad(input string, bold bool, color1, color2 color.Color) []string {
if input == "" {
- return ""
+ return []string{""}
}
-
- var o strings.Builder
+ t := CurrentTheme()
if len(input) == 1 {
- return lipgloss.NewStyle().Foreground(color1).Render(input)
+ style := t.S().Base.Foreground(color1)
+ if bold {
+ style.Bold(true)
+ }
+ return []string{style.Render(input)}
}
-
var clusters []string
gr := uniseg.NewGraphemes(input)
for gr.Next() {
@@ -595,9 +611,26 @@ func ApplyForegroundGrad(input string, color1, color2 color.Color) string {
ramp := blendColors(len(clusters), color1, color2)
for i, c := range ramp {
- fmt.Fprint(&o, CurrentTheme().S().Base.Foreground(c).Render(clusters[i]))
+ style := t.S().Base.Foreground(c)
+ if bold {
+ style.Bold(true)
+ }
+ clusters[i] = style.Render(clusters[i])
}
+ return clusters
+}
+// ApplyForegroundGrad renders a given string with a horizontal gradient
+// foreground.
+func ApplyForegroundGrad(input string, color1, color2 color.Color) string {
+ if input == "" {
+ return ""
+ }
+ var o strings.Builder
+ clusters := ForegroundGrad(input, false, color1, color2)
+ for _, c := range clusters {
+ fmt.Fprint(&o, c)
+ }
return o.String()
}
@@ -607,24 +640,11 @@ func ApplyBoldForegroundGrad(input string, color1, color2 color.Color) string {
if input == "" {
return ""
}
- t := CurrentTheme()
-
var o strings.Builder
- if len(input) == 1 {
- return t.S().Base.Bold(true).Foreground(color1).Render(input)
- }
-
- var clusters []string
- gr := uniseg.NewGraphemes(input)
- for gr.Next() {
- clusters = append(clusters, string(gr.Runes()))
+ clusters := ForegroundGrad(input, true, color1, color2)
+ for _, c := range clusters {
+ fmt.Fprint(&o, c)
}
-
- ramp := blendColors(len(clusters), color1, color2)
- for i, c := range ramp {
- fmt.Fprint(&o, t.S().Base.Bold(true).Foreground(c).Render(clusters[i]))
- }
-
return o.String()
}
@@ -10,6 +10,7 @@ import (
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/crush/internal/app"
"github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/event"
"github.com/charmbracelet/crush/internal/llm/agent"
"github.com/charmbracelet/crush/internal/permission"
"github.com/charmbracelet/crush/internal/pubsub"
@@ -41,7 +42,7 @@ func MouseEventFilter(m tea.Model, msg tea.Msg) tea.Msg {
case tea.MouseWheelMsg, tea.MouseMotionMsg:
now := time.Now()
// trackpad is sending too many requests
- if now.Sub(lastMouseEvent) < 5*time.Millisecond {
+ if now.Sub(lastMouseEvent) < 15*time.Millisecond {
return nil
}
lastMouseEvent = now
@@ -76,8 +77,13 @@ type appModel struct {
// Init initializes the application model and returns initial commands.
func (a appModel) Init() tea.Cmd {
+ item, ok := a.pages[a.currentPage]
+ if !ok {
+ return nil
+ }
+
var cmds []tea.Cmd
- cmd := a.pages[a.currentPage].Init()
+ cmd := item.Init()
cmds = append(cmds, cmd)
a.loadedPages[a.currentPage] = true
@@ -99,7 +105,10 @@ func (a *appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
case tea.KeyboardEnhancementsMsg:
for id, page := range a.pages {
m, pageCmd := page.Update(msg)
- a.pages[id] = m.(util.Model)
+ if model, ok := m.(util.Model); ok {
+ a.pages[id] = model
+ }
+
if pageCmd != nil {
cmds = append(cmds, pageCmd)
}
@@ -114,7 +123,10 @@ func (a *appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
case completions.OpenCompletionsMsg, completions.FilterCompletionsMsg,
completions.CloseCompletionsMsg, completions.RepositionCompletionsMsg:
u, completionCmd := a.completions.Update(msg)
- a.completions = u.(completions.Completions)
+ if model, ok := u.(completions.Completions); ok {
+ a.completions = model
+ }
+
return a, completionCmd
// Dialog messages
@@ -174,6 +186,8 @@ func (a *appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return a, util.CmdHandler(dialogs.OpenDialogMsg{
Model: quit.NewQuitDialog(),
})
+ case commands.ToggleYoloModeMsg:
+ a.app.Permissions.SetSkipRequests(!a.app.Permissions.SkipRequests())
case commands.ToggleHelpMsg:
a.status.ToggleFullHelp()
a.showingFullHelp = !a.showingFullHelp
@@ -183,6 +197,7 @@ func (a *appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
if a.app.CoderAgent.IsBusy() {
return a, util.ReportWarn("Agent is busy, please wait...")
}
+
config.Get().UpdatePreferredModel(msg.ModelType, msg.Model)
// Update the agent with the new model/provider configuration
@@ -198,6 +213,8 @@ func (a *appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
// File Picker
case commands.OpenFilePickerMsg:
+ event.FilePickerOpened()
+
if a.dialog.ActiveDialogID() == filepicker.FilePickerID {
// If the commands dialog is already open, close it
return a, util.CmdHandler(dialogs.CloseDialogMsg{})
@@ -207,13 +224,23 @@ func (a *appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
})
// Permissions
case pubsub.Event[permission.PermissionNotification]:
- // forward to page
- updated, cmd := a.pages[a.currentPage].Update(msg)
- a.pages[a.currentPage] = updated.(util.Model)
- return a, cmd
+ item, ok := a.pages[a.currentPage]
+ if !ok {
+ return a, nil
+ }
+
+ // Forward to view.
+ updated, itemCmd := item.Update(msg)
+ if model, ok := updated.(util.Model); ok {
+ a.pages[a.currentPage] = model
+ }
+
+ return a, itemCmd
case pubsub.Event[permission.PermissionRequest]:
return a, util.CmdHandler(dialogs.OpenDialogMsg{
- Model: permissions.NewPermissionDialogCmp(msg.Payload),
+ Model: permissions.NewPermissionDialogCmp(msg.Payload, &permissions.Options{
+ DiffMode: config.Get().Options.TUI.DiffMode,
+ }),
})
case permissions.PermissionResponseMsg:
switch msg.Action {
@@ -232,7 +259,10 @@ func (a *appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
// Forward agent events to dialogs
if a.dialog.HasDialogs() && a.dialog.ActiveDialogID() == compact.CompactDialogID {
u, dialogCmd := a.dialog.Update(payload)
- a.dialog = u.(dialogs.DialogCmp)
+ if model, ok := u.(dialogs.DialogCmp); ok {
+ a.dialog = model
+ }
+
cmds = append(cmds, dialogCmd)
}
@@ -265,41 +295,84 @@ func (a *appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
a.status = s.(status.StatusCmp)
return a, statusCmd
case splash.OnboardingCompleteMsg:
+ item, ok := a.pages[a.currentPage]
+ if !ok {
+ return a, nil
+ }
+
a.isConfigured = config.HasInitialDataConfig()
- updated, pageCmd := a.pages[a.currentPage].Update(msg)
- a.pages[a.currentPage] = updated.(util.Model)
+ updated, pageCmd := item.Update(msg)
+ if model, ok := updated.(util.Model); ok {
+ a.pages[a.currentPage] = model
+ }
+
cmds = append(cmds, pageCmd)
return a, tea.Batch(cmds...)
- // Key Press Messages
+
case tea.KeyPressMsg:
return a, a.handleKeyPressMsg(msg)
case tea.MouseWheelMsg:
- if !a.dialog.HasDialogs() {
- updated, pageCmd := a.pages[a.currentPage].Update(msg)
- a.pages[a.currentPage] = updated.(util.Model)
+ if a.dialog.HasDialogs() {
+ u, dialogCmd := a.dialog.Update(msg)
+ a.dialog = u.(dialogs.DialogCmp)
+ cmds = append(cmds, dialogCmd)
+ } else {
+ item, ok := a.pages[a.currentPage]
+ if !ok {
+ return a, nil
+ }
+
+ updated, pageCmd := item.Update(msg)
+ if model, ok := updated.(util.Model); ok {
+ a.pages[a.currentPage] = model
+ }
+
cmds = append(cmds, pageCmd)
}
return a, tea.Batch(cmds...)
case tea.PasteMsg:
if a.dialog.HasDialogs() {
u, dialogCmd := a.dialog.Update(msg)
- a.dialog = u.(dialogs.DialogCmp)
+ if model, ok := u.(dialogs.DialogCmp); ok {
+ a.dialog = model
+ }
+
cmds = append(cmds, dialogCmd)
} else {
- updated, pageCmd := a.pages[a.currentPage].Update(msg)
- a.pages[a.currentPage] = updated.(util.Model)
+ item, ok := a.pages[a.currentPage]
+ if !ok {
+ return a, nil
+ }
+
+ updated, pageCmd := item.Update(msg)
+ if model, ok := updated.(util.Model); ok {
+ a.pages[a.currentPage] = model
+ }
+
cmds = append(cmds, pageCmd)
}
return a, tea.Batch(cmds...)
}
s, _ := a.status.Update(msg)
a.status = s.(status.StatusCmp)
- updated, cmd := a.pages[a.currentPage].Update(msg)
- a.pages[a.currentPage] = updated.(util.Model)
+
+ item, ok := a.pages[a.currentPage]
+ if !ok {
+ return a, nil
+ }
+
+ updated, cmd := item.Update(msg)
+ if model, ok := updated.(util.Model); ok {
+ a.pages[a.currentPage] = model
+ }
+
if a.dialog.HasDialogs() {
u, dialogCmd := a.dialog.Update(msg)
- a.dialog = u.(dialogs.DialogCmp)
+ if model, ok := u.(dialogs.DialogCmp); ok {
+ a.dialog = model
+ }
+
cmds = append(cmds, dialogCmd)
}
cmds = append(cmds, cmd)
@@ -309,27 +382,38 @@ func (a *appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
// handleWindowResize processes window resize events and updates all components.
func (a *appModel) handleWindowResize(width, height int) tea.Cmd {
var cmds []tea.Cmd
+
+ // TODO: clean up these magic numbers.
if a.showingFullHelp {
height -= 5
} else {
height -= 2
}
+
a.width, a.height = width, height
// Update status bar
s, cmd := a.status.Update(tea.WindowSizeMsg{Width: width, Height: height})
- a.status = s.(status.StatusCmp)
+ if model, ok := s.(status.StatusCmp); ok {
+ a.status = model
+ }
cmds = append(cmds, cmd)
- // Update the current page
+ // Update the current view.
for p, page := range a.pages {
updated, pageCmd := page.Update(tea.WindowSizeMsg{Width: width, Height: height})
- a.pages[p] = updated.(util.Model)
+ if model, ok := updated.(util.Model); ok {
+ a.pages[p] = model
+ }
+
cmds = append(cmds, pageCmd)
}
// Update the dialogs
dialog, cmd := a.dialog.Update(tea.WindowSizeMsg{Width: width, Height: height})
- a.dialog = dialog.(dialogs.DialogCmp)
+ if model, ok := dialog.(dialogs.DialogCmp); ok {
+ a.dialog = model
+ }
+
cmds = append(cmds, cmd)
return tea.Batch(cmds...)
@@ -337,6 +421,16 @@ func (a *appModel) handleWindowResize(width, height int) tea.Cmd {
// handleKeyPressMsg processes keyboard input and routes to appropriate handlers.
func (a *appModel) handleKeyPressMsg(msg tea.KeyPressMsg) tea.Cmd {
+ // Check this first as the user should be able to quit no matter what.
+ if key.Matches(msg, a.keyMap.Quit) {
+ if a.dialog.ActiveDialogID() == quit.QuitDialogID {
+ return tea.Quit
+ }
+ return util.CmdHandler(dialogs.OpenDialogMsg{
+ Model: quit.NewQuitDialog(),
+ })
+ }
+
if a.completions.Open() {
// completions
keyMap := a.completions.KeyMap()
@@ -349,6 +443,11 @@ func (a *appModel) handleKeyPressMsg(msg tea.KeyPressMsg) tea.Cmd {
return cmd
}
}
+ if a.dialog.HasDialogs() {
+ u, dialogCmd := a.dialog.Update(msg)
+ a.dialog = u.(dialogs.DialogCmp)
+ return dialogCmd
+ }
switch {
// help
case key.Matches(msg, a.keyMap.Help):
@@ -356,14 +455,6 @@ func (a *appModel) handleKeyPressMsg(msg tea.KeyPressMsg) tea.Cmd {
a.showingFullHelp = !a.showingFullHelp
return a.handleWindowResize(a.wWidth, a.wHeight)
// dialogs
- case key.Matches(msg, a.keyMap.Quit):
- if a.dialog.ActiveDialogID() == quit.QuitDialogID {
- return tea.Quit
- }
- return util.CmdHandler(dialogs.OpenDialogMsg{
- Model: quit.NewQuitDialog(),
- })
-
case key.Matches(msg, a.keyMap.Commands):
// if the app is not configured show no commands
if !a.isConfigured {
@@ -409,15 +500,16 @@ func (a *appModel) handleKeyPressMsg(msg tea.KeyPressMsg) tea.Cmd {
}
return tea.Suspend
default:
- if a.dialog.HasDialogs() {
- u, dialogCmd := a.dialog.Update(msg)
- a.dialog = u.(dialogs.DialogCmp)
- return dialogCmd
- } else {
- updated, cmd := a.pages[a.currentPage].Update(msg)
- a.pages[a.currentPage] = updated.(util.Model)
- return cmd
+ item, ok := a.pages[a.currentPage]
+ if !ok {
+ return nil
+ }
+
+ updated, cmd := item.Update(msg)
+ if model, ok := updated.(util.Model); ok {
+ a.pages[a.currentPage] = model
}
+ return cmd
}
}
@@ -10,11 +10,13 @@ import (
_ "github.com/joho/godotenv/autoload" // automatically load .env files
"github.com/charmbracelet/crush/internal/cmd"
+ "github.com/charmbracelet/crush/internal/event"
"github.com/charmbracelet/crush/internal/log"
)
func main() {
defer log.RecoverPanic("main", func() {
+ event.Flush()
slog.Error("Application terminated due to unhandled panic")
})
@@ -3,8 +3,27 @@
"$id": "https://github.com/charmbracelet/crush/internal/config/config",
"$ref": "#/$defs/Config",
"$defs": {
+ "Attribution": {
+ "properties": {
+ "co_authored_by": {
+ "type": "boolean",
+ "description": "Add Co-Authored-By trailer to commit messages",
+ "default": true
+ },
+ "generated_with": {
+ "type": "boolean",
+          "description": "Add a 'Generated with Crush' line to commit messages, issues, and PRs",
+ "default": true
+ }
+ },
+ "additionalProperties": false,
+ "type": "object"
+ },
"Config": {
"properties": {
+ "$schema": {
+ "type": "string"
+ },
"models": {
"additionalProperties": {
"$ref": "#/$defs/SelectedModel"
@@ -41,7 +60,7 @@
},
"LSPConfig": {
"properties": {
- "enabled": {
+ "disabled": {
"type": "boolean",
"description": "Whether this LSP server is disabled",
"default": false
@@ -60,8 +79,47 @@
"type": "array",
"description": "Arguments to pass to the LSP server command"
},
+ "env": {
+ "additionalProperties": {
+ "type": "string"
+ },
+ "type": "object",
+          "description": "Environment variables to set for the LSP server command"
+ },
+ "filetypes": {
+ "items": {
+ "type": "string",
+ "examples": [
+ "go",
+ "mod",
+ "rs",
+ "c",
+ "js",
+ "ts"
+ ]
+ },
+ "type": "array",
+ "description": "File types this LSP server handles"
+ },
+ "root_markers": {
+ "items": {
+ "type": "string",
+ "examples": [
+ "go.mod",
+ "package.json",
+ "Cargo.toml"
+ ]
+ },
+ "type": "array",
+ "description": "Files or directories that indicate the project root"
+ },
+ "init_options": {
+ "type": "object",
+ "description": "Initialization options passed to the LSP server during initialize request"
+ },
"options": {
- "description": "LSP server-specific configuration options"
+ "type": "object",
+ "description": "LSP server-specific settings passed during initialization"
}
},
"additionalProperties": false,
@@ -122,6 +180,16 @@
"description": "Whether this MCP server is disabled",
"default": false
},
+ "timeout": {
+ "type": "integer",
+ "description": "Timeout in seconds for MCP server connections",
+ "default": 15,
+ "examples": [
+ 30,
+ 60,
+ 120
+ ]
+ },
"headers": {
"additionalProperties": {
"type": "string"
@@ -236,10 +304,34 @@
"examples": [
".crush"
]
+ },
+ "disabled_tools": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array",
+ "description": "Tools to disable"
+ },
+ "disable_provider_auto_update": {
+ "type": "boolean",
+          "description": "Disable automatic provider updates",
+ "default": false
+ },
+ "attribution": {
+ "$ref": "#/$defs/Attribution",
+ "description": "Attribution settings for generated content"
+ },
+ "disable_metrics": {
+ "type": "boolean",
+ "description": "Disable sending metrics",
+ "default": false
}
},
"additionalProperties": false,
- "type": "object"
+ "type": "object",
+ "required": [
+ "disabled_tools"
+ ]
},
"Permissions": {
"properties": {
@@ -384,6 +476,14 @@
"type": "boolean",
"description": "Enable compact mode for the TUI interface",
"default": false
+ },
+ "diff_mode": {
+ "type": "string",
+ "enum": [
+ "unified",
+ "split"
+ ],
+ "description": "Diff mode for the TUI interface"
}
},
"additionalProperties": false,
@@ -2,11 +2,11 @@ ISSUES=$(gh issue list --state=all --limit=1000 --json "number" -t '{{range .}}{
PRS=$(gh pr list --state=all --limit=1000 --json "number" -t '{{range .}}{{printf "%.0f\n" .number}}{{end}}')
for issue in $ISSUES; do
- echo "Dispatching issue-labeler.yml for $issue"
- gh workflow run issue-labeler.yml -f issue-number="$issue"
+ echo "Dispatching labeler.yml for $issue"
+ gh workflow run labeler.yml -f issue-number="$issue"
done
for pr in $PRS; do
- echo "Dispatching issue-labeler.yml for $pr"
- gh workflow run issue-labeler.yml -f issue-number="$pr"
+ echo "Dispatching labeler.yml for $pr"
+ gh workflow run labeler.yml -f issue-number="$pr"
done