diff --git a/.github/cla-signatures.json b/.github/cla-signatures.json
index d893fbfe2cd3f16295b636031d97c3cd5a163c51..dc7d5873d3110320f09b8457b696a47d307ef41a 100644
--- a/.github/cla-signatures.json
+++ b/.github/cla-signatures.json
@@ -599,6 +599,110 @@
"created_at": "2025-09-11T17:37:57Z",
"repoId": 987670088,
"pullRequestNo": 1017
+ },
+ {
+ "name": "Amolith",
+ "id": 29460675,
+ "comment_id": 3285628360,
+ "created_at": "2025-09-12T15:00:12Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1025
+ },
+ {
+ "name": "WhiskeyJack96",
+ "id": 10688621,
+ "comment_id": 3290164209,
+ "created_at": "2025-09-15T01:16:08Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1037
+ },
+ {
+ "name": "Grin1024",
+ "id": 34613592,
+ "comment_id": 3290570050,
+ "created_at": "2025-09-15T05:42:29Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1042
+ },
+ {
+ "name": "dvcrn",
+ "id": 688326,
+ "comment_id": 3296702457,
+ "created_at": "2025-09-16T08:48:17Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1056
+ },
+ {
+ "name": "khushveer007",
+ "id": 122660325,
+ "comment_id": 3301369568,
+ "created_at": "2025-09-17T05:32:53Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1065
+ },
+ {
+ "name": "msteinert",
+ "id": 202852,
+ "comment_id": 3312218015,
+ "created_at": "2025-09-19T13:31:42Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1084
+ },
+ {
+ "name": "zoete",
+ "id": 33318916,
+ "comment_id": 3314945939,
+ "created_at": "2025-09-20T12:37:42Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1095
+ },
+ {
+ "name": "Kaneki-x",
+ "id": 6857108,
+ "comment_id": 3338743039,
+ "created_at": "2025-09-26T13:30:16Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1135
+ },
+ {
+ "name": "maxious",
+ "id": 81432,
+ "comment_id": 3341700737,
+ "created_at": "2025-09-27T13:09:22Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1141
+ },
+ {
+ "name": "Wangch29",
+ "id": 115294077,
+ "comment_id": 3344526018,
+ "created_at": "2025-09-29T01:19:40Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1148
+ },
+ {
+ "name": "kucukkanat",
+ "id": 914316,
+ "comment_id": 3369230313,
+ "created_at": "2025-10-05T18:13:57Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1195
+ },
+ {
+ "name": "thuggys",
+ "id": 150315417,
+ "comment_id": 3369149503,
+ "created_at": "2025-10-05T15:59:55Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1194
+ },
+ {
+ "name": "nikaro",
+ "id": 3918653,
+ "comment_id": 3373586148,
+ "created_at": "2025-10-06T19:31:50Z",
+ "repoId": 987670088,
+ "pullRequestNo": 1200
}
]
}
\ No newline at end of file
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 94ff8c80c13621146bf40ccd90325c65b86bbaf3..cf970b5887bc33fd822ab7fc4fe4540df045a6e1 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -17,6 +17,13 @@ updates:
all:
patterns:
- "*"
+ ignore:
+ - dependency-name: github.com/charmbracelet/bubbletea/v2
+ versions:
+ - v2.0.0-beta1
+ - dependency-name: github.com/charmbracelet/lipgloss/v2
+ versions:
+ - v2.0.0-beta1
- package-ecosystem: "github-actions"
directory: "/"
diff --git a/.github/labeler.yml b/.github/labeler.yml
index 75642def1c1e84476d692bee5e8711f52208d05d..dd07db45310a70016126e6455f8777a714d71f5c 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -43,6 +43,8 @@
- "/(anthropic|claude)/i"
"provider: aws bedrock":
- "/(aws|bedrock)/i"
+"provider: azure":
+ - "/azure/i"
"provider: google gemini":
- "/gemini/i"
"provider: google vertex":
diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml
index 0ae65fe25556483b90f71c07a0a336170b66d93c..a7b6485e6b5f89cf5a566cf9f5058dd8e72b0d23 100644
--- a/.github/workflows/cla.yml
+++ b/.github/workflows/cla.yml
@@ -22,7 +22,7 @@ jobs:
github.event.comment.body == 'recheck' ||
github.event.comment.body == 'I have read the Contributor License Agreement (CLA) and hereby sign the CLA.' ||
github.event_name == 'pull_request_target'
- uses: contributor-assistant/github-action@v2.6.1
+ uses: contributor-assistant/github-action@ca4a40a7d1004f18d9960b404b97e5f30a505a08 # v2.6.1
env:
GITHUB_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
with:
diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml
index afc6427ff864eaf4929b831c7df23a2699304528..ff40f751d372d17bf458a33efc7776f258739cf3 100644
--- a/.github/workflows/labeler.yml
+++ b/.github/workflows/labeler.yml
@@ -20,7 +20,7 @@ jobs:
triage:
runs-on: ubuntu-latest
steps:
- - uses: github/issue-labeler@v3.4
+ - uses: github/issue-labeler@c1b0f9f52a63158c4adc09425e858e87b32e9685 # v3.4
with:
configuration-path: .github/labeler.yml
enable-versioned-regex: 0
diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml
index ede4752153124701ff75e3780f12d179f98fe530..1e711cfcaa5beb465f7dabacdb10157f4f35ac68 100644
--- a/.github/workflows/nightly.yml
+++ b/.github/workflows/nightly.yml
@@ -11,7 +11,7 @@ jobs:
outputs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- - uses: actions/checkout@v5
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 1
- id: check
diff --git a/.github/workflows/schema-update.yml b/.github/workflows/schema-update.yml
index bc7e19b67f5021e8d3ff00342a062f2c6c000e86..bc1a69c68273c007a764c268958858be3b62bcd2 100644
--- a/.github/workflows/schema-update.yml
+++ b/.github/workflows/schema-update.yml
@@ -10,10 +10,10 @@ jobs:
update-schema:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v5
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- - uses: actions/setup-go@v6
+ - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
with:
go-version-file: go.mod
- run: go run . schema > ./schema.json
diff --git a/.goreleaser.yml b/.goreleaser.yml
index 7c477b587c324bbcf66883baf385e7caf984c872..aabf2f7606462ebb540fd6ebe9efb302a6855e5f 100644
--- a/.goreleaser.yml
+++ b/.goreleaser.yml
@@ -67,8 +67,12 @@ builds:
goarch: arm
- goos: android
goarch: "386"
+ - goos: windows
+ goarch: arm
ldflags:
- -s -w -X github.com/charmbracelet/crush/internal/version.Version={{.Version}}
+ flags:
+ - -trimpath
archives:
- name_template: >-
@@ -308,19 +312,19 @@ changelog:
- "^wip "
- "^wip:"
groups:
- - title: "New Features"
- regexp: '^.*?feat(\(.+\))??!?:.+$'
+ - title: "Deps"
+ regexp: "^.*\\(deps\\)*:+.*$"
+ order: 300
+ - title: "New!"
+ regexp: "^.*feat[(\\w)]*:+.*$"
order: 100
- - title: "Security updates"
- regexp: '^.*?sec(\(.+\))??!?:.+$'
- order: 150
- - title: "Bug fixes and improvements"
- regexp: '^.*?(fix|refactor)(\(.+\))??!?:.+$'
+ - title: "Fixed"
+ regexp: "^.*fix[(\\w)]*:+.*$"
order: 200
- - title: "Documentation updates"
- regexp: ^.*?docs?(\(.+\))??!?:.+$
+ - title: "Docs"
+ regexp: "^.*docs[(\\w)]*:+.*$"
order: 400
- - title: Other work
+ - title: "Other stuff"
order: 9999
release:
diff --git a/README.md b/README.md
index 907e182527690bb67ad4ed89b9d9f501d93c16ab..7f28c5c049cdb6c45bc83ec59f94f4310c13b7c5 100644
--- a/README.md
+++ b/README.md
@@ -7,6 +7,7 @@
Your new coding bestie, now available in your favourite terminal.
Your tools, your code, and your workflows, wired into your LLM of choice.
+你的新编程伙伴,现在就在你最爱的终端中。
你的工具、代码和工作流,都与您选择的 LLM 模型紧密相连。

@@ -64,6 +65,61 @@ nix-channel --update
nix-shell -p '(import { pkgs = import {}; }).repos.charmbracelet.crush'
```
+### NixOS & Home Manager Module Usage via NUR
+
+Crush provides NixOS and Home Manager modules via NUR.
+You can use these modules directly in your flake by importing them from NUR. The module auto-detects whether it's being imported in a Home Manager or NixOS context, so you can import it the same way in both cases. :)
+
+```nix
+{
+ inputs = {
+ nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
+ nur.url = "github:nix-community/NUR";
+ };
+
+ outputs = { self, nixpkgs, nur, ... }: {
+ nixosConfigurations.your-hostname = nixpkgs.lib.nixosSystem {
+ system = "x86_64-linux";
+ modules = [
+ nur.modules.nixos.default
+ nur.repos.charmbracelet.modules.crush
+ {
+ programs.crush = {
+ enable = true;
+ settings = {
+ providers = {
+ openai = {
+ id = "openai";
+ name = "OpenAI";
+ base_url = "https://api.openai.com/v1";
+ type = "openai";
+ api_key = "sk-fake123456789abcdef...";
+ models = [
+ {
+ id = "gpt-4";
+ name = "GPT-4";
+ }
+ ];
+ };
+ };
+ lsp = {
+ go = { command = "gopls"; enabled = true; };
+ nix = { command = "nil"; enabled = true; };
+ };
+ options = {
+ context_paths = [ "/etc/nixos/configuration.nix" ];
+ tui = { compact_mode = true; };
+ debug = false;
+ };
+ };
+ };
+ }
+ ];
+ };
+ };
+}
+```
+
@@ -119,21 +175,25 @@ Crush. You'll be prompted to enter your API key.
That said, you can also set environment variables for preferred providers.
-| Environment Variable | Provider |
-| -------------------------- | -------------------------------------------------- |
-| `ANTHROPIC_API_KEY` | Anthropic |
-| `OPENAI_API_KEY` | OpenAI |
-| `OPENROUTER_API_KEY` | OpenRouter |
-| `GEMINI_API_KEY` | Google Gemini |
-| `VERTEXAI_PROJECT` | Google Cloud VertexAI (Gemini) |
-| `VERTEXAI_LOCATION` | Google Cloud VertexAI (Gemini) |
-| `GROQ_API_KEY` | Groq |
-| `AWS_ACCESS_KEY_ID` | AWS Bedrock (Claude) |
-| `AWS_SECRET_ACCESS_KEY` | AWS Bedrock (Claude) |
-| `AWS_REGION` | AWS Bedrock (Claude) |
-| `AZURE_OPENAI_ENDPOINT` | Azure OpenAI models |
-| `AZURE_OPENAI_API_KEY` | Azure OpenAI models (optional when using Entra ID) |
-| `AZURE_OPENAI_API_VERSION` | Azure OpenAI models |
+| Environment Variable | Provider |
+| --------------------------- | -------------------------------------------------- |
+| `ANTHROPIC_API_KEY` | Anthropic |
+| `OPENAI_API_KEY` | OpenAI |
+| `OPENROUTER_API_KEY` | OpenRouter |
+| `GEMINI_API_KEY` | Google Gemini |
+| `CEREBRAS_API_KEY` | Cerebras |
+| `HF_TOKEN` | Huggingface Inference |
+| `VERTEXAI_PROJECT` | Google Cloud VertexAI (Gemini) |
+| `VERTEXAI_LOCATION` | Google Cloud VertexAI (Gemini) |
+| `GROQ_API_KEY` | Groq |
+| `AWS_ACCESS_KEY_ID` | AWS Bedrock (Claude) |
+| `AWS_SECRET_ACCESS_KEY` | AWS Bedrock (Claude) |
+| `AWS_REGION` | AWS Bedrock (Claude) |
+| `AWS_PROFILE` | Custom AWS Profile |
+| `AWS_REGION` | AWS Region |
+| `AZURE_OPENAI_API_ENDPOINT` | Azure OpenAI models |
+| `AZURE_OPENAI_API_KEY` | Azure OpenAI models (optional when using Entra ID) |
+| `AZURE_OPENAI_API_VERSION` | Azure OpenAI models |
### By the Way
@@ -157,8 +217,8 @@ Configuration itself is stored as a JSON object:
```json
{
- "this-setting": {"this": "that"},
- "that-setting": ["ceci", "cela"]
+ "this-setting": { "this": "that" },
+ "that-setting": ["ceci", "cela"]
}
```
@@ -213,6 +273,8 @@ using `$(echo $VAR)` syntax.
"type": "stdio",
"command": "node",
"args": ["/path/to/mcp-server.js"],
+ "timeout": 120,
+ "disabled": false,
"env": {
"NODE_ENV": "production"
}
@@ -220,6 +282,8 @@ using `$(echo $VAR)` syntax.
"github": {
"type": "http",
"url": "https://example.com/mcp/",
+ "timeout": 120,
+ "disabled": false,
"headers": {
"Authorization": "$(echo Bearer $EXAMPLE_MCP_TOKEN)"
}
@@ -227,6 +291,8 @@ using `$(echo $VAR)` syntax.
"streaming-service": {
"type": "sse",
"url": "https://example.com/mcp/sse",
+ "timeout": 120,
+ "disabled": false,
"headers": {
"API-Key": "$(echo $API_KEY)"
}
@@ -269,6 +335,26 @@ permissions. Use this with care.
You can also skip all permission prompts entirely by running Crush with the
`--yolo` flag. Be very, very careful with this feature.
+### Attribution Settings
+
+By default, Crush adds attribution information to Git commits and pull requests
+it creates. You can customize this behavior with the `attribution` option:
+
+```json
+{
+ "$schema": "https://charm.land/crush.json",
+ "options": {
+ "attribution": {
+ "co_authored_by": true,
+ "generated_with": true
+ }
+ }
+}
+```
+
+- `co_authored_by`: When true (default), adds `Co-Authored-By: Crush <crush@charm.land>` to commit messages
+- `generated_with`: When true (default), adds a `💘 Generated with Crush` line to commit messages and PR descriptions
+
### Local Models
Local models can also be configured via OpenAI-compatible API. Here are two common examples:
@@ -390,9 +476,9 @@ Custom Anthropic-compatible providers follow this format:
Crush currently supports running Anthropic models through Bedrock, with caching disabled.
-* A Bedrock provider will appear once you have AWS configured, i.e. `aws configure`
-* Crush also expects the `AWS_REGION` or `AWS_DEFAULT_REGION` to be set
-* To use a specific AWS profile set `AWS_PROFILE` in your environment, i.e. `AWS_PROFILE=myprofile crush`
+- A Bedrock provider will appear once you have AWS configured, i.e. `aws configure`
+- Crush also expects the `AWS_REGION` or `AWS_DEFAULT_REGION` to be set
+- To use a specific AWS profile set `AWS_PROFILE` in your environment, i.e. `AWS_PROFILE=myprofile crush`
### Vertex AI Platform
@@ -428,17 +514,6 @@ To add specific models to the configuration, configure as such:
}
```
-## A Note on Claude Max and GitHub Copilot
-
-Crush only supports model providers through official, compliant APIs. We do not
-support or endorse any methods that rely on personal Claude Max and GitHub Copilot
-accounts or OAuth workarounds, which may violate Anthropic and Microsoft’s
-Terms of Service.
-
-We’re committed to building sustainable, trusted integrations with model
-providers. If you’re a provider interested in working with us,
-[reach out](mailto:vt100@charm.sh).
-
## Logging
Sometimes you need to look at logs. Luckily, Crush logs all sorts of
@@ -470,6 +545,105 @@ config:
}
```
+## Provider Auto-Updates
+
+By default, Crush automatically checks for the latest and greatest list of
+providers and models from [Catwalk](https://github.com/charmbracelet/catwalk),
+the open source Crush provider database. This means that when new providers and
+models are available, or when model metadata changes, Crush automatically
+updates your local configuration.
+
+### Disabling automatic provider updates
+
+For those with restricted internet access, or those who prefer to work in
+air-gapped environments, this might not be want you want, and this feature can
+be disabled.
+
+To disable automatic provider updates, set `disable_provider_auto_update` in
+your `crush.json` config:
+
+```json
+{
+ "$schema": "https://charm.land/crush.json",
+ "options": {
+ "disable_provider_auto_update": true
+ }
+}
+```
+
+Or set the `CRUSH_DISABLE_PROVIDER_AUTO_UPDATE` environment variable:
+
+```bash
+export CRUSH_DISABLE_PROVIDER_AUTO_UPDATE=1
+```
+
+### Manually updating providers
+
+Manually updating providers is possible with the `crush update-providers`
+command:
+
+```bash
+# Update providers remotely from Catwalk.
+crush update-providers
+
+# Update providers from a custom Catwalk base URL.
+crush update-providers https://example.com/
+
+# Update providers from a local file.
+crush update-providers /path/to/local-providers.json
+
+# Reset providers to the version embedded in Crush at build time.
+crush update-providers embedded
+
+# For more info:
+crush update-providers --help
+```
+
+## Metrics
+
+Crush records pseudonymous usage metrics (tied to a device-specific hash),
+which maintainers rely on to inform development and support priorities. The
+metrics include solely usage metadata; prompts and responses are NEVER
+collected.
+
+Details on exactly what’s collected are in the source code ([here](https://github.com/charmbracelet/crush/tree/main/internal/event)
+and [here](https://github.com/charmbracelet/crush/blob/main/internal/llm/agent/event.go)).
+
+You can opt out of metrics collection at any time by setting the following
+environment variable:
+
+```bash
+export CRUSH_DISABLE_METRICS=1
+```
+
+Or by setting the following in your config:
+
+```json
+{
+ "options": {
+ "disable_metrics": true
+ }
+}
+```
+
+Crush also respects the [`DO_NOT_TRACK`](https://consoledonottrack.com)
+convention which can be enabled via `export DO_NOT_TRACK=1`.
+
+## A Note on Claude Max and GitHub Copilot
+
+Crush only supports model providers through official, compliant APIs. We do not
+support or endorse any methods that rely on personal Claude Max and GitHub
+Copilot accounts or OAuth workarounds, which violate Anthropic and
+Microsoft’s Terms of Service.
+
+We’re committed to building sustainable, trusted integrations with model
+providers. If you’re a provider interested in working with us,
+[reach out](mailto:vt100@charm.sh).
+
+## Contributing
+
+See the [contributing guide](https://github.com/charmbracelet/crush?tab=contributing-ov-file#contributing).
+
## Whatcha think?
We’d love to hear your thoughts on this project. Need help? We gotchu. You can find us on:
@@ -478,6 +652,7 @@ We’d love to hear your thoughts on this project. Need help? We gotchu. You can
- [Discord][discord]
- [Slack](https://charm.land/slack)
- [The Fediverse](https://mastodon.social/@charmcli)
+- [Bluesky](https://bsky.app/profile/charm.land)
[discord]: https://charm.land/discord
diff --git a/Taskfile.yaml b/Taskfile.yaml
index 3993be96f6ac8987a84491f87c4940963b176074..1c4225158fc21508e8dccac8d6f47610f7d81faf 100644
--- a/Taskfile.yaml
+++ b/Taskfile.yaml
@@ -35,6 +35,11 @@ tasks:
generates:
- crush
+ run:
+ desc: Run build
+ cmds:
+ - go run . {{.CLI_ARGS}}
+
test:
desc: Run tests
cmds:
@@ -79,3 +84,26 @@ tasks:
- echo "Generated schema.json"
generates:
- schema.json
+
+ release:
+ desc: Create and push a new tag following semver
+ vars:
+ NEXT:
+ sh: svu next --always || go run github.com/caarlos0/svu/v3@latest next --always
+ prompt: "This will release {{.NEXT}}. Continue?"
+ preconditions:
+ - sh: '[ $(git symbolic-ref --short HEAD) = "main" ]'
+ msg: Not on main branch
+ - sh: "[ $(git status --porcelain=2 | wc -l) = 0 ]"
+ msg: "Git is dirty"
+ cmds:
+ - task: fetch-tags
+ - git commit --allow-empty -m "{{.NEXT}}"
+ - git tag --annotate --sign {{.NEXT}} {{.CLI_ARGS}}
+ - echo "Pushing {{.NEXT}}..."
+ - git push origin --tags
+
+ fetch-tags:
+ cmds:
+ - git tag -d nightly || true
+ - git fetch --tags
diff --git a/crush.json b/crush.json
index ba4dc18bc63381ad4bdbca5470a1527986c74205..f5daef89add28ad4924c2bb87ca70020af005d67 100644
--- a/crush.json
+++ b/crush.json
@@ -1,8 +1,6 @@
{
"$schema": "https://charm.land/crush.json",
"lsp": {
- "Go": {
- "command": "gopls"
- }
+ "gopls": {}
}
}
diff --git a/go.mod b/go.mod
index 6998ed6302a752faaa92d136e54703beb7e6c1b4..170788928c44d7e233da6c25871927f3a8bf2073 100644
--- a/go.mod
+++ b/go.mod
@@ -7,40 +7,39 @@ require (
github.com/MakeNowJust/heredoc v1.0.0
github.com/PuerkitoBio/goquery v1.10.3
github.com/alecthomas/chroma/v2 v2.20.0
- github.com/anthropics/anthropic-sdk-go v1.9.1
+ github.com/anthropics/anthropic-sdk-go v1.13.0
github.com/atotto/clipboard v0.1.4
github.com/aymanbagabas/go-udiff v0.3.1
github.com/bmatcuk/doublestar/v4 v4.9.1
- github.com/charlievieth/fastwalk v1.0.12
+ github.com/charlievieth/fastwalk v1.0.14
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2
- github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e
- github.com/charmbracelet/catwalk v0.5.3
- github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674
+ github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250930175933-4cafc092c5e7
+ github.com/charmbracelet/catwalk v0.6.3
+ github.com/charmbracelet/fang v0.4.3
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018
- github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0
+ github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250917201909-41ff0bf215ea
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706
- github.com/charmbracelet/x/ansi v0.10.1
+ github.com/charmbracelet/x/ansi v0.10.2
github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a
github.com/disintegration/imageorient v0.0.0-20180920195336-8147d86e83ec
- github.com/fsnotify/fsnotify v1.9.0
github.com/google/uuid v1.6.0
github.com/invopop/jsonschema v0.13.0
github.com/joho/godotenv v1.5.1
- github.com/mark3labs/mcp-go v0.38.0
+ github.com/mark3labs/mcp-go v0.41.1
github.com/muesli/termenv v0.16.0
- github.com/ncruces/go-sqlite3 v0.28.0
+ github.com/ncruces/go-sqlite3 v0.29.1
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646
github.com/nxadm/tail v1.4.11
github.com/openai/openai-go v1.12.0
- github.com/pressly/goose/v3 v3.25.0
- github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c
+ github.com/pressly/goose/v3 v3.26.0
+ github.com/qjebbs/go-jsons v1.0.0-alpha.4
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06
github.com/sahilm/fuzzy v0.1.1
- github.com/spf13/cobra v1.9.1
+ github.com/spf13/cobra v1.10.1
github.com/srwiley/oksvg v0.0.0-20221011165216-be6e8873101c
github.com/srwiley/rasterx v0.0.0-20220730225603-2ab79fcdd4ef
- github.com/stretchr/testify v1.11.0
+ github.com/stretchr/testify v1.11.1
github.com/tidwall/sjson v1.2.5
github.com/zeebo/xxh3 v1.0.2
gopkg.in/natefinch/lumberjack.v2 v2.2.1
@@ -73,18 +72,21 @@ require (
github.com/aymerick/douceur v0.2.0 // indirect
github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/buger/jsonparser v1.1.1 // indirect
- github.com/charmbracelet/colorprofile v0.3.2 // indirect
- github.com/charmbracelet/ultraviolet v0.0.0-20250910155420-aa0094762299
+ github.com/charmbracelet/colorprofile v0.3.2
+ github.com/charmbracelet/ultraviolet v0.0.0-20250915111650-81d4262876ef
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a // indirect
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d
+ github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4
github.com/charmbracelet/x/term v0.2.1
github.com/charmbracelet/x/termios v0.1.1 // indirect
github.com/charmbracelet/x/windows v0.2.2 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
+ github.com/denisbrodbeck/machineid v1.0.1
github.com/disintegration/gift v1.1.2 // indirect
github.com/dlclark/regexp2 v1.11.5 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
+ github.com/fsnotify/fsnotify v1.9.0 // indirect
github.com/go-logfmt/logfmt v0.6.0 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
@@ -94,16 +96,18 @@ require (
github.com/googleapis/gax-go/v2 v2.14.1 // indirect
github.com/gorilla/css v1.0.1 // indirect
github.com/gorilla/websocket v1.5.3 // indirect
+ github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/cpuid/v2 v2.0.9 // indirect
github.com/klauspost/pgzip v1.2.6 // indirect
- github.com/lucasb-eyer/go-colorful v1.2.0
+ github.com/lucasb-eyer/go-colorful v1.3.0
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
- github.com/mattn/go-runewidth v0.0.16 // indirect
+ github.com/mattn/go-runewidth v0.0.17 // indirect
github.com/mfridman/interpolate v0.0.2 // indirect
github.com/microcosm-cc/bluemonday v1.0.27 // indirect
+ github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/muesli/cancelreader v0.2.2 // indirect
github.com/muesli/mango v0.1.0 // indirect
github.com/muesli/mango-cobra v1.2.0 // indirect
@@ -112,11 +116,13 @@ require (
github.com/ncruces/julianday v1.0.0 // indirect
github.com/pierrec/lz4/v4 v4.1.22 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
+ github.com/posthog/posthog-go v1.6.10
github.com/rivo/uniseg v0.4.7
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect
+ github.com/sourcegraph/jsonrpc2 v0.2.1 // indirect
github.com/spf13/cast v1.7.1 // indirect
- github.com/spf13/pflag v1.0.7 // indirect
+ github.com/spf13/pflag v1.0.9 // indirect
github.com/tetratelabs/wazero v1.9.0 // indirect
github.com/tidwall/gjson v1.18.0 // indirect
github.com/tidwall/match v1.1.1 // indirect
@@ -135,21 +141,21 @@ require (
go.opentelemetry.io/otel/metric v1.37.0 // indirect
go.opentelemetry.io/otel/trace v1.37.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
- golang.org/x/crypto v0.41.0 // indirect
+ golang.org/x/crypto v0.42.0 // indirect
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect
golang.org/x/image v0.26.0 // indirect
- golang.org/x/net v0.42.0 // indirect
+ golang.org/x/net v0.43.0 // indirect
golang.org/x/oauth2 v0.30.0 // indirect
golang.org/x/sync v0.17.0 // indirect
- golang.org/x/sys v0.36.0
- golang.org/x/term v0.34.0 // indirect
- golang.org/x/text v0.28.0
+ golang.org/x/sys v0.36.0 // indirect
+ golang.org/x/term v0.35.0 // indirect
+ golang.org/x/text v0.29.0
golang.org/x/time v0.8.0 // indirect
google.golang.org/api v0.211.0 // indirect
- google.golang.org/genai v1.21.0
+ google.golang.org/genai v1.28.0
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 // indirect
google.golang.org/grpc v1.71.0 // indirect
- google.golang.org/protobuf v1.36.6 // indirect
+ google.golang.org/protobuf v1.36.8 // indirect
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
mvdan.cc/sh/moreinterp v0.0.0-20250902163504-3cf4fd5717a5
diff --git a/go.sum b/go.sum
index 5f04e5fb6161c875a12fdf689cae3dd5ee348b67..3669305d22b191791df373899305e5e18a4e1f71 100644
--- a/go.sum
+++ b/go.sum
@@ -30,8 +30,8 @@ github.com/alecthomas/repr v0.5.1/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW5
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
-github.com/anthropics/anthropic-sdk-go v1.9.1 h1:raRhZKmayVSVZtLpLDd6IsMXvxLeeSU03/2IBTerWlg=
-github.com/anthropics/anthropic-sdk-go v1.9.1/go.mod h1:WTz31rIUHUHqai2UslPpw5CwXrQP3geYBioRV4WOLvE=
+github.com/anthropics/anthropic-sdk-go v1.13.0 h1:Bhbe8sRoDPtipttg8bQYrMCKe2b79+q6rFW1vOKEUKI=
+github.com/anthropics/anthropic-sdk-go v1.13.0/go.mod h1:WTz31rIUHUHqai2UslPpw5CwXrQP3geYBioRV4WOLvE=
github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
github.com/aws/aws-sdk-go-v2 v1.30.3 h1:jUeBtG0Ih+ZIFH0F4UkmL9w3cSpaMv9tYYDbzILP8dY=
@@ -74,28 +74,28 @@ github.com/bmatcuk/doublestar/v4 v4.9.1 h1:X8jg9rRZmJd4yRy7ZeNDRnM+T3ZfHv15JiBJ/
github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs=
github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0=
-github.com/charlievieth/fastwalk v1.0.12 h1:pwfxe1LajixViQqo7EFLXU2+mQxb6OaO0CeNdVwRKTg=
-github.com/charlievieth/fastwalk v1.0.12/go.mod h1:yGy1zbxog41ZVMcKA/i8ojXLFsuayX5VvwhQVoj9PBI=
+github.com/charlievieth/fastwalk v1.0.14 h1:3Eh5uaFGwHZd8EGwTjJnSpBkfwfsak9h6ICgnWlhAyg=
+github.com/charlievieth/fastwalk v1.0.14/go.mod h1:diVcUreiU1aQ4/Wu3NbxxH4/KYdKpLDojrQ1Bb2KgNY=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2 h1:973OHYuq2Jx9deyuPwe/6lsuQrDCatOsjP8uCd02URE=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250820203609-601216f68ee2/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw=
-github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e h1:4BBnKWFwJ5FLyhw/ijFxKE04i9rubr8WIPR1kjO57iA=
-github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250910155747-997384b0b35e/go.mod h1:F7AfLKYQqpM3NNBVs7ctW417tavhvoh9SBjsgtwpzbY=
-github.com/charmbracelet/catwalk v0.5.3 h1:Hw9DlX8u79K9iLQJB4Bti9/rTzMvEpBjE/GyniWxHNY=
-github.com/charmbracelet/catwalk v0.5.3/go.mod h1:WnKgNPmQHuMyk7GtwAQwl+ezHusfH40IvzML2qwUGwc=
+github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250930175933-4cafc092c5e7 h1:wH4F+UvxcZSDOxy8j45tghiRo8amrYHejbE9+1C6xv0=
+github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.4.0.20250930175933-4cafc092c5e7/go.mod h1:5IzIGXU1n0foRc8bRAherC8ZuQCQURPlwx3ANLq1138=
+github.com/charmbracelet/catwalk v0.6.3 h1:RyL8Yqd4QsV3VyvBEsePScv1z2vKaZxPfQQ0XB5L5AA=
+github.com/charmbracelet/catwalk v0.6.3/go.mod h1:ReU4SdrLfe63jkEjWMdX2wlZMV3k9r11oQAmzN0m+KY=
github.com/charmbracelet/colorprofile v0.3.2 h1:9J27WdztfJQVAQKX2WOlSSRB+5gaKqqITmrvb1uTIiI=
github.com/charmbracelet/colorprofile v0.3.2/go.mod h1:mTD5XzNeWHj8oqHb+S1bssQb7vIHbepiebQ2kPKVKbI=
-github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674 h1:+Cz+VfxD5DO+JT1LlswXWhre0HYLj6l2HW8HVGfMuC0=
-github.com/charmbracelet/fang v0.3.1-0.20250711140230-d5ebb8c1d674/go.mod h1:9gCUAHmVx5BwSafeyNr3GI0GgvlB1WYjL21SkPp1jyU=
+github.com/charmbracelet/fang v0.4.3 h1:qXeMxnL4H6mSKBUhDefHu8NfikFbP/MBNTfqTrXvzmY=
+github.com/charmbracelet/fang v0.4.3/go.mod h1:wHJKQYO5ReYsxx+yZl+skDtrlKO/4LLEQ6EXsdHhRhg=
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018 h1:PU4Zvpagsk5sgaDxn5W4sxHuLp9QRMBZB3bFSk40A4w=
github.com/charmbracelet/glamour/v2 v2.0.0-20250811143442-a27abb32f018/go.mod h1:Z/GLmp9fzaqX4ze3nXG7StgWez5uBM5XtlLHK8V/qSk=
-github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0 h1:sWRGoSw/JsO2S4t2+fmmEkRbkOxphI0AxZkQPQVKWbs=
-github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250721205738-ea66aa652ee0/go.mod h1:XIuqKpZTUXtVyeyiN1k9Tc/U7EzfaDnVc34feFHfBws=
+github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250917201909-41ff0bf215ea h1:g1HfUgSMvye8mgecMD1mPscpt+pzJoDEiSA+p2QXzdQ=
+github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.3.0.20250917201909-41ff0bf215ea/go.mod h1:ngHerf1JLJXBrDXdphn5gFrBPriCL437uwukd5c93pM=
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706 h1:WkwO6Ks3mSIGnGuSdKl9qDSyfbYK50z2wc2gGMggegE=
github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706/go.mod h1:mjJGp00cxcfvD5xdCa+bso251Jt4owrQvuimJtVmEmM=
-github.com/charmbracelet/ultraviolet v0.0.0-20250910155420-aa0094762299 h1:vpIIy7W1Bv84GUhi3Z5oRyZZRAtdTd9kI3+TnLZrnZE=
-github.com/charmbracelet/ultraviolet v0.0.0-20250910155420-aa0094762299/go.mod h1:V21rZtvULxJyG8tUsRC8caTBvKNHOuRJVxH+G6ghH0Y=
-github.com/charmbracelet/x/ansi v0.10.1 h1:rL3Koar5XvX0pHGfovN03f5cxLbCF2YvLeyz7D2jVDQ=
-github.com/charmbracelet/x/ansi v0.10.1/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
+github.com/charmbracelet/ultraviolet v0.0.0-20250915111650-81d4262876ef h1:VrWaUi2LXYLjfjCHowdSOEc6dQ9Ro14KY7Bw4IWd19M=
+github.com/charmbracelet/ultraviolet v0.0.0-20250915111650-81d4262876ef/go.mod h1:AThRsQH1t+dfyOKIwXRoJBniYFQUkUpQq4paheHMc2o=
+github.com/charmbracelet/x/ansi v0.10.2 h1:ith2ArZS0CJG30cIUfID1LXN7ZFXRCww6RUvAPA+Pzw=
+github.com/charmbracelet/x/ansi v0.10.2/go.mod h1:HbLdJjQH4UH4AqA2HpRWuWNluRE6zxJH/yteYEYCFa8=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a h1:zYSNtEJM9jwHbJts2k+Hroj+xQwsW1yxc4Wopdv7KaI=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250811133356-e0c5dbe5ea4a/go.mod h1:rc2bsPC6MWae3LdOxNO1mOb443NlMrrDL0xEya48NNc=
github.com/charmbracelet/x/exp/charmtone v0.0.0-20250708181618-a60a724ba6c3 h1:1xwHZg6eMZ9Wv5TE1UGub6ARubyOd1Lo5kPUI/6VL50=
@@ -104,6 +104,8 @@ github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a h1:FsHE
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d h1:H2oh4WlSsXy8qwLd7I3eAvPd/X3S40aM9l+h47WF1eA=
github.com/charmbracelet/x/exp/slice v0.0.0-20250829135019-44e44e21330d/go.mod h1:vI5nDVMWi6veaYH+0Fmvpbe/+cv/iJfMntdh+N0+Tms=
+github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4 h1:ZhDGU688EHQXslD9KphRpXwK0pKP03egUoZAATUDlV0=
+github.com/charmbracelet/x/powernap v0.0.0-20250919153222-1038f7e6fef4/go.mod h1:cmdl5zlP5mR8TF2Y68UKc7hdGUDiSJ2+4hk0h04Hsx4=
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
github.com/charmbracelet/x/termios v0.1.1 h1:o3Q2bT8eqzGnGPOYheoYS8eEleT5ZVNYNy8JawjaNZY=
@@ -116,6 +118,8 @@ github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfv
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/denisbrodbeck/machineid v1.0.1 h1:geKr9qtkB876mXguW2X6TU4ZynleN6ezuMSRhl4D7AQ=
+github.com/denisbrodbeck/machineid v1.0.1/go.mod h1:dJUwb7PTidGDeYyUBmXZ2GphQBbjJCrnectwCyxcUSI=
github.com/disintegration/gift v1.1.2 h1:9ZyHJr+kPamiH10FX3Pynt1AxFUob812bU9Wt4GMzhs=
github.com/disintegration/gift v1.1.2/go.mod h1:Jh2i7f7Q2BM7Ezno3PhfezbR1xpUg9dUg3/RlKGr4HI=
github.com/disintegration/imageorient v0.0.0-20180920195336-8147d86e83ec h1:YrB6aVr9touOt75I9O1SiancmR2GMg45U9UYf0gtgWg=
@@ -157,8 +161,11 @@ github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrk
github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA=
github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
+github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
+github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
+github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
@@ -183,20 +190,22 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
-github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
-github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
+github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQwVHXptag=
+github.com/lucasb-eyer/go-colorful v1.3.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
-github.com/mark3labs/mcp-go v0.38.0 h1:E5tmJiIXkhwlV0pLAwAT0O5ZjUZSISE/2Jxg+6vpq4I=
-github.com/mark3labs/mcp-go v0.38.0/go.mod h1:T7tUa2jO6MavG+3P25Oy/jR7iCeJPHImCZHRymCn39g=
+github.com/mark3labs/mcp-go v0.41.1 h1:w78eWfiQam2i8ICL7AL0WFiq7KHNJQ6UB53ZVtH4KGA=
+github.com/mark3labs/mcp-go v0.41.1/go.mod h1:T7tUa2jO6MavG+3P25Oy/jR7iCeJPHImCZHRymCn39g=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
-github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
-github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
+github.com/mattn/go-runewidth v0.0.17 h1:78v8ZlW0bP43XfmAfPsdXcoNCelfMHsDmd/pkENfrjQ=
+github.com/mattn/go-runewidth v0.0.17/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mfridman/interpolate v0.0.2 h1:pnuTK7MQIxxFz1Gr+rjSIx9u7qVjf5VOoM/u6BbAxPY=
github.com/mfridman/interpolate v0.0.2/go.mod h1:p+7uk6oE07mpE/Ik1b8EckO0O4ZXiGAfshKBWLUM9Xg=
github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk=
github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA=
+github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
+github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
github.com/muesli/mango v0.1.0 h1:DZQK45d2gGbql1arsYA4vfg4d7I9Hfx5rX/GCmzsAvI=
@@ -209,8 +218,8 @@ github.com/muesli/roff v0.1.0 h1:YD0lalCotmYuF5HhZliKWlIx7IEhiXeSfq7hNjFqGF8=
github.com/muesli/roff v0.1.0/go.mod h1:pjAHQM9hdUUwm/krAfrLGgJkXJ+YuhtsfZ42kieB2Ig=
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
-github.com/ncruces/go-sqlite3 v0.28.0 h1:AQVTUPgfamONl09LS+4rGFbHmLKM8/QrJJJi1UukjEQ=
-github.com/ncruces/go-sqlite3 v0.28.0/go.mod h1:WqvLhYwtEiZzg1H8BIeahUv/DxbmR+3xG5jDHDiBAGk=
+github.com/ncruces/go-sqlite3 v0.29.1 h1:NIi8AISWBToRHyoz01FXiTNvU147Tqdibgj2tFzJCqM=
+github.com/ncruces/go-sqlite3 v0.29.1/go.mod h1:PpccBNNhvjwUOwDQEn2gXQPFPTWdlromj0+fSkd5KSg=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/ncruces/julianday v1.0.0 h1:fH0OKwa7NWvniGQtxdJRxAgkBMolni2BjDHaWTxqt7M=
@@ -228,10 +237,12 @@ github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjL
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/pressly/goose/v3 v3.25.0 h1:6WeYhMWGRCzpyd89SpODFnCBCKz41KrVbRT58nVjGng=
-github.com/pressly/goose/v3 v3.25.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY=
-github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c h1:kmzxiX+OB0knCo1V0dkEkdPelzCdAzCURCfmFArn2/A=
-github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c/go.mod h1:wNJrtinHyC3YSf6giEh4FJN8+yZV7nXBjvmfjhBIcw4=
+github.com/posthog/posthog-go v1.6.10 h1:OA6bkiUg89rI7f5cSXbcrH5+wLinyS6hHplnD92Pu/M=
+github.com/posthog/posthog-go v1.6.10/go.mod h1:LcC1Nu4AgvV22EndTtrMXTy+7RGVC0MhChSw7Qk5XkY=
+github.com/pressly/goose/v3 v3.26.0 h1:KJakav68jdH0WDvoAcj8+n61WqOIaPGgH0bJWS6jpmM=
+github.com/pressly/goose/v3 v3.26.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY=
+github.com/qjebbs/go-jsons v1.0.0-alpha.4 h1:Qsb4ohRUHQODIUAsJKdKJ/SIDbsO7oGOzsfy+h1yQZs=
+github.com/qjebbs/go-jsons v1.0.0-alpha.4/go.mod h1:wNJrtinHyC3YSf6giEh4FJN8+yZV7nXBjvmfjhBIcw4=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
@@ -252,13 +263,14 @@ github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE=
github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas=
+github.com/sourcegraph/jsonrpc2 v0.2.1 h1:2GtljixMQYUYCmIg7W9aF2dFmniq/mOr2T9tFRh6zSQ=
+github.com/sourcegraph/jsonrpc2 v0.2.1/go.mod h1:ZafdZgk/axhT1cvZAPOhw+95nz2I/Ra5qMlU4gTRwIo=
github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y=
github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
-github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
-github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
-github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
-github.com/spf13/pflag v1.0.7 h1:vN6T9TfwStFPFM5XzjsvmzZkLuaLX+HS+0SeFLRgU6M=
-github.com/spf13/pflag v1.0.7/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
+github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
+github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY=
+github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/srwiley/oksvg v0.0.0-20221011165216-be6e8873101c h1:km8GpoQut05eY3GiYWEedbTT0qnSxrCjsVbb7yKY1KE=
github.com/srwiley/oksvg v0.0.0-20221011165216-be6e8873101c/go.mod h1:cNQ3dwVJtS5Hmnjxy6AgTPd0Inb3pW05ftPSX7NZO7Q=
github.com/srwiley/rasterx v0.0.0-20220730225603-2ab79fcdd4ef h1:Ch6Q+AZUxDBCVqdkI8FSpFyZDtCVBc2VmejdNrm5rRQ=
@@ -267,8 +279,8 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
-github.com/stretchr/testify v1.11.0 h1:ib4sjIrwZKxE5u/Japgo/7SJV3PvgjGiRNAvTVGqQl8=
-github.com/stretchr/testify v1.11.0/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
+github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
+github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/tetratelabs/wazero v1.9.0 h1:IcZ56OuxrtaEz8UYNRHBrUa9bYeX9oVY93KspZZBf/I=
github.com/tetratelabs/wazero v1.9.0/go.mod h1:TSbcXCfFP0L2FGkRPxHphadXPjo1T6W+CseNNY7EkjM=
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
@@ -326,8 +338,8 @@ golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDf
golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
-golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
-golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
+golang.org/x/crypto v0.42.0 h1:chiH31gIWm57EkTXpwnqf8qeuMUi0yekh6mT2AvFlqI=
+golang.org/x/crypto v0.42.0/go.mod h1:4+rDnOTJhQCx2q7/j6rAN5XDw8kPjeaXEUR2eL94ix8=
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o=
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8=
golang.org/x/image v0.26.0 h1:4XjIFEZWQmCZi6Wv8BoxsDhRU3RVnLX04dToTDAEPlY=
@@ -348,8 +360,8 @@ golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
-golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs=
-golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8=
+golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
+golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -389,8 +401,8 @@ golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
-golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4=
-golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw=
+golang.org/x/term v0.35.0 h1:bZBVKBudEyhRcajGcNc3jIfWPqV4y/Kt2XcoigOWtDQ=
+golang.org/x/term v0.35.0/go.mod h1:TPGtkTLesOwf2DE8CgVYiZinHAOuy5AYUYT1lENIZnA=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
@@ -400,8 +412,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
-golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
-golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
+golang.org/x/text v0.29.0 h1:1neNs90w9YzJ9BocxfsQNHKuAT4pkghyXc4nhZ6sJvk=
+golang.org/x/text v0.29.0/go.mod h1:7MhJOA9CD2qZyOKYazxdYMF85OwPdEr9jTtBpO7ydH4=
golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg=
golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@@ -413,14 +425,14 @@ golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxb
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.211.0 h1:IUpLjq09jxBSV1lACO33CGY3jsRcbctfGzhj+ZSE/Bg=
google.golang.org/api v0.211.0/go.mod h1:XOloB4MXFH4UTlQSGuNUxw0UT74qdENK8d6JNsXKLi0=
-google.golang.org/genai v1.21.0 h1:0olX8oJPFn0iXNV4cNwgdvc4NHGTZpUbhGhu6Y/zh7U=
-google.golang.org/genai v1.21.0/go.mod h1:QPj5NGJw+3wEOHg+PrsWwJKvG6UC84ex5FR7qAYsN/M=
+google.golang.org/genai v1.28.0 h1:6qpUWFH3PkHPhxNnu3wjaCVJ6Jri1EIR7ks07f9IpIk=
+google.golang.org/genai v1.28.0/go.mod h1:7pAilaICJlQBonjKKJNhftDFv3SREhZcTe9F6nRcjbg=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 h1:e0AIkUUhxyBKh6ssZNrAMeqhA7RKUj42346d1y02i2g=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg=
google.golang.org/grpc v1.71.0/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec=
-google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
-google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
+google.golang.org/protobuf v1.36.8 h1:xHScyCOEuuwZEc6UtSOvPbAT4zRh0xcNRYekJwfqyMc=
+google.golang.org/protobuf v1.36.8/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
diff --git a/internal/app/app.go b/internal/app/app.go
index 21ddcd25eff1c9aeebb9d6700f9340ab0932e7ab..29631c1be84e96617adfeb705b2e35e0b68725e5 100644
--- a/internal/app/app.go
+++ b/internal/app/app.go
@@ -6,7 +6,6 @@ import (
"errors"
"fmt"
"log/slog"
- "maps"
"sync"
"time"
@@ -18,13 +17,12 @@ import (
"github.com/charmbracelet/crush/internal/history"
"github.com/charmbracelet/crush/internal/llm/agent"
"github.com/charmbracelet/crush/internal/log"
- "github.com/charmbracelet/crush/internal/pubsub"
-
"github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/crush/internal/lsp/watcher"
"github.com/charmbracelet/crush/internal/message"
"github.com/charmbracelet/crush/internal/permission"
+ "github.com/charmbracelet/crush/internal/pubsub"
"github.com/charmbracelet/crush/internal/session"
+ "github.com/charmbracelet/x/ansi"
)
type App struct {
@@ -35,12 +33,7 @@ type App struct {
CoderAgent agent.Service
- LSPClients map[string]*lsp.Client
-
- clientsMutex sync.RWMutex
-
- watcherCancelFuncs *csync.Slice[context.CancelFunc]
- lspWatcherWG sync.WaitGroup
+ LSPClients *csync.Map[string, *lsp.Client]
config *config.Config
@@ -71,14 +64,12 @@ func New(ctx context.Context, conn *sql.DB, cfg *config.Config) (*App, error) {
Messages: messages,
History: files,
Permissions: permission.NewPermissionService(cfg.WorkingDir(), skipPermissionsRequests, allowedTools),
- LSPClients: make(map[string]*lsp.Client),
+ LSPClients: csync.NewMap[string, *lsp.Client](),
globalCtx: ctx,
config: cfg,
- watcherCancelFuncs: csync.NewSlice[context.CancelFunc](),
-
events: make(chan tea.Msg, 100),
serviceEventsWG: &sync.WaitGroup{},
tuiWG: &sync.WaitGroup{},
@@ -86,11 +77,6 @@ func New(ctx context.Context, conn *sql.DB, cfg *config.Config) (*App, error) {
app.setupEvents()
- // Start the global watcher
- if err := watcher.Start(); err != nil {
- return nil, fmt.Errorf("app: %w", err)
- }
-
// Initialize LSP clients in the background.
app.initLSPClients(ctx)
@@ -121,7 +107,10 @@ func (app *App) RunNonInteractive(ctx context.Context, prompt string, quiet bool
ctx, cancel := context.WithCancel(ctx)
defer cancel()
- // Start spinner if not in quiet mode.
+ // Start progress bar and spinner
+ fmt.Printf(ansi.SetIndeterminateProgressBar)
+ defer fmt.Printf(ansi.ResetProgressBar)
+
var spinner *format.Spinner
if !quiet {
spinner = format.NewSpinner(ctx, cancel, "Generating")
@@ -336,31 +325,15 @@ func (app *App) Shutdown() {
app.CoderAgent.CancelAll()
}
- for cancel := range app.watcherCancelFuncs.Seq() {
- cancel()
- }
-
- // Wait for all LSP watchers to finish.
- app.lspWatcherWG.Wait()
-
- // Get all LSP clients.
- app.clientsMutex.RLock()
- clients := make(map[string]*lsp.Client, len(app.LSPClients))
- maps.Copy(clients, app.LSPClients)
- app.clientsMutex.RUnlock()
-
// Shutdown all LSP clients.
- for name, client := range clients {
+ for name, client := range app.LSPClients.Seq2() {
shutdownCtx, cancel := context.WithTimeout(app.globalCtx, 5*time.Second)
- if err := client.Shutdown(shutdownCtx); err != nil {
+ if err := client.Close(shutdownCtx); err != nil {
slog.Error("Failed to shutdown LSP client", "name", name, "error", err)
}
cancel()
}
- // Shutdown the global watcher
- watcher.Shutdown()
-
// Call call cleanup functions.
for _, cleanup := range app.cleanupFuncs {
if cleanup != nil {
diff --git a/internal/app/lsp.go b/internal/app/lsp.go
index 8a9b06c1e784770371bc4000a2101af11aa44d64..f4c26af2f4ed369a94c7078600ce9639874dc643 100644
--- a/internal/app/lsp.go
+++ b/internal/app/lsp.go
@@ -6,14 +6,16 @@ import (
"time"
"github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/log"
"github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/crush/internal/lsp/watcher"
)
// initLSPClients initializes LSP clients.
func (app *App) initLSPClients(ctx context.Context) {
for name, clientConfig := range app.config.LSP {
+ if clientConfig.Disabled {
+ slog.Info("Skipping disabled LSP client", "name", name)
+ continue
+ }
go app.createAndStartLSPClient(ctx, name, clientConfig)
}
slog.Info("LSP clients initialization started in background")
@@ -23,11 +25,18 @@ func (app *App) initLSPClients(ctx context.Context) {
func (app *App) createAndStartLSPClient(ctx context.Context, name string, config config.LSPConfig) {
slog.Info("Creating LSP client", "name", name, "command", config.Command, "fileTypes", config.FileTypes, "args", config.Args)
+ // Check if any root markers exist in the working directory (config now has defaults)
+ if !lsp.HasRootMarkers(app.config.WorkingDir(), config.RootMarkers) {
+ slog.Info("Skipping LSP client - no root markers found", "name", name, "rootMarkers", config.RootMarkers)
+ updateLSPState(name, lsp.StateDisabled, nil, nil, 0)
+ return
+ }
+
// Update state to starting
updateLSPState(name, lsp.StateStarting, nil, nil, 0)
// Create LSP client.
- lspClient, err := lsp.NewClient(ctx, name, config)
+ lspClient, err := lsp.New(ctx, name, config, app.config.Resolver())
if err != nil {
slog.Error("Failed to create LSP client for", name, err)
updateLSPState(name, lsp.StateError, err, nil, 0)
@@ -42,11 +51,11 @@ func (app *App) createAndStartLSPClient(ctx context.Context, name string, config
defer cancel()
// Initialize LSP client.
- _, err = lspClient.InitializeLSPClient(initCtx, app.config.WorkingDir())
+ _, err = lspClient.Initialize(initCtx, app.config.WorkingDir())
if err != nil {
slog.Error("Initialize failed", "name", name, "error", err)
updateLSPState(name, lsp.StateError, err, lspClient, 0)
- lspClient.Close()
+ lspClient.Close(ctx)
return
}
@@ -66,64 +75,6 @@ func (app *App) createAndStartLSPClient(ctx context.Context, name string, config
slog.Info("LSP client initialized", "name", name)
- // Create a child context that can be canceled when the app is shutting
- // down.
- watchCtx, cancelFunc := context.WithCancel(ctx)
-
- // Create the workspace watcher.
- workspaceWatcher := watcher.New(name, lspClient)
-
- // Store the cancel function to be called during cleanup.
- app.watcherCancelFuncs.Append(cancelFunc)
-
// Add to map with mutex protection before starting goroutine
- app.clientsMutex.Lock()
- app.LSPClients[name] = lspClient
- app.clientsMutex.Unlock()
-
- // Run workspace watcher.
- app.lspWatcherWG.Add(1)
- go app.runWorkspaceWatcher(watchCtx, name, workspaceWatcher)
-}
-
-// runWorkspaceWatcher executes the workspace watcher for an LSP client.
-func (app *App) runWorkspaceWatcher(ctx context.Context, name string, workspaceWatcher *watcher.Client) {
- defer app.lspWatcherWG.Done()
- defer log.RecoverPanic("LSP-"+name, func() {
- // Try to restart the client.
- app.restartLSPClient(ctx, name)
- })
-
- workspaceWatcher.Watch(ctx, app.config.WorkingDir())
- slog.Info("Workspace watcher stopped", "client", name)
-}
-
-// restartLSPClient attempts to restart a crashed or failed LSP client.
-func (app *App) restartLSPClient(ctx context.Context, name string) {
- // Get the original configuration.
- clientConfig, exists := app.config.LSP[name]
- if !exists {
- slog.Error("Cannot restart client, configuration not found", "client", name)
- return
- }
-
- // Clean up the old client if it exists.
- app.clientsMutex.Lock()
- oldClient, exists := app.LSPClients[name]
- if exists {
- // Remove from map before potentially slow shutdown.
- delete(app.LSPClients, name)
- }
- app.clientsMutex.Unlock()
-
- if exists && oldClient != nil {
- // Try to shut down client gracefully, but don't block on errors.
- shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
- _ = oldClient.Shutdown(shutdownCtx)
- cancel()
- }
-
- // Create a new client using the shared function.
- app.createAndStartLSPClient(ctx, name, clientConfig)
- slog.Info("Successfully restarted LSP client", "client", name)
+ app.LSPClients.Set(name, lspClient)
}
diff --git a/internal/cmd/dirs.go b/internal/cmd/dirs.go
new file mode 100644
index 0000000000000000000000000000000000000000..d3bc0bd4fb1482e2657eedaab9ce4cee30a09373
--- /dev/null
+++ b/internal/cmd/dirs.go
@@ -0,0 +1,66 @@
+package cmd
+
+import (
+ "os"
+ "path/filepath"
+
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/lipgloss/v2"
+ "github.com/charmbracelet/lipgloss/v2/table"
+ "github.com/charmbracelet/x/term"
+ "github.com/spf13/cobra"
+)
+
+var dirsCmd = &cobra.Command{
+ Use: "dirs",
+ Short: "Print directories used by Crush",
+ Long: `Print the directories where Crush stores its configuration and data files.
+This includes the global configuration directory and data directory.`,
+ Example: `
+# Print all directories
+crush dirs
+
+# Print only the config directory
+crush dirs config
+
+# Print only the data directory
+crush dirs data
+ `,
+ Run: func(cmd *cobra.Command, args []string) {
+ if term.IsTerminal(os.Stdout.Fd()) {
+ // We're in a TTY: make it fancy.
+ t := table.New().
+ Border(lipgloss.RoundedBorder()).
+ StyleFunc(func(row, col int) lipgloss.Style {
+ return lipgloss.NewStyle().Padding(0, 2)
+ }).
+ Row("Config", filepath.Dir(config.GlobalConfig())).
+ Row("Data", filepath.Dir(config.GlobalConfigData()))
+ lipgloss.Println(t)
+ return
+ }
+ // Not a TTY.
+ cmd.Println(filepath.Dir(config.GlobalConfig()))
+ cmd.Println(filepath.Dir(config.GlobalConfigData()))
+ },
+}
+
+var configDirCmd = &cobra.Command{
+ Use: "config",
+ Short: "Print the configuration directory used by Crush",
+ Run: func(cmd *cobra.Command, args []string) {
+ cmd.Println(filepath.Dir(config.GlobalConfig()))
+ },
+}
+
+var dataDirCmd = &cobra.Command{
+ Use: "data",
+ Short: "Print the datauration directory used by Crush",
+ Run: func(cmd *cobra.Command, args []string) {
+ cmd.Println(filepath.Dir(config.GlobalConfigData()))
+ },
+}
+
+func init() {
+ dirsCmd.AddCommand(configDirCmd, dataDirCmd)
+}
diff --git a/internal/cmd/logs.go b/internal/cmd/logs.go
index 85921c4e4354194d0d260e814fc61222c114d3ef..e7160f4a1307406be20f1fe00a59e93de5232d67 100644
--- a/internal/cmd/logs.go
+++ b/internal/cmd/logs.go
@@ -68,7 +68,6 @@ var logsCmd = &cobra.Command{
func init() {
logsCmd.Flags().BoolP("follow", "f", false, "Follow log output")
logsCmd.Flags().IntP("tail", "t", defaultTailLines, "Show only the last N lines default: 1000 for performance")
- rootCmd.AddCommand(logsCmd)
}
func followLogs(ctx context.Context, logsFile string, tailLines int) error {
diff --git a/internal/cmd/root.go b/internal/cmd/root.go
index ee167814a1688ae45238d92f0cae78a7e86c0ccd..d6a26d818643a05704f554223a7b7960792970c5 100644
--- a/internal/cmd/root.go
+++ b/internal/cmd/root.go
@@ -1,20 +1,27 @@
package cmd
import (
+ "bytes"
"context"
+ "errors"
"fmt"
"io"
"log/slog"
"os"
"path/filepath"
+ "strconv"
tea "github.com/charmbracelet/bubbletea/v2"
+ "github.com/charmbracelet/colorprofile"
"github.com/charmbracelet/crush/internal/app"
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/db"
+ "github.com/charmbracelet/crush/internal/event"
"github.com/charmbracelet/crush/internal/tui"
"github.com/charmbracelet/crush/internal/version"
"github.com/charmbracelet/fang"
+ "github.com/charmbracelet/lipgloss/v2"
+ "github.com/charmbracelet/x/exp/charmtone"
"github.com/charmbracelet/x/term"
"github.com/spf13/cobra"
)
@@ -27,7 +34,13 @@ func init() {
rootCmd.Flags().BoolP("help", "h", false, "Help")
rootCmd.Flags().BoolP("yolo", "y", false, "Automatically accept all permissions (dangerous mode)")
- rootCmd.AddCommand(runCmd)
+ rootCmd.AddCommand(
+ runCmd,
+ dirsCmd,
+ updateProvidersCmd,
+ logsCmd,
+ schemaCmd,
+ )
}
var rootCmd = &cobra.Command{
@@ -65,6 +78,8 @@ crush -y
}
defer app.Shutdown()
+ event.AppInitialized()
+
// Set up the TUI.
program := tea.NewProgram(
tui.New(app),
@@ -77,14 +92,50 @@ crush -y
go app.Subscribe(program)
if _, err := program.Run(); err != nil {
+ event.Error(err)
slog.Error("TUI run error", "error", err)
- return fmt.Errorf("TUI error: %v", err)
+ return errors.New("Crush crashed. If metrics are enabled, we were notified about it. If you'd like to report it, please copy the stacktrace above and open an issue at https://github.com/charmbracelet/crush/issues/new?template=bug.yml") //nolint:staticcheck
}
return nil
},
+ PostRun: func(cmd *cobra.Command, args []string) {
+ event.AppExited()
+ },
}
+var heartbit = lipgloss.NewStyle().Foreground(charmtone.Dolly).SetString(`
+ ▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄
+ ███████████ ███████████
+████████████████████████████
+████████████████████████████
+██████████▀██████▀██████████
+██████████ ██████ ██████████
+▀▀██████▄████▄▄████▄██████▀▀
+ ████████████████████████
+ ████████████████████
+ ▀▀██████████▀▀
+ ▀▀▀▀▀▀
+`)
+
+// copied from cobra:
+const defaultVersionTemplate = `{{with .DisplayName}}{{printf "%s " .}}{{end}}{{printf "version %s" .Version}}
+`
+
func Execute() {
+ // NOTE: very hacky: we create a colorprofile writer with STDOUT, then make
+ // it forward to a bytes.Buffer, write the colored heartbit to it, and then
+	// finally prepend it to the version template.
+ // Unfortunately cobra doesn't give us a way to set a function to handle
+ // printing the version, and PreRunE runs after the version is already
+ // handled, so that doesn't work either.
+ // This is the only way I could find that works relatively well.
+ if term.IsTerminal(os.Stdout.Fd()) {
+ var b bytes.Buffer
+ w := colorprofile.NewWriter(os.Stdout, os.Environ())
+ w.Forward = &b
+ _, _ = w.WriteString(heartbit.String())
+ rootCmd.SetVersionTemplate(b.String() + "\n" + defaultVersionTemplate)
+ }
if err := fang.Execute(
context.Background(),
rootCmd,
@@ -134,9 +185,26 @@ func setupApp(cmd *cobra.Command) (*app.App, error) {
return nil, err
}
+ if shouldEnableMetrics() {
+ event.Init()
+ }
+
return appInstance, nil
}
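+// shouldEnableMetrics reports whether telemetry should be enabled, honoring
+// the CRUSH_DISABLE_METRICS and DO_NOT_TRACK environment variables as well as
+// the disable_metrics config option.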
+func shouldEnableMetrics() bool {
+ if v, _ := strconv.ParseBool(os.Getenv("CRUSH_DISABLE_METRICS")); v {
+ return false
+ }
+ if v, _ := strconv.ParseBool(os.Getenv("DO_NOT_TRACK")); v {
+ return false
+ }
+ if config.Get().Options.DisableMetrics {
+ return false
+ }
+ return true
+}
+
func MaybePrependStdin(prompt string) (string, error) {
if term.IsTerminal(os.Stdin.Fd()) {
return prompt, nil
diff --git a/internal/cmd/schema.go b/internal/cmd/schema.go
index f835e250c24ea91a9d5084c9a414ed0e1ae28474..6070eb9144dc0e46bf0f374b2cb1a860f09e83e9 100644
--- a/internal/cmd/schema.go
+++ b/internal/cmd/schema.go
@@ -24,7 +24,3 @@ var schemaCmd = &cobra.Command{
return nil
},
}
-
-func init() {
- rootCmd.AddCommand(schemaCmd)
-}
diff --git a/internal/cmd/update_providers.go b/internal/cmd/update_providers.go
new file mode 100644
index 0000000000000000000000000000000000000000..1e771642db785794a5abca24a14b33376cdc0724
--- /dev/null
+++ b/internal/cmd/update_providers.go
@@ -0,0 +1,60 @@
+package cmd
+
+import (
+ "fmt"
+ "log/slog"
+
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/lipgloss/v2"
+ "github.com/charmbracelet/x/exp/charmtone"
+ "github.com/spf13/cobra"
+)
+
+var updateProvidersCmd = &cobra.Command{
+ Use: "update-providers [path-or-url]",
+ Short: "Update providers",
+ Long: `Update the list of providers from a specified local path or remote URL.`,
+ Example: `
+# Update providers remotely from Catwalk
+crush update-providers
+
+# Update providers from a custom URL
+crush update-providers https://example.com/
+
+# Update providers from a local file
+crush update-providers /path/to/local-providers.json
+
+# Update providers from embedded version
+crush update-providers embedded
+`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+		// NOTE(@andreynering): We want to skip logging output to stdout here.
+ slog.SetDefault(slog.New(slog.DiscardHandler))
+
+ var pathOrUrl string
+ if len(args) > 0 {
+ pathOrUrl = args[0]
+ }
+
+ if err := config.UpdateProviders(pathOrUrl); err != nil {
+ return err
+ }
+
+ // NOTE(@andreynering): This style is more-or-less copied from Fang's
+ // error message, adapted for success.
+ headerStyle := lipgloss.NewStyle().
+ Foreground(charmtone.Butter).
+ Background(charmtone.Guac).
+ Bold(true).
+ Padding(0, 1).
+ Margin(1).
+ MarginLeft(2).
+ SetString("SUCCESS")
+ textStyle := lipgloss.NewStyle().
+ MarginLeft(2).
+ SetString("Providers updated successfully.")
+
+ fmt.Printf("%s\n%s\n\n", headerStyle.Render(), textStyle.Render())
+ return nil
+ },
+}
diff --git a/internal/config/config.go b/internal/config/config.go
index 4e42a56e361c81feca31cd95bd778d14c312cd20..858fa1c47b33f6a5e6bafb81b4799ea5739736f9 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -117,18 +117,33 @@ type MCPConfig struct {
}
type LSPConfig struct {
- Disabled bool `json:"enabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
- Command string `json:"command" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
- Args []string `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
- Env map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set to the LSP server command"`
- Options any `json:"options,omitempty" jsonschema:"description=LSP server-specific configuration options"`
- FileTypes []string `json:"filetypes,omitempty" jsonschema:"description=File types this LSP server handles,example=go,example=mod,example=rs,example=c,example=js,example=ts"`
+ Disabled bool `json:"disabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
+ Command string `json:"command,omitempty" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
+ Args []string `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
+ Env map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set to the LSP server command"`
+ FileTypes []string `json:"filetypes,omitempty" jsonschema:"description=File types this LSP server handles,example=go,example=mod,example=rs,example=c,example=js,example=ts"`
+ RootMarkers []string `json:"root_markers,omitempty" jsonschema:"description=Files or directories that indicate the project root,example=go.mod,example=package.json,example=Cargo.toml"`
+ InitOptions map[string]any `json:"init_options,omitempty" jsonschema:"description=Initialization options passed to the LSP server during initialize request"`
+ Options map[string]any `json:"options,omitempty" jsonschema:"description=LSP server-specific settings passed during initialization"`
}
type TUIOptions struct {
CompactMode bool `json:"compact_mode,omitempty" jsonschema:"description=Enable compact mode for the TUI interface,default=false"`
DiffMode string `json:"diff_mode,omitempty" jsonschema:"description=Diff mode for the TUI interface,enum=unified,enum=split"`
// Here we can add themes later or any TUI related options
+ //
+
+ Completions Completions `json:"completions,omitzero" jsonschema:"description=Completions UI options"`
+}
+
+// Completions defines options for the completions UI.
+type Completions struct {
+	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum directory depth to walk when gathering completions,default=0,example=10"`
+	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of completion items to return,default=1000,example=100"`
+}
+
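+// Limits returns the configured maximum depth and item count, or -1 for any
+// value that is unset.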
+func (c Completions) Limits() (depth, items int) {
+ return ptrValOr(c.MaxDepth, -1), ptrValOr(c.MaxItems, -1)
}
type Permissions struct {
@@ -136,14 +151,22 @@ type Permissions struct {
SkipRequests bool `json:"-"` // Automatically accept all permissions (YOLO mode)
}
+type Attribution struct {
+ CoAuthoredBy bool `json:"co_authored_by,omitempty" jsonschema:"description=Add Co-Authored-By trailer to commit messages,default=true"`
+ GeneratedWith bool `json:"generated_with,omitempty" jsonschema:"description=Add Generated with Crush line to commit messages and issues and PRs,default=true"`
+}
+
type Options struct {
- ContextPaths []string `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
- TUI *TUIOptions `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
- Debug bool `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
- DebugLSP bool `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
- DisableAutoSummarize bool `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
- DataDirectory string `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
- DisabledTools []string `json:"disabled_tools" jsonschema:"description=Tools to disable"`
+ ContextPaths []string `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
+ TUI *TUIOptions `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
+ Debug bool `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
+ DebugLSP bool `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
+ DisableAutoSummarize bool `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
+ DataDirectory string `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
+ DisabledTools []string `json:"disabled_tools" jsonschema:"description=Tools to disable"`
+ DisableProviderAutoUpdate bool `json:"disable_provider_auto_update,omitempty" jsonschema:"description=Disable providers auto-update,default=false"`
+ Attribution *Attribution `json:"attribution,omitempty" jsonschema:"description=Attribution settings for generated content"`
+ DisableMetrics bool `json:"disable_metrics,omitempty" jsonschema:"description=Disable sending metrics,default=false"`
}
type MCPs map[string]MCPConfig
@@ -236,6 +259,19 @@ type Agent struct {
ContextPaths []string `json:"context_paths,omitempty"`
}
+type Tools struct {
+ Ls ToolLs `json:"ls,omitzero"`
+}
+
+type ToolLs struct {
+ MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
+ MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
+}
+
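+// Limits returns the configured maximum depth and item count, or -1 for any
+// value that is unset.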
+func (t ToolLs) Limits() (depth, items int) {
+ return ptrValOr(t.MaxDepth, -1), ptrValOr(t.MaxItems, -1)
+}
+
// Config holds the configuration for crush.
type Config struct {
Schema string `json:"$schema,omitempty"`
@@ -254,6 +290,8 @@ type Config struct {
Permissions *Permissions `json:"permissions,omitempty" jsonschema:"description=Permission settings for tool usage"`
+ Tools Tools `json:"tools,omitzero" jsonschema:"description=Tool configurations"`
+
// Internal
workingDir string `json:"-"`
// TODO: most likely remove this concept when I come back to it
@@ -418,6 +456,7 @@ func (c *Config) SetProviderAPIKey(providerID, apiKey string) error {
func allToolNames() []string {
return []string{
+ "agent",
"bash",
"download",
"edit",
@@ -499,7 +538,7 @@ func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
if baseURL == "" {
baseURL = "https://api.openai.com/v1"
}
- if c.ID == "openrouter" {
+ if c.ID == string(catwalk.InferenceProviderOpenRouter) {
testURL = baseURL + "/credits"
} else {
testURL = baseURL + "/models"
@@ -537,8 +576,15 @@ func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
if err != nil {
return fmt.Errorf("failed to create request for provider %s: %w", c.ID, err)
}
- if b.StatusCode != http.StatusOK {
- return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
+ if c.ID == string(catwalk.InferenceProviderZAI) {
+ if b.StatusCode == http.StatusUnauthorized {
+			// For z.ai, only a 401 Unauthorized response is treated as a connection failure.
+ return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
+ }
+ } else {
+ if b.StatusCode != http.StatusOK {
+ return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
+ }
}
_ = b.Body.Close()
return nil
@@ -561,3 +607,10 @@ func resolveEnvs(envs map[string]string) []string {
}
return res
}
+
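+// ptrValOr returns the value t points to, or el when t is nil.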
+func ptrValOr[T any](t *T, el T) T {
+ if t == nil {
+ return el
+ }
+ return *t
+}
diff --git a/internal/config/init.go b/internal/config/init.go
index f97272cefa779319a752927456c34fbcff97e3b6..6807ab25e819b99d899fac711da304c8dc8db595 100644
--- a/internal/config/init.go
+++ b/internal/config/init.go
@@ -4,6 +4,7 @@ import (
"fmt"
"os"
"path/filepath"
+ "slices"
"strings"
"sync/atomic"
)
@@ -50,30 +51,38 @@ func ProjectNeedsInitialization() (bool, error) {
return false, fmt.Errorf("failed to check init flag file: %w", err)
}
- crushExists, err := crushMdExists(cfg.WorkingDir())
+ someContextFileExists, err := contextPathsExist(cfg.WorkingDir())
if err != nil {
- return false, fmt.Errorf("failed to check for CRUSH.md files: %w", err)
+ return false, fmt.Errorf("failed to check for context files: %w", err)
}
- if crushExists {
+ if someContextFileExists {
return false, nil
}
return true, nil
}
-func crushMdExists(dir string) (bool, error) {
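+// contextPathsExist reports whether any of the default context files already
+// exist in dir (matched case-insensitively by filename).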
+func contextPathsExist(dir string) (bool, error) {
entries, err := os.ReadDir(dir)
if err != nil {
return false, err
}
+ // Create a slice of lowercase filenames for lookup with slices.Contains
+ var files []string
for _, entry := range entries {
- if entry.IsDir() {
- continue
+ if !entry.IsDir() {
+ files = append(files, strings.ToLower(entry.Name()))
}
+ }
+
+ // Check if any of the default context paths exist in the directory
+ for _, path := range defaultContextPaths {
+ // Extract just the filename from the path
+ _, filename := filepath.Split(path)
+ filename = strings.ToLower(filename)
- name := strings.ToLower(entry.Name())
- if name == "crush.md" {
+ if slices.Contains(files, filename) {
return true, nil
}
}
diff --git a/internal/config/load.go b/internal/config/load.go
index a703a049c7697be9209d3994c857ff0548f60b8b..9fb45028d6936a652f2657f51707b6cde73f4084 100644
--- a/internal/config/load.go
+++ b/internal/config/load.go
@@ -1,15 +1,18 @@
package config
import (
+ "context"
"encoding/json"
"fmt"
"io"
"log/slog"
"maps"
"os"
+ "os/exec"
"path/filepath"
"runtime"
"slices"
+ "strconv"
"strings"
"github.com/charmbracelet/catwalk/pkg/catwalk"
@@ -18,6 +21,7 @@ import (
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/home"
"github.com/charmbracelet/crush/internal/log"
+ powernapConfig "github.com/charmbracelet/x/powernap/pkg/config"
)
const defaultCatwalkURL = "https://catwalk.charm.sh"
@@ -39,13 +43,8 @@ func LoadReader(fd io.Reader) (*Config, error) {
// Load loads the configuration from the default paths.
func Load(workingDir, dataDir string, debug bool) (*Config, error) {
- // uses default config paths
- configPaths := []string{
- globalConfig(),
- GlobalConfigData(),
- filepath.Join(workingDir, fmt.Sprintf("%s.json", appName)),
- filepath.Join(workingDir, fmt.Sprintf(".%s.json", appName)),
- }
+ configPaths := lookupConfigs(workingDir)
+
cfg, err := loadFromConfigPaths(configPaths)
if err != nil {
return nil, fmt.Errorf("failed to load config from paths %v: %w", configPaths, err)
@@ -65,10 +64,20 @@ func Load(workingDir, dataDir string, debug bool) (*Config, error) {
cfg.Options.Debug,
)
+ if !isInsideWorktree() {
+ const depth = 2
+ const items = 100
+ slog.Warn("No git repository detected in working directory, will limit file walk operations", "depth", depth, "items", items)
+ assignIfNil(&cfg.Tools.Ls.MaxDepth, depth)
+ assignIfNil(&cfg.Tools.Ls.MaxItems, items)
+ assignIfNil(&cfg.Options.TUI.Completions.MaxDepth, depth)
+ assignIfNil(&cfg.Options.TUI.Completions.MaxItems, items)
+ }
+
// Load known providers, this loads the config from catwalk
- providers, err := Providers()
- if err != nil || len(providers) == 0 {
- return nil, fmt.Errorf("failed to load providers: %w", err)
+ providers, err := Providers(cfg)
+ if err != nil {
+ return nil, err
}
cfg.knownProviders = providers
@@ -76,7 +85,7 @@ func Load(workingDir, dataDir string, debug bool) (*Config, error) {
// Configure providers
valueResolver := NewShellVariableResolver(env)
cfg.resolver = valueResolver
- if err := cfg.configureProviders(env, valueResolver, providers); err != nil {
+ if err := cfg.configureProviders(env, valueResolver, cfg.knownProviders); err != nil {
return nil, fmt.Errorf("failed to configure providers: %w", err)
}
@@ -85,7 +94,7 @@ func Load(workingDir, dataDir string, debug bool) (*Config, error) {
return cfg, nil
}
- if err := cfg.configureSelectedModels(providers); err != nil {
+ if err := cfg.configureSelectedModels(cfg.knownProviders); err != nil {
return nil, fmt.Errorf("failed to configure selected models: %w", err)
}
cfg.SetupAgents()
@@ -129,11 +138,6 @@ func (c *Config) configureProviders(env env.Env, resolver VariableResolver, know
config, configExists := c.Providers.Get(string(p.ID))
// if the user configured a known provider we need to allow it to override a couple of parameters
if configExists {
- if config.Disable {
- slog.Debug("Skipping provider due to disable flag", "provider", p.ID)
- c.Providers.Del(string(p.ID))
- continue
- }
if config.BaseURL != "" {
p.APIEndpoint = config.BaseURL
}
@@ -278,7 +282,7 @@ func (c *Config) configureProviders(env env.Env, resolver VariableResolver, know
c.Providers.Del(id)
continue
}
- if providerConfig.Type != catwalk.TypeOpenAI && providerConfig.Type != catwalk.TypeAnthropic {
+ if providerConfig.Type != catwalk.TypeOpenAI && providerConfig.Type != catwalk.TypeAnthropic && providerConfig.Type != catwalk.TypeGemini {
slog.Warn("Skipping custom provider because the provider type is not supported", "provider", id, "type", providerConfig.Type)
c.Providers.Del(id)
continue
@@ -314,7 +318,7 @@ func (c *Config) setDefaults(workingDir, dataDir string) {
if dataDir != "" {
c.Options.DataDirectory = dataDir
} else if c.Options.DataDirectory == "" {
- if path, ok := fsext.SearchParent(workingDir, defaultDataDirectory); ok {
+ if path, ok := fsext.LookupClosest(workingDir, defaultDataDirectory); ok {
c.Options.DataDirectory = path
} else {
c.Options.DataDirectory = filepath.Join(workingDir, defaultDataDirectory)
@@ -333,51 +337,65 @@ func (c *Config) setDefaults(workingDir, dataDir string) {
c.LSP = make(map[string]LSPConfig)
}
- // Apply default file types for known LSP servers if not specified
- applyDefaultLSPFileTypes(c.LSP)
+ // Apply defaults to LSP configurations
+ c.applyLSPDefaults()
// Add the default context paths if they are not already present
c.Options.ContextPaths = append(defaultContextPaths, c.Options.ContextPaths...)
slices.Sort(c.Options.ContextPaths)
c.Options.ContextPaths = slices.Compact(c.Options.ContextPaths)
-}
-var defaultLSPFileTypes = map[string][]string{
- "gopls": {"go", "mod", "sum", "work"},
- "typescript-language-server": {"ts", "tsx", "js", "jsx", "mjs", "cjs"},
- "vtsls": {"ts", "tsx", "js", "jsx", "mjs", "cjs"},
- "bash-language-server": {"sh", "bash", "zsh", "ksh"},
- "rust-analyzer": {"rs"},
- "pyright": {"py", "pyi"},
- "pylsp": {"py", "pyi"},
- "clangd": {"c", "cpp", "cc", "cxx", "h", "hpp"},
- "jdtls": {"java"},
- "vscode-html-languageserver": {"html", "htm"},
- "vscode-css-languageserver": {"css", "scss", "sass", "less"},
- "vscode-json-languageserver": {"json", "jsonc"},
- "yaml-language-server": {"yaml", "yml"},
- "lua-language-server": {"lua"},
- "solargraph": {"rb"},
- "elixir-ls": {"ex", "exs"},
- "zls": {"zig"},
+ if str, ok := os.LookupEnv("CRUSH_DISABLE_PROVIDER_AUTO_UPDATE"); ok {
+ c.Options.DisableProviderAutoUpdate, _ = strconv.ParseBool(str)
+ }
}
-// applyDefaultLSPFileTypes sets default file types for known LSP servers
-func applyDefaultLSPFileTypes(lspConfigs map[string]LSPConfig) {
- for name, config := range lspConfigs {
- if len(config.FileTypes) != 0 {
- continue
+// applyLSPDefaults applies default values from powernap to LSP configurations
+func (c *Config) applyLSPDefaults() {
+ // Get powernap's default configuration
+ configManager := powernapConfig.NewManager()
+ configManager.LoadDefaults()
+
+ // Apply defaults to each LSP configuration
+ for name, cfg := range c.LSP {
+ // Try to get defaults from powernap based on name or command name.
+ base, ok := configManager.GetServer(name)
+ if !ok {
+ base, ok = configManager.GetServer(cfg.Command)
+ if !ok {
+ continue
+ }
+ }
+ if cfg.Options == nil {
+ cfg.Options = base.Settings
+ }
+ if cfg.InitOptions == nil {
+ cfg.InitOptions = base.InitOptions
+ }
+ if len(cfg.FileTypes) == 0 {
+ cfg.FileTypes = base.FileTypes
+ }
+ if len(cfg.RootMarkers) == 0 {
+ cfg.RootMarkers = base.RootMarkers
}
- bin := strings.ToLower(filepath.Base(config.Command))
- config.FileTypes = defaultLSPFileTypes[bin]
- lspConfigs[name] = config
+ if cfg.Command == "" {
+ cfg.Command = base.Command
+ }
+ if len(cfg.Args) == 0 {
+ cfg.Args = base.Args
+ }
+ if len(cfg.Env) == 0 {
+ cfg.Env = base.Environment
+ }
+ // Update the config in the map
+ c.LSP[name] = cfg
}
}
func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (largeModel SelectedModel, smallModel SelectedModel, err error) {
if len(knownProviders) == 0 && c.Providers.Len() == 0 {
err = fmt.Errorf("no providers configured, please configure at least one provider")
- return
+ return largeModel, smallModel, err
}
// Use the first provider enabled based on the known providers order
@@ -390,7 +408,7 @@ func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (large
defaultLargeModel := c.GetModel(string(p.ID), p.DefaultLargeModelID)
if defaultLargeModel == nil {
err = fmt.Errorf("default large model %s not found for provider %s", p.DefaultLargeModelID, p.ID)
- return
+ return largeModel, smallModel, err
}
largeModel = SelectedModel{
Provider: string(p.ID),
@@ -402,7 +420,7 @@ func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (large
defaultSmallModel := c.GetModel(string(p.ID), p.DefaultSmallModelID)
if defaultSmallModel == nil {
err = fmt.Errorf("default small model %s not found for provider %s", p.DefaultSmallModelID, p.ID)
- return
+ return largeModel, smallModel, err
}
smallModel = SelectedModel{
Provider: string(p.ID),
@@ -410,7 +428,7 @@ func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (large
MaxTokens: defaultSmallModel.DefaultMaxTokens,
ReasoningEffort: defaultSmallModel.DefaultReasoningEffort,
}
- return
+ return largeModel, smallModel, err
}
enabledProviders := c.EnabledProviders()
@@ -420,13 +438,13 @@ func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (large
if len(enabledProviders) == 0 {
err = fmt.Errorf("no providers configured, please configure at least one provider")
- return
+ return largeModel, smallModel, err
}
providerConfig := enabledProviders[0]
if len(providerConfig.Models) == 0 {
err = fmt.Errorf("provider %s has no models configured", providerConfig.ID)
- return
+ return largeModel, smallModel, err
}
defaultLargeModel := c.GetModel(providerConfig.ID, providerConfig.Models[0].ID)
largeModel = SelectedModel{
@@ -440,7 +458,7 @@ func (c *Config) defaultModelSelection(knownProviders []catwalk.Provider) (large
Model: defaultSmallModel.ID,
MaxTokens: defaultSmallModel.DefaultMaxTokens,
}
- return
+ return largeModel, smallModel, err
}
func (c *Config) configureSelectedModels(knownProviders []catwalk.Provider) error {
@@ -510,6 +528,28 @@ func (c *Config) configureSelectedModels(knownProviders []catwalk.Provider) erro
return nil
}
+// lookupConfigs searches config files recursively from CWD up to FS root
+func lookupConfigs(cwd string) []string {
+ // prepend default config paths
+ configPaths := []string{
+ GlobalConfig(),
+ GlobalConfigData(),
+ }
+
+ configNames := []string{appName + ".json", "." + appName + ".json"}
+
+ foundConfigs, err := fsext.Lookup(cwd, configNames...)
+ if err != nil {
+ // returns at least default configs
+ return configPaths
+ }
+
+	// reverse the order so the last config found takes the highest priority
+ slices.Reverse(foundConfigs)
+
+ return append(configPaths, foundConfigs...)
+}
+
func loadFromConfigPaths(configPaths []string) (*Config, error) {
var configs []io.Reader
@@ -568,7 +608,8 @@ func hasAWSCredentials(env env.Env) bool {
return false
}
-func globalConfig() string {
+// GlobalConfig returns the global configuration file path for the application.
+func GlobalConfig() string {
xdgConfigHome := os.Getenv("XDG_CONFIG_HOME")
if xdgConfigHome != "" {
return filepath.Join(xdgConfigHome, appName, fmt.Sprintf("%s.json", appName))
@@ -609,3 +650,18 @@ func GlobalConfigData() string {
return filepath.Join(home.Dir(), ".local", "share", appName, fmt.Sprintf("%s.json", appName))
}
+
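+// assignIfNil sets *ptr to point at val when *ptr is nil, leaving explicitly
+// configured values untouched.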
+func assignIfNil[T any](ptr **T, val T) {
+ if *ptr == nil {
+ *ptr = &val
+ }
+}
+
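+// isInsideWorktree reports whether the current working directory is inside a
+// git worktree.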
+func isInsideWorktree() bool {
+ bts, err := exec.CommandContext(
+ context.Background(),
+ "git", "rev-parse",
+ "--is-inside-work-tree",
+ ).CombinedOutput()
+ return err == nil && strings.TrimSpace(string(bts)) == "true"
+}
diff --git a/internal/config/load_test.go b/internal/config/load_test.go
index e0ce94f3995fb64cc8f66348723a4e6c62a0ea2b..406fe07d523c8b0d5d7f038f8d94cc74a0b58f89 100644
--- a/internal/config/load_test.go
+++ b/internal/config/load_test.go
@@ -485,13 +485,36 @@ func TestConfig_setupAgentsWithDisabledTools(t *testing.T) {
cfg.SetupAgents()
coderAgent, ok := cfg.Agents["coder"]
require.True(t, ok)
- assert.Equal(t, []string{"bash", "multiedit", "fetch", "glob", "ls", "sourcegraph", "view", "write"}, coderAgent.AllowedTools)
+ assert.Equal(t, []string{"agent", "bash", "multiedit", "fetch", "glob", "ls", "sourcegraph", "view", "write"}, coderAgent.AllowedTools)
taskAgent, ok := cfg.Agents["task"]
require.True(t, ok)
assert.Equal(t, []string{"glob", "ls", "sourcegraph", "view"}, taskAgent.AllowedTools)
}
+func TestConfig_setupAgentsWithEveryReadOnlyToolDisabled(t *testing.T) {
+ cfg := &Config{
+ Options: &Options{
+ DisabledTools: []string{
+ "glob",
+ "grep",
+ "ls",
+ "sourcegraph",
+ "view",
+ },
+ },
+ }
+
+ cfg.SetupAgents()
+ coderAgent, ok := cfg.Agents["coder"]
+ require.True(t, ok)
+ assert.Equal(t, []string{"agent", "bash", "download", "edit", "multiedit", "fetch", "write"}, coderAgent.AllowedTools)
+
+ taskAgent, ok := cfg.Agents["task"]
+ require.True(t, ok)
+ assert.Equal(t, []string{}, taskAgent.AllowedTools)
+}
+
func TestConfig_configureProvidersWithDisabledProvider(t *testing.T) {
knownProviders := []catwalk.Provider{
{
@@ -520,10 +543,10 @@ func TestConfig_configureProvidersWithDisabledProvider(t *testing.T) {
err := cfg.configureProviders(env, resolver, knownProviders)
require.NoError(t, err)
- // Provider should be removed from config when disabled
- require.Equal(t, cfg.Providers.Len(), 0)
- _, exists := cfg.Providers.Get("openai")
- require.False(t, exists)
+ require.Equal(t, cfg.Providers.Len(), 1)
+ prov, exists := cfg.Providers.Get("openai")
+ require.True(t, exists)
+ require.True(t, prov.Disable)
}
func TestConfig_configureProvidersCustomProviderValidation(t *testing.T) {
diff --git a/internal/config/lsp_defaults_test.go b/internal/config/lsp_defaults_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..f04666597803f390fe25699a3a8bf9aba44b68be
--- /dev/null
+++ b/internal/config/lsp_defaults_test.go
@@ -0,0 +1,35 @@
+package config
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestApplyLSPDefaults(t *testing.T) {
+ t.Parallel()
+
+ // Create a config with an LSP that should get defaults
+ config := &Config{
+ LSP: map[string]LSPConfig{
+ "gopls": {
+ Command: "gopls", // This should get defaults from powernap
+ },
+ "custom": {
+ Command: "custom-lsp",
+ RootMarkers: []string{"custom.toml"}, // This should keep its explicit config
+ },
+ },
+ }
+
+ // Apply defaults
+ config.applyLSPDefaults()
+
+ // Check that gopls got defaults (it should have some root markers now)
+ goplsConfig := config.LSP["gopls"]
+ require.NotEmpty(t, goplsConfig.RootMarkers, "gopls should have received default root markers")
+
+ // Check that custom LSP kept its explicit config
+ customConfig := config.LSP["custom"]
+ require.Equal(t, []string{"custom.toml"}, customConfig.RootMarkers, "custom LSP should keep its explicit root markers")
+}
diff --git a/internal/config/merge_test.go b/internal/config/merge_test.go
index a00eb992a3edf97beb534353b4f0768c2b53a6d8..1b721bf2e8e4b4596025c2c773bec0093778f430 100644
--- a/internal/config/merge_test.go
+++ b/internal/config/merge_test.go
@@ -15,7 +15,7 @@ func TestMerge(t *testing.T) {
t.Fatalf("expected no error, got %v", err)
}
- expected := `{"baz":"qux","foo":"bar"}`
+ expected := `{"foo":"bar","baz":"qux"}`
got, err := io.ReadAll(merged)
if err != nil {
t.Fatalf("expected no error reading merged data, got %v", err)
diff --git a/internal/config/provider.go b/internal/config/provider.go
index 68ede5095506b21dc4d744e309aaa836917345e5..671c348f71da3a79f65c14c624bdaf2adc011411 100644
--- a/internal/config/provider.go
+++ b/internal/config/provider.go
@@ -8,10 +8,12 @@ import (
"os"
"path/filepath"
"runtime"
+ "strings"
"sync"
"time"
"github.com/charmbracelet/catwalk/pkg/catwalk"
+ "github.com/charmbracelet/catwalk/pkg/embedded"
"github.com/charmbracelet/crush/internal/home"
)
@@ -22,6 +24,7 @@ type ProviderClient interface {
var (
providerOnce sync.Once
providerList []catwalk.Provider
+ providerErr error
)
// file to cache provider data
@@ -75,55 +78,128 @@ func loadProvidersFromCache(path string) ([]catwalk.Provider, error) {
return providers, nil
}
-func Providers() ([]catwalk.Provider, error) {
- catwalkURL := cmp.Or(os.Getenv("CATWALK_URL"), defaultCatwalkURL)
- client := catwalk.NewWithURL(catwalkURL)
- path := providerCacheFileData()
- return loadProvidersOnce(client, path)
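+// UpdateProviders refreshes the local provider cache from a file path, an
+// HTTP(S) URL, the embedded list (when "embedded" is passed), or Catwalk when
+// no source is given.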
+func UpdateProviders(pathOrUrl string) error {
+ var providers []catwalk.Provider
+ pathOrUrl = cmp.Or(pathOrUrl, os.Getenv("CATWALK_URL"), defaultCatwalkURL)
+
+ switch {
+ case pathOrUrl == "embedded":
+ providers = embedded.GetAll()
+ case strings.HasPrefix(pathOrUrl, "http://") || strings.HasPrefix(pathOrUrl, "https://"):
+ var err error
+ providers, err = catwalk.NewWithURL(pathOrUrl).GetProviders()
+ if err != nil {
+ return fmt.Errorf("failed to fetch providers from Catwalk: %w", err)
+ }
+ default:
+ content, err := os.ReadFile(pathOrUrl)
+ if err != nil {
+ return fmt.Errorf("failed to read file: %w", err)
+ }
+ if err := json.Unmarshal(content, &providers); err != nil {
+ return fmt.Errorf("failed to unmarshal provider data: %w", err)
+ }
+ if len(providers) == 0 {
+ return fmt.Errorf("no providers found in the provided source")
+ }
+ }
+
+ cachePath := providerCacheFileData()
+ if err := saveProvidersInCache(cachePath, providers); err != nil {
+ return fmt.Errorf("failed to save providers to cache: %w", err)
+ }
+
+ slog.Info("Providers updated successfully", "count", len(providers), "from", pathOrUrl, "to", cachePath)
+ return nil
}
-func loadProvidersOnce(client ProviderClient, path string) ([]catwalk.Provider, error) {
- var err error
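+// Providers returns the known provider list, loading it at most once and
+// memoizing both the result and any error.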
+func Providers(cfg *Config) ([]catwalk.Provider, error) {
providerOnce.Do(func() {
- providerList, err = loadProviders(client, path)
+ catwalkURL := cmp.Or(os.Getenv("CATWALK_URL"), defaultCatwalkURL)
+ client := catwalk.NewWithURL(catwalkURL)
+ path := providerCacheFileData()
+
+ autoUpdateDisabled := cfg.Options.DisableProviderAutoUpdate
+ providerList, providerErr = loadProviders(autoUpdateDisabled, client, path)
})
- if err != nil {
- return nil, err
- }
- return providerList, nil
+ return providerList, providerErr
}
-func loadProviders(client ProviderClient, path string) (providerList []catwalk.Provider, err error) {
- // if cache is not stale, load from it
- stale, exists := isCacheStale(path)
- if !stale {
- slog.Info("Using cached provider data", "path", path)
- providerList, err = loadProvidersFromCache(path)
- if len(providerList) > 0 && err == nil {
- go func() {
- slog.Info("Updating provider cache in background", "path", path)
- updated, uerr := client.GetProviders()
- if len(updated) > 0 && uerr == nil {
- _ = saveProvidersInCache(path, updated)
- }
- }()
- return
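+// loadProviders prefers a fresh local cache, refreshing it from Catwalk in
+// the background when possible, and falls back to the embedded provider list
+// when auto-update is disabled and no cache exists.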
+func loadProviders(autoUpdateDisabled bool, client ProviderClient, path string) ([]catwalk.Provider, error) {
+ cacheIsStale, cacheExists := isCacheStale(path)
+
+ catwalkGetAndSave := func() ([]catwalk.Provider, error) {
+ providers, err := client.GetProviders()
+ if err != nil {
+ return nil, fmt.Errorf("failed to fetch providers from catwalk: %w", err)
}
+ if len(providers) == 0 {
+ return nil, fmt.Errorf("empty providers list from catwalk")
+ }
+ if err := saveProvidersInCache(path, providers); err != nil {
+ return nil, err
+ }
+ return providers, nil
}
- slog.Info("Getting live provider data", "path", path)
- providerList, err = client.GetProviders()
- if len(providerList) > 0 && err == nil {
- err = saveProvidersInCache(path, providerList)
- return
+ backgroundCacheUpdate := func() {
+ go func() {
+ slog.Info("Updating providers cache in background", "path", path)
+
+ providers, err := client.GetProviders()
+ if err != nil {
+ slog.Error("Failed to fetch providers in background from Catwalk", "error", err)
+ return
+ }
+ if len(providers) == 0 {
+ slog.Error("Empty providers list from Catwalk")
+ return
+ }
+ if err := saveProvidersInCache(path, providers); err != nil {
+ slog.Error("Failed to update providers.json in background", "error", err)
+ }
+ }()
}
- if !exists {
- err = fmt.Errorf("failed to load providers")
- return
+
+ switch {
+ case autoUpdateDisabled:
+ slog.Warn("Providers auto-update is disabled")
+
+ if cacheExists {
+ slog.Warn("Using locally cached providers")
+ return loadProvidersFromCache(path)
+ }
+
+ slog.Warn("Saving embedded providers to cache")
+ providers := embedded.GetAll()
+ if err := saveProvidersInCache(path, providers); err != nil {
+ return nil, err
+ }
+ return providers, nil
+
+ case cacheExists && !cacheIsStale:
+ slog.Info("Recent providers cache is available.", "path", path)
+
+ providers, err := loadProvidersFromCache(path)
+ if err != nil {
+ return nil, err
+ }
+ if len(providers) == 0 {
+ return catwalkGetAndSave()
+ }
+ backgroundCacheUpdate()
+ return providers, nil
+
+ default:
+ slog.Info("Cache is not available or is stale. Fetching providers from Catwalk.", "path", path)
+
+ providers, err := catwalkGetAndSave()
+ if err != nil {
+ catwalkUrl := fmt.Sprintf("%s/providers", cmp.Or(os.Getenv("CATWALK_URL"), defaultCatwalkURL))
+ return nil, fmt.Errorf("Crush was unable to fetch an updated list of providers from %s. Consider setting CRUSH_DISABLE_PROVIDER_AUTO_UPDATE=1 to use the embedded providers bundled at the time of this Crush release. You can also update providers manually. For more info see crush update-providers --help. %w", catwalkUrl, err) //nolint:staticcheck
+ }
+ return providers, nil
}
- slog.Info("Loading provider data from cache", "path", path)
- providerList, err = loadProvidersFromCache(path)
- return
}
func isCacheStale(path string) (stale, exists bool) {
diff --git a/internal/config/provider_empty_test.go b/internal/config/provider_empty_test.go
index cb71cabfa5a01cb16b6ef2b6708d1780e31951a9..f3691c320ad4e3509b327374c8ce7f5285c39590 100644
--- a/internal/config/provider_empty_test.go
+++ b/internal/config/provider_empty_test.go
@@ -19,8 +19,8 @@ func TestProvider_loadProvidersEmptyResult(t *testing.T) {
client := &emptyProviderClient{}
tmpPath := t.TempDir() + "/providers.json"
- providers, err := loadProviders(client, tmpPath)
- require.EqualError(t, err, "failed to load providers")
+ providers, err := loadProviders(false, client, tmpPath)
+ require.Contains(t, err.Error(), "Crush was unable to fetch an updated list of providers")
require.Empty(t, providers)
require.Len(t, providers, 0)
@@ -39,7 +39,7 @@ func TestProvider_loadProvidersEmptyCache(t *testing.T) {
require.NoError(t, os.WriteFile(tmpPath, data, 0o644))
// Should refresh and get real providers instead of using empty cache
- providers, err := loadProviders(client, tmpPath)
+ providers, err := loadProviders(false, client, tmpPath)
require.NoError(t, err)
require.NotNil(t, providers)
require.Len(t, providers, 1)
diff --git a/internal/config/provider_test.go b/internal/config/provider_test.go
index ed2568d68a840798872af60c5132707e84a5cbbf..8b499919bca666915a89d38c1e5014a911f4d2d1 100644
--- a/internal/config/provider_test.go
+++ b/internal/config/provider_test.go
@@ -28,7 +28,7 @@ func (m *mockProviderClient) GetProviders() ([]catwalk.Provider, error) {
func TestProvider_loadProvidersNoIssues(t *testing.T) {
client := &mockProviderClient{shouldFail: false}
tmpPath := t.TempDir() + "/providers.json"
- providers, err := loadProviders(client, tmpPath)
+ providers, err := loadProviders(false, client, tmpPath)
require.NoError(t, err)
require.NotNil(t, providers)
require.Len(t, providers, 1)
@@ -57,7 +57,7 @@ func TestProvider_loadProvidersWithIssues(t *testing.T) {
if err != nil {
t.Fatalf("Failed to write old providers to file: %v", err)
}
- providers, err := loadProviders(client, tmpPath)
+ providers, err := loadProviders(false, client, tmpPath)
require.NoError(t, err)
require.NotNil(t, providers)
require.Len(t, providers, 1)
@@ -67,7 +67,7 @@ func TestProvider_loadProvidersWithIssues(t *testing.T) {
func TestProvider_loadProvidersWithIssuesAndNoCache(t *testing.T) {
client := &mockProviderClient{shouldFail: true}
tmpPath := t.TempDir() + "/providers.json"
- providers, err := loadProviders(client, tmpPath)
+ providers, err := loadProviders(false, client, tmpPath)
require.Error(t, err)
require.Nil(t, providers, "Expected nil providers when loading fails and no cache exists")
}
diff --git a/internal/csync/maps.go b/internal/csync/maps.go
index b7a1f3109f6c15e7e5592cb538943a2d9e340819..1fd2005790014b2ce4bd5a78dbb7931d54cbe66c 100644
--- a/internal/csync/maps.go
+++ b/internal/csync/maps.go
@@ -27,6 +27,25 @@ func NewMapFrom[K comparable, V any](m map[K]V) *Map[K, V] {
}
}
+// NewLazyMap creates a new lazy-loaded map. The provided load function is
+// executed in a separate goroutine to populate the map.
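+//
+// Illustrative usage (a sketch; the load function name is hypothetical, and
+// Get blocks until loading finishes because the write lock is held while the
+// map is being populated):
+//
+//	m := NewLazyMap(func() map[string]int { return loadExpensiveIndex() })
+//	v, ok := m.Get("key") // waits for loadExpensiveIndex to complete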
+func NewLazyMap[K comparable, V any](load func() map[K]V) *Map[K, V] {
+ m := &Map[K, V]{}
+ m.mu.Lock()
+ go func() {
+ m.inner = load()
+ m.mu.Unlock()
+ }()
+ return m
+}
+
+// Reset replaces the inner map with the new one.
+func (m *Map[K, V]) Reset(input map[K]V) {
+ m.mu.Lock()
+ defer m.mu.Unlock()
+ m.inner = input
+}
+
// Set sets the value for the specified key in the map.
func (m *Map[K, V]) Set(key K, value V) {
m.mu.Lock()
diff --git a/internal/csync/maps_test.go b/internal/csync/maps_test.go
index 4a8019260a2610b7f5ae0d854029207c6b945d04..4c590f008dad91e8dcbc40d1b90d87ef1b3e5750 100644
--- a/internal/csync/maps_test.go
+++ b/internal/csync/maps_test.go
@@ -5,6 +5,8 @@ import (
"maps"
"sync"
"testing"
+ "testing/synctest"
+ "time"
"github.com/stretchr/testify/require"
)
@@ -36,6 +38,56 @@ func TestNewMapFrom(t *testing.T) {
require.Equal(t, 1, value)
}
+func TestNewLazyMap(t *testing.T) {
+ t.Parallel()
+
+ synctest.Test(t, func(t *testing.T) {
+ t.Helper()
+
+ waiter := sync.Mutex{}
+ waiter.Lock()
+ loadCalled := false
+
+ loadFunc := func() map[string]int {
+ waiter.Lock()
+ defer waiter.Unlock()
+ loadCalled = true
+ return map[string]int{
+ "key1": 1,
+ "key2": 2,
+ }
+ }
+
+ m := NewLazyMap(loadFunc)
+ require.NotNil(t, m)
+
+ waiter.Unlock() // Allow the load function to proceed
+ time.Sleep(100 * time.Millisecond)
+ require.True(t, loadCalled)
+ require.Equal(t, 2, m.Len())
+
+ value, ok := m.Get("key1")
+ require.True(t, ok)
+ require.Equal(t, 1, value)
+ })
+}
+
+func TestMap_Reset(t *testing.T) {
+ t.Parallel()
+
+ m := NewMapFrom(map[string]int{
+ "a": 10,
+ })
+
+ m.Reset(map[string]int{
+ "b": 20,
+ })
+ value, ok := m.Get("b")
+ require.True(t, ok)
+ require.Equal(t, 20, value)
+ require.Equal(t, 1, m.Len())
+}
+
func TestMap_Set(t *testing.T) {
t.Parallel()
diff --git a/internal/csync/versionedmap.go b/internal/csync/versionedmap.go
new file mode 100644
index 0000000000000000000000000000000000000000..f0f4e0249c3b0102976840bd82400e18c1703c47
--- /dev/null
+++ b/internal/csync/versionedmap.go
@@ -0,0 +1,51 @@
+package csync
+
+import (
+ "iter"
+ "sync/atomic"
+)
+
+// NewVersionedMap creates a new versioned, thread-safe map.
+func NewVersionedMap[K comparable, V any]() *VersionedMap[K, V] {
+ return &VersionedMap[K, V]{
+ m: NewMap[K, V](),
+ }
+}
+
+// VersionedMap is a thread-safe map that keeps track of its version.
+type VersionedMap[K comparable, V any] struct {
+ m *Map[K, V]
+ v atomic.Uint64
+}
+
+// Get gets the value for the specified key from the map.
+func (m *VersionedMap[K, V]) Get(key K) (V, bool) {
+ return m.m.Get(key)
+}
+
+// Set sets the value for the specified key in the map and increments the version.
+func (m *VersionedMap[K, V]) Set(key K, value V) {
+ m.m.Set(key, value)
+ m.v.Add(1)
+}
+
+// Del deletes the specified key from the map and increments the version.
+func (m *VersionedMap[K, V]) Del(key K) {
+ m.m.Del(key)
+ m.v.Add(1)
+}
+
+// Seq2 returns an iter.Seq2 that yields key-value pairs from the map.
+func (m *VersionedMap[K, V]) Seq2() iter.Seq2[K, V] {
+ return m.m.Seq2()
+}
+
+// Len returns the number of items in the map.
+func (m *VersionedMap[K, V]) Len() int {
+ return m.m.Len()
+}
+
+// Version returns the current version of the map.
+func (m *VersionedMap[K, V]) Version() uint64 {
+ return m.v.Load()
+}
diff --git a/internal/csync/versionedmap_test.go b/internal/csync/versionedmap_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..2c32004c5f269b7518999f95be23db95d7b6ec15
--- /dev/null
+++ b/internal/csync/versionedmap_test.go
@@ -0,0 +1,89 @@
+package csync
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestVersionedMap_Set(t *testing.T) {
+ t.Parallel()
+
+ vm := NewVersionedMap[string, int]()
+ require.Equal(t, uint64(0), vm.Version())
+
+ vm.Set("key1", 42)
+ require.Equal(t, uint64(1), vm.Version())
+
+ value, ok := vm.Get("key1")
+ require.True(t, ok)
+ require.Equal(t, 42, value)
+}
+
+func TestVersionedMap_Del(t *testing.T) {
+ t.Parallel()
+
+ vm := NewVersionedMap[string, int]()
+ vm.Set("key1", 42)
+ initialVersion := vm.Version()
+
+ vm.Del("key1")
+ require.Equal(t, initialVersion+1, vm.Version())
+
+ _, ok := vm.Get("key1")
+ require.False(t, ok)
+}
+
+func TestVersionedMap_VersionIncrement(t *testing.T) {
+ t.Parallel()
+
+ vm := NewVersionedMap[string, int]()
+ initialVersion := vm.Version()
+
+ // Setting a value should increment the version
+ vm.Set("key1", 42)
+ require.Equal(t, initialVersion+1, vm.Version())
+
+ // Deleting a value should increment the version
+ vm.Del("key1")
+ require.Equal(t, initialVersion+2, vm.Version())
+
+ // Deleting a non-existent key should still increment the version
+ vm.Del("nonexistent")
+ require.Equal(t, initialVersion+3, vm.Version())
+}
+
+func TestVersionedMap_ConcurrentAccess(t *testing.T) {
+ t.Parallel()
+
+ vm := NewVersionedMap[int, int]()
+ const numGoroutines = 100
+ const numOperations = 100
+
+ // Initial version
+ initialVersion := vm.Version()
+
+ // Perform concurrent Set and Del operations
+ for i := range numGoroutines {
+ go func(id int) {
+ for j := range numOperations {
+ key := id*numOperations + j
+ vm.Set(key, key*2)
+ vm.Del(key)
+ }
+ }(i)
+ }
+
+ // Wait for operations to complete by checking the version
+ // This is a simplified check - in a real test you might want to use sync.WaitGroup
+ expectedMinVersion := initialVersion + uint64(numGoroutines*numOperations*2)
+
+ // Allow some time for operations to complete
+ for vm.Version() < expectedMinVersion {
+ // Busy wait - in a real test you'd use proper synchronization
+ }
+
+ // Final version should be at least the expected minimum
+ require.GreaterOrEqual(t, vm.Version(), expectedMinVersion)
+ require.Equal(t, 0, vm.Len())
+}
diff --git a/internal/event/all.go b/internal/event/all.go
new file mode 100644
index 0000000000000000000000000000000000000000..8caf98e62ff3f39b291e341959ebc943361eec05
--- /dev/null
+++ b/internal/event/all.go
@@ -0,0 +1,59 @@
+package event
+
+import (
+ "time"
+)
+
+var appStartTime time.Time
+
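+// AppInitialized records the app start time and reports an "app initialized"
+// event.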
+func AppInitialized() {
+ appStartTime = time.Now()
+ send("app initialized")
+}
+
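+// AppExited reports an "app exited" event with the session duration and
+// flushes any pending events.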
+func AppExited() {
+ duration := time.Since(appStartTime).Truncate(time.Second)
+ send(
+ "app exited",
+ "app duration pretty", duration.String(),
+ "app duration in seconds", int64(duration.Seconds()),
+ )
+ Flush()
+}
+
+func SessionCreated() {
+ send("session created")
+}
+
+func SessionDeleted() {
+ send("session deleted")
+}
+
+func SessionSwitched() {
+ send("session switched")
+}
+
+func FilePickerOpened() {
+ send("filepicker opened")
+}
+
+func PromptSent(props ...any) {
+ send(
+ "prompt sent",
+ props...,
+ )
+}
+
+func PromptResponded(props ...any) {
+ send(
+ "prompt responded",
+ props...,
+ )
+}
+
+func TokensUsed(props ...any) {
+ send(
+ "tokens used",
+ props...,
+ )
+}
diff --git a/internal/event/event.go b/internal/event/event.go
new file mode 100644
index 0000000000000000000000000000000000000000..ca02c6d89d67be1756b166aea152da165b2712c9
--- /dev/null
+++ b/internal/event/event.go
@@ -0,0 +1,107 @@
+package event
+
+import (
+ "fmt"
+ "log/slog"
+ "os"
+ "path/filepath"
+ "reflect"
+ "runtime"
+
+ "github.com/charmbracelet/crush/internal/version"
+ "github.com/posthog/posthog-go"
+)
+
+const (
+ endpoint = "https://data.charm.land"
+ key = "phc_4zt4VgDWLqbYnJYEwLRxFoaTL2noNrQij0C6E8k3I0V"
+)
+
+var (
+ client posthog.Client
+
+ baseProps = posthog.NewProperties().
+ Set("GOOS", runtime.GOOS).
+ Set("GOARCH", runtime.GOARCH).
+ Set("TERM", os.Getenv("TERM")).
+ Set("SHELL", filepath.Base(os.Getenv("SHELL"))).
+ Set("Version", version.Version).
+ Set("GoVersion", runtime.Version())
+)
+
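+// Init creates the PostHog client and derives the distinct ID used to
+// attribute events to this machine.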
+func Init() {
+ c, err := posthog.NewWithConfig(key, posthog.Config{
+ Endpoint: endpoint,
+ Logger: logger{},
+ })
+ if err != nil {
+ slog.Error("Failed to initialize PostHog client", "error", err)
+ }
+ client = c
+ distinctId = getDistinctId()
+}
+
+// send logs an event to PostHog with the given event name and properties.
+func send(event string, props ...any) {
+ if client == nil {
+ return
+ }
+ err := client.Enqueue(posthog.Capture{
+ DistinctId: distinctId,
+ Event: event,
+ Properties: pairsToProps(props...).Merge(baseProps),
+ })
+ if err != nil {
+ slog.Error("Failed to enqueue PostHog event", "event", event, "props", props, "error", err)
+ return
+ }
+}
+
+// Error logs an error event to PostHog with the error type and message.
+func Error(err any, props ...any) {
+ if client == nil {
+ return
+ }
+ // The PostHog Go client does not yet support sending exceptions.
+ // We're mimicking the behavior by sending the minimal info required
+ // for PostHog to recognize this as an exception event.
+ props = append(
+ []any{
+ "$exception_list",
+ []map[string]string{
+ {"type": reflect.TypeOf(err).String(), "value": fmt.Sprintf("%v", err)},
+ },
+ },
+ props...,
+ )
+ send("$exception", props...)
+}
+
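+// Flush closes the PostHog client, flushing any events that are still queued.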
+func Flush() {
+ if client == nil {
+ return
+ }
+ if err := client.Close(); err != nil {
+ slog.Error("Failed to flush PostHog events", "error", err)
+ }
+}
+
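+// pairsToProps converts alternating key/value arguments into PostHog
+// properties; keys are expected to be strings.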
+func pairsToProps(props ...any) posthog.Properties {
+ p := posthog.NewProperties()
+
+ if !isEven(len(props)) {
+ slog.Error("Event properties must be provided as key-value pairs", "props", props)
+ return p
+ }
+
+ for i := 0; i < len(props); i += 2 {
+ key := props[i].(string)
+ value := props[i+1]
+ p = p.Set(key, value)
+ }
+ return p
+}
+
+func isEven(n int) bool {
+ return n%2 == 0
+}
diff --git a/internal/event/identifier.go b/internal/event/identifier.go
new file mode 100644
index 0000000000000000000000000000000000000000..ee05f8f58f6dd9a8f662e94992983ce26a94d9b9
--- /dev/null
+++ b/internal/event/identifier.go
@@ -0,0 +1,49 @@
+package event
+
+import (
+ "crypto/hmac"
+ "crypto/sha256"
+ "encoding/hex"
+ "fmt"
+ "net"
+
+ "github.com/denisbrodbeck/machineid"
+)
+
+var distinctId string
+
+const (
+ hashKey = "charm"
+ fallbackId = "unknown"
+)
+
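+// getDistinctId derives a stable identifier for this machine, falling back to
+// a hashed MAC address and finally to "unknown".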
+func getDistinctId() string {
+ if id, err := machineid.ProtectedID(hashKey); err == nil {
+ return id
+ }
+ if macAddr, err := getMacAddr(); err == nil {
+ return hashString(macAddr)
+ }
+ return fallbackId
+}
+
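+// getMacAddr returns the hardware address of the first active, non-loopback
+// interface that has at least one address.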
+func getMacAddr() (string, error) {
+ interfaces, err := net.Interfaces()
+ if err != nil {
+ return "", err
+ }
+ for _, iface := range interfaces {
+ if iface.Flags&net.FlagUp != 0 && iface.Flags&net.FlagLoopback == 0 && len(iface.HardwareAddr) > 0 {
+ if addrs, err := iface.Addrs(); err == nil && len(addrs) > 0 {
+ return iface.HardwareAddr.String(), nil
+ }
+ }
+ }
+ return "", fmt.Errorf("no active interface with mac address found")
+}
+
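+// hashString returns a hex-encoded HMAC-SHA256 digest derived from str and
+// the fixed hash key.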
+func hashString(str string) string {
+ hash := hmac.New(sha256.New, []byte(str))
+ hash.Write([]byte(hashKey))
+ return hex.EncodeToString(hash.Sum(nil))
+}
diff --git a/internal/event/logger.go b/internal/event/logger.go
new file mode 100644
index 0000000000000000000000000000000000000000..7581676b018f5ac6001827db851a132792d21985
--- /dev/null
+++ b/internal/event/logger.go
@@ -0,0 +1,28 @@
+package event
+
+import (
+ "fmt"
+ "log/slog"
+
+ "github.com/posthog/posthog-go"
+)
+
+var _ posthog.Logger = logger{}
+
+type logger struct{}
+
+func (logger) Debugf(format string, args ...any) {
+ slog.Debug(fmt.Sprintf(format, args...))
+}
+
+func (logger) Logf(format string, args ...any) {
+ slog.Info(fmt.Sprintf(format, args...))
+}
+
+func (logger) Warnf(format string, args ...any) {
+ slog.Warn(fmt.Sprintf(format, args...))
+}
+
+func (logger) Errorf(format string, args ...any) {
+ slog.Error(fmt.Sprintf(format, args...))
+}
diff --git a/internal/fsext/fileutil.go b/internal/fsext/fileutil.go
index e83cfc915219320f34cd4f813ac253be6b2c5053..182b145a609311d20544d399c1212097c7519dda 100644
--- a/internal/fsext/fileutil.go
+++ b/internal/fsext/fileutil.go
@@ -1,15 +1,17 @@
package fsext
import (
+ "errors"
"fmt"
"os"
"path/filepath"
- "sort"
+ "slices"
"strings"
"time"
"github.com/bmatcuk/doublestar/v4"
"github.com/charlievieth/fastwalk"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/home"
)
@@ -75,11 +77,14 @@ func (w *FastGlobWalker) ShouldSkip(path string) bool {
}
func GlobWithDoubleStar(pattern, searchPath string, limit int) ([]string, bool, error) {
+	// Normalize the pattern to forward slashes so that Windows-style patterns
+	// written with backslashes still match.
+ pattern = filepath.ToSlash(pattern)
+
walker := NewFastGlobWalker(searchPath)
- var matches []FileInfo
+ found := csync.NewSlice[FileInfo]()
conf := fastwalk.Config{
- Follow: true,
- // Use forward slashes when running a Windows binary under WSL or MSYS
+ Follow: true,
ToSlash: fastwalk.DefaultToSlash(),
Sort: fastwalk.SortFilesFirst,
}
@@ -92,19 +97,21 @@ func GlobWithDoubleStar(pattern, searchPath string, limit int) ([]string, bool,
if walker.ShouldSkip(path) {
return filepath.SkipDir
}
- return nil
}
if walker.ShouldSkip(path) {
return nil
}
- // Check if path matches the pattern
relPath, err := filepath.Rel(searchPath, path)
if err != nil {
relPath = path
}
+ // Normalize separators to forward slashes
+ relPath = filepath.ToSlash(relPath)
+
+ // Check if path matches the pattern
matched, err := doublestar.Match(pattern, relPath)
if err != nil || !matched {
return nil
@@ -115,31 +122,26 @@ func GlobWithDoubleStar(pattern, searchPath string, limit int) ([]string, bool,
return nil
}
- matches = append(matches, FileInfo{Path: path, ModTime: info.ModTime()})
- if limit > 0 && len(matches) >= limit*2 {
+ found.Append(FileInfo{Path: path, ModTime: info.ModTime()})
+ if limit > 0 && found.Len() >= limit*2 { // NOTE: why x2?
return filepath.SkipAll
}
return nil
})
- if err != nil {
+ if err != nil && !errors.Is(err, filepath.SkipAll) {
return nil, false, fmt.Errorf("fastwalk error: %w", err)
}
- sort.Slice(matches, func(i, j int) bool {
- return matches[i].ModTime.After(matches[j].ModTime)
+ matches := slices.SortedFunc(found.Seq(), func(a, b FileInfo) int {
+ return b.ModTime.Compare(a.ModTime)
})
-
- truncated := false
- if limit > 0 && len(matches) > limit {
- matches = matches[:limit]
- truncated = true
- }
+ matches, truncated := truncate(matches, limit)
results := make([]string, len(matches))
for i, m := range matches {
results[i] = m.Path
}
- return results, truncated, nil
+ return results, truncated || errors.Is(err, filepath.SkipAll), nil
}
// ShouldExcludeFile checks if a file should be excluded from processing
@@ -149,36 +151,6 @@ func ShouldExcludeFile(rootPath, filePath string) bool {
shouldIgnore(filePath, nil)
}
-// WalkDirectories walks a directory tree and calls the provided function for each directory,
-// respecting hierarchical .gitignore/.crushignore files like git does.
-func WalkDirectories(rootPath string, fn func(path string, d os.DirEntry, err error) error) error {
- dl := NewDirectoryLister(rootPath)
-
- conf := fastwalk.Config{
- Follow: true,
- ToSlash: fastwalk.DefaultToSlash(),
- Sort: fastwalk.SortDirsFirst,
- }
-
- return fastwalk.Walk(&conf, rootPath, func(path string, d os.DirEntry, err error) error {
- if err != nil {
- return fn(path, d, err)
- }
-
- // Only process directories
- if !d.IsDir() {
- return nil
- }
-
- // Check if directory should be ignored
- if dl.shouldIgnore(path, nil) {
- return filepath.SkipDir
- }
-
- return fn(path, d, err)
- })
-}
-
func PrettyPath(path string) string {
return home.Short(path)
}
@@ -242,3 +214,10 @@ func ToWindowsLineEndings(content string) (string, bool) {
}
return content, false
}
+
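+// truncate caps input at limit items when limit is positive and reports
+// whether anything was dropped.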
+func truncate[T any](input []T, limit int) ([]T, bool) {
+ if limit > 0 && len(input) > limit {
+ return input[:limit], true
+ }
+ return input, false
+}
diff --git a/internal/fsext/fileutil_test.go b/internal/fsext/fileutil_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..3788fe5477b082dec496275a8ac028788d55fc64
--- /dev/null
+++ b/internal/fsext/fileutil_test.go
@@ -0,0 +1,269 @@
+package fsext
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestGlobWithDoubleStar(t *testing.T) {
+ t.Run("finds files matching pattern", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ mainGo := filepath.Join(testDir, "src", "main.go")
+ utilsGo := filepath.Join(testDir, "src", "utils.go")
+ helperGo := filepath.Join(testDir, "pkg", "helper.go")
+ readmeMd := filepath.Join(testDir, "README.md")
+
+ for _, file := range []string{mainGo, utilsGo, helperGo, readmeMd} {
+ require.NoError(t, os.MkdirAll(filepath.Dir(file), 0o755))
+ require.NoError(t, os.WriteFile(file, []byte("test content"), 0o644))
+ }
+
+ matches, truncated, err := GlobWithDoubleStar("**/main.go", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ require.Equal(t, matches, []string{mainGo})
+ })
+
+ t.Run("finds directories matching pattern", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ srcDir := filepath.Join(testDir, "src")
+ pkgDir := filepath.Join(testDir, "pkg")
+ internalDir := filepath.Join(testDir, "internal")
+ cmdDir := filepath.Join(testDir, "cmd")
+ pkgFile := filepath.Join(testDir, "pkg.txt")
+
+ for _, dir := range []string{srcDir, pkgDir, internalDir, cmdDir} {
+ require.NoError(t, os.MkdirAll(dir, 0o755))
+ }
+
+ require.NoError(t, os.WriteFile(filepath.Join(srcDir, "main.go"), []byte("package main"), 0o644))
+ require.NoError(t, os.WriteFile(pkgFile, []byte("test"), 0o644))
+
+ matches, truncated, err := GlobWithDoubleStar("pkg", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ require.Equal(t, matches, []string{pkgDir})
+ })
+
+ t.Run("finds nested directories with wildcard patterns", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ srcPkgDir := filepath.Join(testDir, "src", "pkg")
+ libPkgDir := filepath.Join(testDir, "lib", "pkg")
+ mainPkgDir := filepath.Join(testDir, "pkg")
+ otherDir := filepath.Join(testDir, "other")
+
+ for _, dir := range []string{srcPkgDir, libPkgDir, mainPkgDir, otherDir} {
+ require.NoError(t, os.MkdirAll(dir, 0o755))
+ }
+
+ matches, truncated, err := GlobWithDoubleStar("**/pkg", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ var relativeMatches []string
+ for _, match := range matches {
+ rel, err := filepath.Rel(testDir, match)
+ require.NoError(t, err)
+ relativeMatches = append(relativeMatches, filepath.ToSlash(rel))
+ }
+
+ require.ElementsMatch(t, relativeMatches, []string{"pkg", "src/pkg", "lib/pkg"})
+ })
+
+ t.Run("finds directory contents with recursive patterns", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ pkgDir := filepath.Join(testDir, "pkg")
+ pkgFile1 := filepath.Join(pkgDir, "main.go")
+ pkgFile2 := filepath.Join(pkgDir, "utils.go")
+ pkgSubdir := filepath.Join(pkgDir, "internal")
+ pkgSubfile := filepath.Join(pkgSubdir, "helper.go")
+
+ require.NoError(t, os.MkdirAll(pkgSubdir, 0o755))
+
+ for _, file := range []string{pkgFile1, pkgFile2, pkgSubfile} {
+ require.NoError(t, os.WriteFile(file, []byte("package main"), 0o644))
+ }
+
+ matches, truncated, err := GlobWithDoubleStar("pkg/**", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ var relativeMatches []string
+ for _, match := range matches {
+ rel, err := filepath.Rel(testDir, match)
+ require.NoError(t, err)
+ relativeMatches = append(relativeMatches, filepath.ToSlash(rel))
+ }
+
+ require.ElementsMatch(t, relativeMatches, []string{
+ "pkg",
+ "pkg/main.go",
+ "pkg/utils.go",
+ "pkg/internal",
+ "pkg/internal/helper.go",
+ })
+ })
+
+ t.Run("respects limit parameter", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ for i := range 10 {
+ file := filepath.Join(testDir, "file", fmt.Sprintf("test%d.txt", i))
+ require.NoError(t, os.MkdirAll(filepath.Dir(file), 0o755))
+ require.NoError(t, os.WriteFile(file, []byte("test"), 0o644))
+ }
+
+ matches, truncated, err := GlobWithDoubleStar("**/*.txt", testDir, 5)
+ require.NoError(t, err)
+ require.True(t, truncated, "Expected truncation with limit")
+ require.Len(t, matches, 5, "Expected exactly 5 matches with limit")
+ })
+
+ t.Run("handles nested directory patterns", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ file1 := filepath.Join(testDir, "a", "b", "c", "file1.txt")
+ file2 := filepath.Join(testDir, "a", "b", "file2.txt")
+ file3 := filepath.Join(testDir, "a", "file3.txt")
+ file4 := filepath.Join(testDir, "file4.txt")
+
+ for _, file := range []string{file1, file2, file3, file4} {
+ require.NoError(t, os.MkdirAll(filepath.Dir(file), 0o755))
+ require.NoError(t, os.WriteFile(file, []byte("test"), 0o644))
+ }
+
+ matches, truncated, err := GlobWithDoubleStar("a/b/c/file1.txt", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ require.Equal(t, []string{file1}, matches)
+ })
+
+ t.Run("returns results sorted by modification time (newest first)", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ file1 := filepath.Join(testDir, "file1.txt")
+ require.NoError(t, os.WriteFile(file1, []byte("first"), 0o644))
+
+ file2 := filepath.Join(testDir, "file2.txt")
+ require.NoError(t, os.WriteFile(file2, []byte("second"), 0o644))
+
+ file3 := filepath.Join(testDir, "file3.txt")
+ require.NoError(t, os.WriteFile(file3, []byte("third"), 0o644))
+
+ base := time.Now()
+ m1 := base
+ m2 := base.Add(10 * time.Hour)
+ m3 := base.Add(20 * time.Hour)
+
+ require.NoError(t, os.Chtimes(file1, m1, m1))
+ require.NoError(t, os.Chtimes(file2, m2, m2))
+ require.NoError(t, os.Chtimes(file3, m3, m3))
+
+ matches, truncated, err := GlobWithDoubleStar("*.txt", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+
+ require.Equal(t, []string{file3, file2, file1}, matches)
+ })
+
+ t.Run("handles empty directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ matches, truncated, err := GlobWithDoubleStar("**", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+ // Even for an empty directory, the glob should match the directory itself
+ require.Equal(t, []string{testDir}, matches)
+ })
+
+ t.Run("handles non-existent search path", func(t *testing.T) {
+ nonExistentDir := filepath.Join(t.TempDir(), "does", "not", "exist")
+
+ matches, truncated, err := GlobWithDoubleStar("**", nonExistentDir, 0)
+ require.Error(t, err, "Should return error for non-existent search path")
+ require.False(t, truncated)
+ require.Empty(t, matches)
+ })
+
+ t.Run("respects basic ignore patterns", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ rootIgnore := filepath.Join(testDir, ".crushignore")
+
+ require.NoError(t, os.WriteFile(rootIgnore, []byte("*.tmp\nbackup/\n"), 0o644))
+
+ goodFile := filepath.Join(testDir, "good.txt")
+ require.NoError(t, os.WriteFile(goodFile, []byte("content"), 0o644))
+
+ badFile := filepath.Join(testDir, "bad.tmp")
+ require.NoError(t, os.WriteFile(badFile, []byte("temp content"), 0o644))
+
+ goodDir := filepath.Join(testDir, "src")
+ require.NoError(t, os.MkdirAll(goodDir, 0o755))
+
+ ignoredDir := filepath.Join(testDir, "backup")
+ require.NoError(t, os.MkdirAll(ignoredDir, 0o755))
+
+ ignoredFileInDir := filepath.Join(testDir, "backup", "old.txt")
+ require.NoError(t, os.WriteFile(ignoredFileInDir, []byte("old content"), 0o644))
+
+ matches, truncated, err := GlobWithDoubleStar("*.tmp", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+ require.Empty(t, matches, "Expected no matches for '*.tmp' pattern (should be ignored)")
+
+ matches, truncated, err = GlobWithDoubleStar("backup", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+ require.Empty(t, matches, "Expected no matches for 'backup' pattern (should be ignored)")
+
+ matches, truncated, err = GlobWithDoubleStar("*.txt", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+ require.Equal(t, []string{goodFile}, matches)
+ })
+
+ t.Run("handles mixed file and directory matching with sorting", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ oldestFile := filepath.Join(testDir, "old.rs")
+ require.NoError(t, os.WriteFile(oldestFile, []byte("old"), 0o644))
+
+ middleDir := filepath.Join(testDir, "mid.rs")
+ require.NoError(t, os.MkdirAll(middleDir, 0o755))
+
+ newestFile := filepath.Join(testDir, "new.rs")
+ require.NoError(t, os.WriteFile(newestFile, []byte("new"), 0o644))
+
+ base := time.Now()
+ tOldest := base
+ tMiddle := base.Add(10 * time.Hour)
+ tNewest := base.Add(20 * time.Hour)
+
+ // Reverse the expected order
+ require.NoError(t, os.Chtimes(newestFile, tOldest, tOldest))
+ require.NoError(t, os.Chtimes(middleDir, tMiddle, tMiddle))
+ require.NoError(t, os.Chtimes(oldestFile, tNewest, tNewest))
+
+ matches, truncated, err := GlobWithDoubleStar("*.rs", testDir, 0)
+ require.NoError(t, err)
+ require.False(t, truncated)
+ require.Len(t, matches, 3)
+
+ // Results should be sorted by mod time, but we set the oldestFile
+ // to have the most recent mod time
+ require.Equal(t, []string{oldestFile, middleDir, newestFile}, matches)
+ })
+}
diff --git a/internal/fsext/ignore_test.go b/internal/fsext/ignore_test.go
index 1b517ec0408fe69726bf4fa4bbb95c2a206e548c..a652f3a285fd256840fb3a711fb36e0217a43e28 100644
--- a/internal/fsext/ignore_test.go
+++ b/internal/fsext/ignore_test.go
@@ -9,14 +9,8 @@ import (
)
func TestCrushIgnore(t *testing.T) {
- // Create a temporary directory for testing
tempDir := t.TempDir()
-
- // Change to temp directory
- oldWd, _ := os.Getwd()
- err := os.Chdir(tempDir)
- require.NoError(t, err)
- defer os.Chdir(oldWd)
+ t.Chdir(tempDir)
// Create test files
require.NoError(t, os.WriteFile("test1.txt", []byte("test"), 0o644))
diff --git a/internal/fsext/lookup.go b/internal/fsext/lookup.go
new file mode 100644
index 0000000000000000000000000000000000000000..098426571c69521a5978a2c2e0a4178b51b0aae6
--- /dev/null
+++ b/internal/fsext/lookup.go
@@ -0,0 +1,141 @@
+package fsext
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+
+ "github.com/charmbracelet/crush/internal/home"
+)
+
+// Lookup searches for target files or directories starting from dir and
+// walking up the directory tree until the filesystem root is reached.
+// It also checks file ownership to ensure that the search does not cross
+// ownership boundaries; ownership mismatches are skipped without error.
+// It returns the full paths of all found targets.
+// The search includes the starting directory itself.
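+//
+// Example (illustrative; the directory and target names are hypothetical):
+//
+//	found, err := Lookup("/home/user/project/src", ".gitignore", ".crushignore")
+//	// found holds the full path of every target discovered in src, project,
+//	// and each ancestor up to the filesystem root, e.g.
+//	// "/home/user/project/.gitignore".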
+func Lookup(dir string, targets ...string) ([]string, error) {
+ if len(targets) == 0 {
+ return nil, nil
+ }
+
+ var found []string
+
+ err := traverseUp(dir, func(cwd string, owner int) error {
+ for _, target := range targets {
+ fpath := filepath.Join(cwd, target)
+ err := probeEnt(fpath, owner)
+
+ // skip to the next target when it does not exist or permission is denied
+ if errors.Is(err, os.ErrNotExist) ||
+ errors.Is(err, os.ErrPermission) {
+ continue
+ }
+
+ if err != nil {
+ return fmt.Errorf("error probing file %s: %w", fpath, err)
+ }
+
+ found = append(found, fpath)
+ }
+
+ return nil
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ return found, nil
+}
+
+// LookupClosest searches for a target file or directory starting from dir
+// and walking up the directory tree until the target is found or the
+// filesystem root or home directory is reached.
+// It also checks the ownership of files to ensure that the search does
+// not cross ownership boundaries.
+// It returns the full path to the target if found, or an empty string and
+// false otherwise.
+// The search includes the starting directory itself.
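+//
+// Example (illustrative; the directory and target are hypothetical):
+//
+//	path, ok := LookupClosest("/home/user/project/src", "go.mod")
+//	// ok is true and path is "/home/user/project/go.mod" when the nearest
+//	// go.mod lives at the project root.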
+func LookupClosest(dir, target string) (string, bool) {
+ var found string
+
+ err := traverseUp(dir, func(cwd string, owner int) error {
+ fpath := filepath.Join(cwd, target)
+
+ err := probeEnt(fpath, owner)
+ if errors.Is(err, os.ErrNotExist) {
+ return nil
+ }
+
+ if err != nil {
+ return fmt.Errorf("error probing file %s: %w", fpath, err)
+ }
+
+ if cwd == home.Dir() {
+ return filepath.SkipAll
+ }
+
+ found = fpath
+ return filepath.SkipAll
+ })
+
+ return found, err == nil && found != ""
+}
+
+// traverseUp walks up from the given directory until the filesystem root is
+// reached. It passes the absolute path of the current directory and the owner
+// ID of the starting directory to the callback; it is up to the caller to
+// check ownership.
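+// The callback may return filepath.SkipAll to stop the walk early; returning
+// nil or filepath.SkipDir continues with the parent directory, and any other
+// error aborts the traversal.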
+func traverseUp(dir string, walkFn func(dir string, owner int) error) error {
+ cwd, err := filepath.Abs(dir)
+ if err != nil {
+ return fmt.Errorf("cannot convert CWD to absolute path: %w", err)
+ }
+
+ owner, err := Owner(dir)
+ if err != nil {
+ return fmt.Errorf("cannot get ownership: %w", err)
+ }
+
+ for {
+ err := walkFn(cwd, owner)
+ if err == nil || errors.Is(err, filepath.SkipDir) {
+ parent := filepath.Dir(cwd)
+ if parent == cwd {
+ return nil
+ }
+
+ cwd = parent
+ continue
+ }
+
+ if errors.Is(err, filepath.SkipAll) {
+ return nil
+ }
+
+ return err
+ }
+}
+
+// probeEnt checks that the entity at the given path exists and belongs to the
+// given owner.
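+// A stat failure is returned wrapped (errors.Is-compatible, e.g.
+// os.ErrNotExist), an ownership mismatch yields os.ErrPermission, and an
+// owner of -1 bypasses the ownership check entirely.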
+func probeEnt(fspath string, owner int) error {
+ _, err := os.Stat(fspath)
+ if err != nil {
+ return fmt.Errorf("cannot stat %s: %w", fspath, err)
+ }
+
+ // special case for ownership check bypass
+ if owner == -1 {
+ return nil
+ }
+
+ fowner, err := Owner(fspath)
+ if err != nil {
+ return fmt.Errorf("cannot get ownership for %s: %w", fspath, err)
+ }
+
+ if fowner != owner {
+ return os.ErrPermission
+ }
+
+ return nil
+}
diff --git a/internal/fsext/lookup_test.go b/internal/fsext/lookup_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..97c167f37d8ebcf4d19124367955874e7f816b67
--- /dev/null
+++ b/internal/fsext/lookup_test.go
@@ -0,0 +1,437 @@
+package fsext
+
+import (
+ "errors"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/charmbracelet/crush/internal/home"
+ "github.com/stretchr/testify/require"
+)
+
+func TestLookupClosest(t *testing.T) {
+ tempDir := t.TempDir()
+ t.Chdir(tempDir)
+
+ t.Run("target found in starting directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create target file in current directory
+ targetFile := filepath.Join(testDir, "target.txt")
+ err := os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ foundPath, found := LookupClosest(testDir, "target.txt")
+ require.True(t, found)
+ require.Equal(t, targetFile, foundPath)
+ })
+
+ t.Run("target found in parent directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create subdirectory
+ subDir := filepath.Join(testDir, "subdir")
+ err := os.Mkdir(subDir, 0o755)
+ require.NoError(t, err)
+
+ // Create target file in parent directory
+ targetFile := filepath.Join(testDir, "target.txt")
+ err = os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ foundPath, found := LookupClosest(subDir, "target.txt")
+ require.True(t, found)
+ require.Equal(t, targetFile, foundPath)
+ })
+
+ t.Run("target found in grandparent directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create nested subdirectories
+ subDir := filepath.Join(testDir, "subdir")
+ err := os.Mkdir(subDir, 0o755)
+ require.NoError(t, err)
+
+ subSubDir := filepath.Join(subDir, "subsubdir")
+ err = os.Mkdir(subSubDir, 0o755)
+ require.NoError(t, err)
+
+ // Create target file in grandparent directory
+ targetFile := filepath.Join(testDir, "target.txt")
+ err = os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ foundPath, found := LookupClosest(subSubDir, "target.txt")
+ require.True(t, found)
+ require.Equal(t, targetFile, foundPath)
+ })
+
+ t.Run("target not found", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ foundPath, found := LookupClosest(testDir, "nonexistent.txt")
+ require.False(t, found)
+ require.Empty(t, foundPath)
+ })
+
+ t.Run("target directory found", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create target directory in current directory
+ targetDir := filepath.Join(testDir, "targetdir")
+ err := os.Mkdir(targetDir, 0o755)
+ require.NoError(t, err)
+
+ foundPath, found := LookupClosest(testDir, "targetdir")
+ require.True(t, found)
+ require.Equal(t, targetDir, foundPath)
+ })
+
+ t.Run("stops at home directory", func(t *testing.T) {
+ // This test is limited as we can't easily create files above home directory
+ // but we can test the behavior by searching from home directory itself
+ homeDir := home.Dir()
+
+ // Search for a file that doesn't exist from home directory
+ foundPath, found := LookupClosest(homeDir, "nonexistent_file_12345.txt")
+ require.False(t, found)
+ require.Empty(t, foundPath)
+ })
+
+ t.Run("invalid starting directory", func(t *testing.T) {
+ foundPath, found := LookupClosest("/invalid/path/that/does/not/exist", "target.txt")
+ require.False(t, found)
+ require.Empty(t, foundPath)
+ })
+
+ t.Run("relative path handling", func(t *testing.T) {
+ // Create target file in current directory
+ require.NoError(t, os.WriteFile("target.txt", []byte("test"), 0o644))
+
+ // Search using relative path
+ foundPath, found := LookupClosest(".", "target.txt")
+ require.True(t, found)
+
+ // Resolve symlinks to handle macOS /private/var vs /var discrepancy
+ expectedPath, err := filepath.EvalSymlinks(filepath.Join(tempDir, "target.txt"))
+ require.NoError(t, err)
+ actualPath, err := filepath.EvalSymlinks(foundPath)
+ require.NoError(t, err)
+ require.Equal(t, expectedPath, actualPath)
+ })
+}
+
+func TestLookupClosestWithOwnership(t *testing.T) {
+ // Note: Testing ownership boundaries is difficult in a cross-platform way
+ // without creating complex directory structures with different owners.
+ // This test focuses on the basic functionality when ownership checks pass.
+
+ tempDir := t.TempDir()
+ t.Chdir(tempDir)
+
+ t.Run("search respects same ownership", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create subdirectory structure
+ subDir := filepath.Join(testDir, "subdir")
+ err := os.Mkdir(subDir, 0o755)
+ require.NoError(t, err)
+
+ // Create target file in parent directory
+ targetFile := filepath.Join(testDir, "target.txt")
+ err = os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ // Search should find the target assuming same ownership
+ foundPath, found := LookupClosest(subDir, "target.txt")
+ require.True(t, found)
+ require.Equal(t, targetFile, foundPath)
+ })
+}
+
+func TestLookup(t *testing.T) {
+ tempDir := t.TempDir()
+ t.Chdir(tempDir)
+
+ t.Run("no targets returns empty slice", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ found, err := Lookup(testDir)
+ require.NoError(t, err)
+ require.Empty(t, found)
+ })
+
+ t.Run("single target found in starting directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create target file in current directory
+ targetFile := filepath.Join(testDir, "target.txt")
+ err := os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ found, err := Lookup(testDir, "target.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 1)
+ require.Equal(t, targetFile, found[0])
+ })
+
+ t.Run("multiple targets found in starting directory", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create multiple target files in current directory
+ targetFile1 := filepath.Join(testDir, "target1.txt")
+ targetFile2 := filepath.Join(testDir, "target2.txt")
+ targetFile3 := filepath.Join(testDir, "target3.txt")
+
+ err := os.WriteFile(targetFile1, []byte("test1"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile2, []byte("test2"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile3, []byte("test3"), 0o644)
+ require.NoError(t, err)
+
+ found, err := Lookup(testDir, "target1.txt", "target2.txt", "target3.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 3)
+ require.Contains(t, found, targetFile1)
+ require.Contains(t, found, targetFile2)
+ require.Contains(t, found, targetFile3)
+ })
+
+ t.Run("targets found in parent directories", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create subdirectory
+ subDir := filepath.Join(testDir, "subdir")
+ err := os.Mkdir(subDir, 0o755)
+ require.NoError(t, err)
+
+ // Create target files in parent directory
+ targetFile1 := filepath.Join(testDir, "target1.txt")
+ targetFile2 := filepath.Join(testDir, "target2.txt")
+ err = os.WriteFile(targetFile1, []byte("test1"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile2, []byte("test2"), 0o644)
+ require.NoError(t, err)
+
+ found, err := Lookup(subDir, "target1.txt", "target2.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 2)
+ require.Contains(t, found, targetFile1)
+ require.Contains(t, found, targetFile2)
+ })
+
+ t.Run("targets found across multiple directory levels", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create nested subdirectories
+ subDir := filepath.Join(testDir, "subdir")
+ err := os.Mkdir(subDir, 0o755)
+ require.NoError(t, err)
+
+ subSubDir := filepath.Join(subDir, "subsubdir")
+ err = os.Mkdir(subSubDir, 0o755)
+ require.NoError(t, err)
+
+ // Create target files at different levels
+ targetFile1 := filepath.Join(testDir, "target1.txt")
+ targetFile2 := filepath.Join(subDir, "target2.txt")
+ targetFile3 := filepath.Join(subSubDir, "target3.txt")
+
+ err = os.WriteFile(targetFile1, []byte("test1"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile2, []byte("test2"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile3, []byte("test3"), 0o644)
+ require.NoError(t, err)
+
+ found, err := Lookup(subSubDir, "target1.txt", "target2.txt", "target3.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 3)
+ require.Contains(t, found, targetFile1)
+ require.Contains(t, found, targetFile2)
+ require.Contains(t, found, targetFile3)
+ })
+
+ t.Run("some targets not found", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create only some target files
+ targetFile1 := filepath.Join(testDir, "target1.txt")
+ targetFile2 := filepath.Join(testDir, "target2.txt")
+
+ err := os.WriteFile(targetFile1, []byte("test1"), 0o644)
+ require.NoError(t, err)
+ err = os.WriteFile(targetFile2, []byte("test2"), 0o644)
+ require.NoError(t, err)
+
+ // Search for existing and non-existing targets
+ found, err := Lookup(testDir, "target1.txt", "nonexistent.txt", "target2.txt", "another_nonexistent.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 2)
+ require.Contains(t, found, targetFile1)
+ require.Contains(t, found, targetFile2)
+ })
+
+ t.Run("no targets found", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ found, err := Lookup(testDir, "nonexistent1.txt", "nonexistent2.txt", "nonexistent3.txt")
+ require.NoError(t, err)
+ require.Empty(t, found)
+ })
+
+ t.Run("target directories found", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create target directories
+ targetDir1 := filepath.Join(testDir, "targetdir1")
+ targetDir2 := filepath.Join(testDir, "targetdir2")
+ err := os.Mkdir(targetDir1, 0o755)
+ require.NoError(t, err)
+ err = os.Mkdir(targetDir2, 0o755)
+ require.NoError(t, err)
+
+ found, err := Lookup(testDir, "targetdir1", "targetdir2")
+ require.NoError(t, err)
+ require.Len(t, found, 2)
+ require.Contains(t, found, targetDir1)
+ require.Contains(t, found, targetDir2)
+ })
+
+ t.Run("mixed files and directories", func(t *testing.T) {
+ testDir := t.TempDir()
+
+ // Create target files and directories
+ targetFile := filepath.Join(testDir, "target.txt")
+ targetDir := filepath.Join(testDir, "targetdir")
+ err := os.WriteFile(targetFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+ err = os.Mkdir(targetDir, 0o755)
+ require.NoError(t, err)
+
+ found, err := Lookup(testDir, "target.txt", "targetdir")
+ require.NoError(t, err)
+ require.Len(t, found, 2)
+ require.Contains(t, found, targetFile)
+ require.Contains(t, found, targetDir)
+ })
+
+ t.Run("invalid starting directory", func(t *testing.T) {
+ found, err := Lookup("/invalid/path/that/does/not/exist", "target.txt")
+ require.Error(t, err)
+ require.Empty(t, found)
+ })
+
+ t.Run("relative path handling", func(t *testing.T) {
+ // Create target files in current directory
+ require.NoError(t, os.WriteFile("target1.txt", []byte("test1"), 0o644))
+ require.NoError(t, os.WriteFile("target2.txt", []byte("test2"), 0o644))
+
+ // Search using relative path
+ found, err := Lookup(".", "target1.txt", "target2.txt")
+ require.NoError(t, err)
+ require.Len(t, found, 2)
+
+ // Resolve symlinks to handle macOS /private/var vs /var discrepancy
+ expectedPath1, err := filepath.EvalSymlinks(filepath.Join(tempDir, "target1.txt"))
+ require.NoError(t, err)
+ expectedPath2, err := filepath.EvalSymlinks(filepath.Join(tempDir, "target2.txt"))
+ require.NoError(t, err)
+
+ // Check that found paths match expected paths (order may vary)
+ foundEvalSymlinks := make([]string, len(found))
+ for i, path := range found {
+ evalPath, err := filepath.EvalSymlinks(path)
+ require.NoError(t, err)
+ foundEvalSymlinks[i] = evalPath
+ }
+
+ require.Contains(t, foundEvalSymlinks, expectedPath1)
+ require.Contains(t, foundEvalSymlinks, expectedPath2)
+ })
+}
+
+func TestProbeEnt(t *testing.T) {
+ t.Run("existing file with correct owner", func(t *testing.T) {
+ tempDir := t.TempDir()
+
+ // Create test file
+ testFile := filepath.Join(tempDir, "test.txt")
+ err := os.WriteFile(testFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ // Get owner of temp directory
+ owner, err := Owner(tempDir)
+ require.NoError(t, err)
+
+ // Test probeEnt with correct owner
+ err = probeEnt(testFile, owner)
+ require.NoError(t, err)
+ })
+
+ t.Run("existing directory with correct owner", func(t *testing.T) {
+ tempDir := t.TempDir()
+
+ // Create test directory
+ testDir := filepath.Join(tempDir, "testdir")
+ err := os.Mkdir(testDir, 0o755)
+ require.NoError(t, err)
+
+ // Get owner of temp directory
+ owner, err := Owner(tempDir)
+ require.NoError(t, err)
+
+ // Test probeEnt with correct owner
+ err = probeEnt(testDir, owner)
+ require.NoError(t, err)
+ })
+
+ t.Run("nonexistent file", func(t *testing.T) {
+ tempDir := t.TempDir()
+
+ nonexistentFile := filepath.Join(tempDir, "nonexistent.txt")
+ owner, err := Owner(tempDir)
+ require.NoError(t, err)
+
+ err = probeEnt(nonexistentFile, owner)
+ require.Error(t, err)
+ require.True(t, errors.Is(err, os.ErrNotExist))
+ })
+
+ t.Run("nonexistent file in nonexistent directory", func(t *testing.T) {
+ nonexistentFile := "/this/directory/does/not/exists/nonexistent.txt"
+
+ err := probeEnt(nonexistentFile, -1)
+ require.Error(t, err)
+ require.True(t, errors.Is(err, os.ErrNotExist))
+ })
+
+ t.Run("ownership bypass with -1", func(t *testing.T) {
+ tempDir := t.TempDir()
+
+ // Create test file
+ testFile := filepath.Join(tempDir, "test.txt")
+ err := os.WriteFile(testFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ // Test probeEnt with -1 (bypass ownership check)
+ err = probeEnt(testFile, -1)
+ require.NoError(t, err)
+ })
+
+ t.Run("ownership mismatch returns permission error", func(t *testing.T) {
+ tempDir := t.TempDir()
+
+ // Create test file
+ testFile := filepath.Join(tempDir, "test.txt")
+ err := os.WriteFile(testFile, []byte("test"), 0o644)
+ require.NoError(t, err)
+
+ // Test probeEnt with different owner (use 9999 which is unlikely to be the actual owner)
+ err = probeEnt(testFile, 9999)
+ require.Error(t, err)
+ require.True(t, errors.Is(err, os.ErrPermission))
+ })
+}
diff --git a/internal/fsext/ls.go b/internal/fsext/ls.go
index fa732b5b81c2192292a02c86d512f78e93ebfc37..5fef4a594dea49fbb0d1fd257401f0f84abe8765 100644
--- a/internal/fsext/ls.go
+++ b/internal/fsext/ls.go
@@ -1,9 +1,11 @@
package fsext
import (
+ "errors"
"log/slog"
"os"
"path/filepath"
+ "slices"
"strings"
"sync"
@@ -70,6 +72,11 @@ var commonIgnorePatterns = sync.OnceValue(func() ignore.IgnoreParser {
// Crush
".crush",
+
+ // macOS stuff
+ "OrbStack",
+ ".local",
+ ".share",
)
})
@@ -199,7 +206,7 @@ func (dl *directoryLister) getIgnore(path string) ignore.IgnoreParser {
}
type (
- DirectoryLister func(initialPath string, ignorePatterns []string, limit int) ([]string, bool, error)
+ DirectoryLister func(initialPath string, ignorePatterns []string, depth, limit int) ([]string, bool, error)
DirectoryListerResolver func() DirectoryLister
)
@@ -207,21 +214,22 @@ func ResolveDirectoryLister() DirectoryLister {
return listDirectory
}
-func listDirectory(initialPath string, ignorePatterns []string, limit int) ([]string, bool, error) {
- return ListDirectory(initialPath, ignorePatterns, limit)
+func listDirectory(initialPath string, ignorePatterns []string, depth, limit int) ([]string, bool, error) {
+ return ListDirectory(initialPath, ignorePatterns, depth, limit)
}
// ListDirectory lists files and directories in the specified path,
-func ListDirectory(initialPath string, ignorePatterns []string, limit int) ([]string, bool, error) {
- var results []string
- truncated := false
+func ListDirectory(initialPath string, ignorePatterns []string, depth, limit int) ([]string, bool, error) {
+ found := csync.NewSlice[string]()
dl := NewDirectoryLister(initialPath)
+ slog.Warn("listing directory", "path", initialPath, "depth", depth, "limit", limit, "ignorePatterns", ignorePatterns)
+
conf := fastwalk.Config{
- Follow: true,
- // Use forward slashes when running a Windows binary under WSL or MSYS
- ToSlash: fastwalk.DefaultToSlash(),
- Sort: fastwalk.SortDirsFirst,
+ Follow: true,
+ ToSlash: fastwalk.DefaultToSlash(),
+ Sort: fastwalk.SortDirsFirst,
+ MaxDepth: depth,
}
err := fastwalk.Walk(&conf, initialPath, func(path string, d os.DirEntry, err error) error {
@@ -240,19 +248,19 @@ func ListDirectory(initialPath string, ignorePatterns []string, limit int) ([]st
if d.IsDir() {
path = path + string(filepath.Separator)
}
- results = append(results, path)
+ found.Append(path)
}
- if limit > 0 && len(results) >= limit {
- truncated = true
+ if limit > 0 && found.Len() >= limit {
return filepath.SkipAll
}
return nil
})
- if err != nil && len(results) == 0 {
- return nil, truncated, err
+ if err != nil && !errors.Is(err, filepath.SkipAll) {
+ return nil, false, err
}
- return results, truncated, nil
+ matches, truncated := truncate(slices.Collect(found.Seq()), limit)
+ return matches, truncated || errors.Is(err, filepath.SkipAll), nil
}
diff --git a/internal/fsext/ls_test.go b/internal/fsext/ls_test.go
index a74ca3276c9af0edac6adbe1bd6e367d952af492..7bdad17fc46955d49fa08f7488d6efe8239294cb 100644
--- a/internal/fsext/ls_test.go
+++ b/internal/fsext/ls_test.go
@@ -5,26 +5,11 @@ import (
"path/filepath"
"testing"
- "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
-func chdir(t *testing.T, dir string) {
- original, err := os.Getwd()
- require.NoError(t, err)
-
- err = os.Chdir(dir)
- require.NoError(t, err)
-
- t.Cleanup(func() {
- err := os.Chdir(original)
- require.NoError(t, err)
- })
-}
-
func TestListDirectory(t *testing.T) {
- tempDir := t.TempDir()
- chdir(t, tempDir)
+ tmp := t.TempDir()
testFiles := map[string]string{
"regular.txt": "content",
@@ -35,32 +20,40 @@ func TestListDirectory(t *testing.T) {
"build.log": "build output",
}
- for filePath, content := range testFiles {
- dir := filepath.Dir(filePath)
- if dir != "." {
- require.NoError(t, os.MkdirAll(dir, 0o755))
- }
-
- err := os.WriteFile(filePath, []byte(content), 0o644)
- require.NoError(t, err)
+ for name, content := range testFiles {
+ fp := filepath.Join(tmp, name)
+ dir := filepath.Dir(fp)
+ require.NoError(t, os.MkdirAll(dir, 0o755))
+ require.NoError(t, os.WriteFile(fp, []byte(content), 0o644))
}
- files, truncated, err := ListDirectory(".", nil, 0)
- require.NoError(t, err)
- assert.False(t, truncated)
- assert.Equal(t, len(files), 4)
+ t.Run("no limit", func(t *testing.T) {
+ files, truncated, err := ListDirectory(tmp, nil, -1, -1)
+ require.NoError(t, err)
+ require.False(t, truncated)
+ require.Len(t, files, 4)
+ require.ElementsMatch(t, []string{
+ "regular.txt",
+ "subdir",
+ "subdir/.another",
+ "subdir/file.go",
+ }, relPaths(t, files, tmp))
+ })
+ t.Run("limit", func(t *testing.T) {
+ files, truncated, err := ListDirectory(tmp, nil, -1, 2)
+ require.NoError(t, err)
+ require.True(t, truncated)
+ require.Len(t, files, 2)
+ })
+}
- fileSet := make(map[string]bool)
- for _, file := range files {
- fileSet[filepath.ToSlash(file)] = true
+func relPaths(tb testing.TB, in []string, base string) []string {
+ tb.Helper()
+ out := make([]string, 0, len(in))
+ for _, p := range in {
+ rel, err := filepath.Rel(base, p)
+ require.NoError(tb, err)
+ out = append(out, filepath.ToSlash(rel))
}
-
- assert.True(t, fileSet["./regular.txt"])
- assert.True(t, fileSet["./subdir/"])
- assert.True(t, fileSet["./subdir/file.go"])
- assert.True(t, fileSet["./regular.txt"])
-
- assert.False(t, fileSet["./.hidden"])
- assert.False(t, fileSet["./.gitignore"])
- assert.False(t, fileSet["./build.log"])
+ return out
}
diff --git a/internal/fsext/owner_windows.go b/internal/fsext/owner_windows.go
index 107cda009b5fc152cba3200271c7145ff3227a39..41f9091c3e75e8f187984a8e1ddb7a7aa72c9dab 100644
--- a/internal/fsext/owner_windows.go
+++ b/internal/fsext/owner_windows.go
@@ -2,8 +2,14 @@
package fsext
+import "os"
+
// Owner retrieves the user ID of the owner of the file or directory at the
// specified path.
func Owner(path string) (int, error) {
+ _, err := os.Stat(path)
+ if err != nil {
+ return 0, err
+ }
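+ // Windows has no POSIX-style numeric owner ID here; returning -1 signals
+ // callers (e.g. probeEnt) to bypass the ownership check.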
return -1, nil
}
diff --git a/internal/fsext/parent.go b/internal/fsext/parent.go
deleted file mode 100644
index bd3193610a79cbc80b5bb2c1d75be32a819f34f5..0000000000000000000000000000000000000000
--- a/internal/fsext/parent.go
+++ /dev/null
@@ -1,60 +0,0 @@
-package fsext
-
-import (
- "errors"
- "os"
- "path/filepath"
-
- "github.com/charmbracelet/crush/internal/home"
-)
-
-// SearchParent searches for a target file or directory starting from dir
-// and walking up the directory tree until found or root or home is reached.
-// It also checks the ownership of directories to ensure that the search does
-// not cross ownership boundaries.
-// Returns the full path to the target if found, empty string and false otherwise.
-// The search includes the starting directory itself.
-func SearchParent(dir, target string) (string, bool) {
- absDir, err := filepath.Abs(dir)
- if err != nil {
- return "", false
- }
-
- path := filepath.Join(absDir, target)
- if _, err := os.Stat(path); err == nil {
- return path, true
- } else if !errors.Is(err, os.ErrNotExist) {
- return "", false
- }
-
- previousParent := absDir
- previousOwner, err := Owner(previousParent)
- if err != nil {
- return "", false
- }
-
- for {
- parent := filepath.Dir(previousParent)
- if parent == previousParent || parent == home.Dir() {
- return "", false
- }
-
- parentOwner, err := Owner(parent)
- if err != nil {
- return "", false
- }
- if parentOwner != previousOwner {
- return "", false
- }
-
- path := filepath.Join(parent, target)
- if _, err := os.Stat(path); err == nil {
- return path, true
- } else if !errors.Is(err, os.ErrNotExist) {
- return "", false
- }
-
- previousParent = parent
- previousOwner = parentOwner
- }
-}
diff --git a/internal/home/home.go b/internal/home/home.go
index f2a9b73b922abd8f027ba68655afc68f42a58b09..74ab5594bf19377a86e6e96cae298a91b4858cff 100644
--- a/internal/home/home.go
+++ b/internal/home/home.go
@@ -1,3 +1,4 @@
+// Package home provides utilities for dealing with the user's home directory.
package home
import (
diff --git a/internal/llm/agent/agent.go b/internal/llm/agent/agent.go
index 02ff02c2df5c85b688b892971472e22fa4aed0b7..1efc3fc268392c06481d61ae6e11c9d67cdc13e8 100644
--- a/internal/llm/agent/agent.go
+++ b/internal/llm/agent/agent.go
@@ -5,6 +5,7 @@ import (
"errors"
"fmt"
"log/slog"
+ "maps"
"slices"
"strings"
"time"
@@ -12,6 +13,7 @@ import (
"github.com/charmbracelet/catwalk/pkg/catwalk"
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/csync"
+ "github.com/charmbracelet/crush/internal/event"
"github.com/charmbracelet/crush/internal/history"
"github.com/charmbracelet/crush/internal/llm/prompt"
"github.com/charmbracelet/crush/internal/llm/provider"
@@ -25,12 +27,6 @@ import (
"github.com/charmbracelet/crush/internal/shell"
)
-// Common errors
-var (
- ErrRequestCancelled = errors.New("request canceled by user")
- ErrSessionBusy = errors.New("session is currently processing another request")
-)
-
type AgentEventType string
const (
@@ -66,12 +62,17 @@ type Service interface {
type agent struct {
*pubsub.Broker[AgentEvent]
- agentCfg config.Agent
- sessions session.Service
- messages message.Service
- mcpTools []McpTool
-
- tools *csync.LazySlice[tools.BaseTool]
+ agentCfg config.Agent
+ sessions session.Service
+ messages message.Service
+ permissions permission.Service
+ baseTools *csync.Map[string, tools.BaseTool]
+ mcpTools *csync.Map[string, tools.BaseTool]
+ lspClients *csync.Map[string, *lsp.Client]
+
+ // We need this to be able to update it when the model changes
+ agentToolFn func() (tools.BaseTool, error)
+ cleanupFuncs []func()
provider provider.Provider
providerID string
@@ -81,8 +82,7 @@ type agent struct {
summarizeProviderID string
activeRequests *csync.Map[string, context.CancelFunc]
-
- promptQueue *csync.Map[string, []string]
+ promptQueue *csync.Map[string, []string]
}
var agentPromptMap = map[string]prompt.PromptID{
@@ -98,22 +98,23 @@ func NewAgent(
sessions session.Service,
messages message.Service,
history history.Service,
- lspClients map[string]*lsp.Client,
+ lspClients *csync.Map[string, *lsp.Client],
) (Service, error) {
cfg := config.Get()
- var agentTool tools.BaseTool
- if agentCfg.ID == "coder" {
- taskAgentCfg := config.Get().Agents["task"]
- if taskAgentCfg.ID == "" {
- return nil, fmt.Errorf("task agent not found in config")
- }
- taskAgent, err := NewAgent(ctx, taskAgentCfg, permissions, sessions, messages, history, lspClients)
- if err != nil {
- return nil, fmt.Errorf("failed to create task agent: %w", err)
+ var agentToolFn func() (tools.BaseTool, error)
+ if agentCfg.ID == "coder" && slices.Contains(agentCfg.AllowedTools, AgentToolName) {
+ agentToolFn = func() (tools.BaseTool, error) {
+ taskAgentCfg := config.Get().Agents["task"]
+ if taskAgentCfg.ID == "" {
+ return nil, fmt.Errorf("task agent not found in config")
+ }
+ taskAgent, err := NewAgent(ctx, taskAgentCfg, permissions, sessions, messages, history, lspClients)
+ if err != nil {
+ return nil, fmt.Errorf("failed to create task agent: %w", err)
+ }
+ return NewAgentTool(taskAgent, sessions, messages), nil
}
-
- agentTool = NewAgentTool(taskAgent, sessions, messages)
}
providerCfg := config.Get().GetProviderForModel(agentCfg.Model)
@@ -173,15 +174,17 @@ func NewAgent(
return nil, err
}
- toolFn := func() []tools.BaseTool {
- slog.Info("Initializing agent tools", "agent", agentCfg.ID)
+ baseToolsFn := func() map[string]tools.BaseTool {
+ slog.Info("Initializing agent base tools", "agent", agentCfg.ID)
defer func() {
- slog.Info("Initialized agent tools", "agent", agentCfg.ID)
+ slog.Info("Initialized agent base tools", "agent", agentCfg.ID)
}()
+ // Base tools available to all agents
cwd := cfg.WorkingDir()
- allTools := []tools.BaseTool{
- tools.NewBashTool(permissions, cwd),
+ result := make(map[string]tools.BaseTool)
+ for _, tool := range []tools.BaseTool{
+ tools.NewBashTool(permissions, cwd, cfg.Options.Attribution),
tools.NewDownloadTool(permissions, cwd),
tools.NewEditTool(lspClients, permissions, history, cwd),
tools.NewMultiEditTool(lspClients, permissions, history, cwd),
@@ -192,35 +195,25 @@ func NewAgent(
tools.NewSourcegraphTool(),
tools.NewViewTool(lspClients, permissions, cwd),
tools.NewWriteTool(lspClients, permissions, history, cwd),
+ } {
+ result[tool.Name()] = tool
}
+ return result
+ }
+ mcpToolsFn := func() map[string]tools.BaseTool {
+ slog.Info("Initializing agent mcp tools", "agent", agentCfg.ID)
+ defer func() {
+ slog.Info("Initialized agent mcp tools", "agent", agentCfg.ID)
+ }()
mcpToolsOnce.Do(func() {
- mcpTools = doGetMCPTools(ctx, permissions, cfg)
+ doGetMCPTools(ctx, permissions, cfg)
})
- allTools = append(allTools, mcpTools...)
-
- if len(lspClients) > 0 {
- allTools = append(allTools, tools.NewDiagnosticsTool(lspClients))
- }
-
- if agentTool != nil {
- allTools = append(allTools, agentTool)
- }
-
- if agentCfg.AllowedTools == nil {
- return allTools
- }
- var filteredTools []tools.BaseTool
- for _, tool := range allTools {
- if slices.Contains(agentCfg.AllowedTools, tool.Name()) {
- filteredTools = append(filteredTools, tool)
- }
- }
- return filteredTools
+ return maps.Collect(mcpTools.Seq2())
}
- return &agent{
+ a := &agent{
Broker: pubsub.NewBroker[AgentEvent](),
agentCfg: agentCfg,
provider: agentProvider,
@@ -230,10 +223,16 @@ func NewAgent(
titleProvider: titleProvider,
summarizeProvider: summarizeProvider,
summarizeProviderID: string(providerCfg.ID),
+ agentToolFn: agentToolFn,
activeRequests: csync.NewMap[string, context.CancelFunc](),
- tools: csync.NewLazySlice(toolFn),
+ mcpTools: csync.NewLazyMap(mcpToolsFn),
+ baseTools: csync.NewLazyMap(baseToolsFn),
promptQueue: csync.NewMap[string, []string](),
- }, nil
+ permissions: permissions,
+ lspClients: lspClients,
+ }
+ a.setupEvents(ctx)
+ return a, nil
}
func (a *agent) Model() catwalk.Model {
@@ -322,7 +321,13 @@ func (a *agent) generateTitle(ctx context.Context, sessionID string, content str
return fmt.Errorf("no response received from title provider")
}
- title := strings.TrimSpace(strings.ReplaceAll(finalResponse.Content, "\n", " "))
+ title := strings.ReplaceAll(finalResponse.Content, "\n", " ")
+
+ if idx := strings.Index(title, ""); idx > 0 {
+ title = title[idx+len(""):]
+ }
+
+ title = strings.TrimSpace(title)
if title == "" {
return nil
}
@@ -355,8 +360,9 @@ func (a *agent) Run(ctx context.Context, sessionID string, content string, attac
}
genCtx, cancel := context.WithCancel(ctx)
-
a.activeRequests.Set(sessionID, cancel)
+ startTime := time.Now()
+
go func() {
slog.Debug("Request started", "sessionID", sessionID)
defer log.RecoverPanic("agent.Run", func() {
@@ -367,16 +373,24 @@ func (a *agent) Run(ctx context.Context, sessionID string, content string, attac
attachmentParts = append(attachmentParts, message.BinaryContent{Path: attachment.FilePath, MIMEType: attachment.MimeType, Data: attachment.Content})
}
result := a.processGeneration(genCtx, sessionID, content, attachmentParts)
- if result.Error != nil && !errors.Is(result.Error, ErrRequestCancelled) && !errors.Is(result.Error, context.Canceled) {
- slog.Error(result.Error.Error())
+ if result.Error != nil {
+ if isCancelledErr(result.Error) {
+ slog.Error("Request canceled", "sessionID", sessionID)
+ } else {
+ slog.Error("Request errored", "sessionID", sessionID, "error", result.Error.Error())
+ event.Error(result.Error)
+ }
+ } else {
+ slog.Debug("Request completed", "sessionID", sessionID)
}
- slog.Debug("Request completed", "sessionID", sessionID)
+ a.eventPromptResponded(sessionID, time.Since(startTime).Truncate(time.Second))
a.activeRequests.Del(sessionID)
cancel()
a.Publish(pubsub.CreatedEvent, result)
events <- result
close(events)
}()
+ a.eventPromptSent(sessionID)
return events, nil
}
@@ -500,6 +514,29 @@ func (a *agent) createUserMessage(ctx context.Context, sessionID, content string
})
}
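+// getAllTools assembles the tool set for this agent: base tools filtered by
+// the allowed-tools list, plus the MCP and diagnostics tools for the coder
+// agent, and the task agent tool when agentToolFn is configured.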
+func (a *agent) getAllTools() ([]tools.BaseTool, error) {
+ var allTools []tools.BaseTool
+ for tool := range a.baseTools.Seq() {
+ if a.agentCfg.AllowedTools == nil || slices.Contains(a.agentCfg.AllowedTools, tool.Name()) {
+ allTools = append(allTools, tool)
+ }
+ }
+ if a.agentCfg.ID == "coder" {
+ allTools = slices.AppendSeq(allTools, a.mcpTools.Seq())
+ if a.lspClients.Len() > 0 {
+ allTools = append(allTools, tools.NewDiagnosticsTool(a.lspClients))
+ }
+ }
+ if a.agentToolFn != nil {
+ agentTool, agentToolErr := a.agentToolFn()
+ if agentToolErr != nil {
+ return nil, agentToolErr
+ }
+ allTools = append(allTools, agentTool)
+ }
+ return allTools, nil
+}
+
func (a *agent) streamAndHandleEvents(ctx context.Context, sessionID string, msgHistory []message.Message) (message.Message, *message.Message, error) {
ctx = context.WithValue(ctx, tools.SessionIDContextKey, sessionID)
@@ -514,23 +551,32 @@ func (a *agent) streamAndHandleEvents(ctx context.Context, sessionID string, msg
return assistantMsg, nil, fmt.Errorf("failed to create assistant message: %w", err)
}
+ allTools, toolsErr := a.getAllTools()
+ if toolsErr != nil {
+ return assistantMsg, nil, toolsErr
+ }
// Now collect tools (which may block on MCP initialization)
- eventChan := a.provider.StreamResponse(ctx, msgHistory, slices.Collect(a.tools.Seq()))
+ eventChan := a.provider.StreamResponse(ctx, msgHistory, allTools)
// Add the session and message ID into the context if needed by tools.
ctx = context.WithValue(ctx, tools.MessageIDContextKey, assistantMsg.ID)
- // Process each event in the stream.
- for event := range eventChan {
- if processErr := a.processEvent(ctx, sessionID, &assistantMsg, event); processErr != nil {
- if errors.Is(processErr, context.Canceled) {
- a.finishMessage(context.Background(), &assistantMsg, message.FinishReasonCanceled, "Request cancelled", "")
- } else {
- a.finishMessage(ctx, &assistantMsg, message.FinishReasonError, "API Error", processErr.Error())
+loop:
+ for {
+ select {
+ case event, ok := <-eventChan:
+ if !ok {
+ break loop
}
- return assistantMsg, nil, processErr
- }
- if ctx.Err() != nil {
+ if processErr := a.processEvent(ctx, sessionID, &assistantMsg, event); processErr != nil {
+ if errors.Is(processErr, context.Canceled) {
+ a.finishMessage(context.Background(), &assistantMsg, message.FinishReasonCanceled, "Request cancelled", "")
+ } else {
+ a.finishMessage(ctx, &assistantMsg, message.FinishReasonError, "API Error", processErr.Error())
+ }
+ return assistantMsg, nil, processErr
+ }
+ case <-ctx.Done():
a.finishMessage(context.Background(), &assistantMsg, message.FinishReasonCanceled, "Request cancelled", "")
return assistantMsg, nil, ctx.Err()
}
@@ -554,7 +600,8 @@ func (a *agent) streamAndHandleEvents(ctx context.Context, sessionID string, msg
default:
// Continue processing
var tool tools.BaseTool
- for availableTool := range a.tools.Seq() {
+ allTools, _ = a.getAllTools()
+ for _, availableTool := range allTools {
if availableTool.Info().Name == toolCall.Name {
tool = availableTool
break
@@ -699,13 +746,13 @@ func (a *agent) processEvent(ctx context.Context, sessionID string, assistantMsg
if err := a.messages.Update(ctx, *assistantMsg); err != nil {
return fmt.Errorf("failed to update message: %w", err)
}
- return a.TrackUsage(ctx, sessionID, a.Model(), event.Response.Usage)
+ return a.trackUsage(ctx, sessionID, a.Model(), event.Response.Usage)
}
return nil
}
-func (a *agent) TrackUsage(ctx context.Context, sessionID string, model catwalk.Model, usage provider.TokenUsage) error {
+func (a *agent) trackUsage(ctx context.Context, sessionID string, model catwalk.Model, usage provider.TokenUsage) error {
sess, err := a.sessions.Get(ctx, sessionID)
if err != nil {
return fmt.Errorf("failed to get session: %w", err)
@@ -716,6 +763,8 @@ func (a *agent) TrackUsage(ctx context.Context, sessionID string, model catwalk.
model.CostPer1MIn/1e6*float64(usage.InputTokens) +
model.CostPer1MOut/1e6*float64(usage.OutputTokens)
+ a.eventTokensUsed(sessionID, usage, cost)
+
sess.Cost += cost
sess.CompletionTokens = usage.OutputTokens + usage.CacheReadTokens
sess.PromptTokens = usage.InputTokens + usage.CacheCreationTokens
@@ -811,7 +860,7 @@ func (a *agent) Summarize(ctx context.Context, sessionID string) error {
if r.Error != nil {
event = AgentEvent{
Type: AgentEventTypeError,
- Error: fmt.Errorf("failed to summarize: %w", err),
+ Error: fmt.Errorf("failed to summarize: %w", r.Error),
Done: true,
}
a.Publish(pubsub.CreatedEvent, event)
@@ -920,6 +969,12 @@ func (a *agent) CancelAll() {
a.Cancel(key) // key is sessionID
}
+ for _, cleanup := range a.cleanupFuncs {
+ if cleanup != nil {
+ cleanup()
+ }
+ }
+
timeout := time.After(5 * time.Second)
for a.IsBusy() {
select {
@@ -1031,3 +1086,48 @@ func (a *agent) UpdateModel() error {
return nil
}
+
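+// setupEvents subscribes to MCP events and refreshes the agent's MCP tool map
+// whenever a server reports a changed tool list. The subscription's cancel
+// function is stored in cleanupFuncs and invoked from CancelAll.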
+func (a *agent) setupEvents(ctx context.Context) {
+ ctx, cancel := context.WithCancel(ctx)
+
+ go func() {
+ subCh := SubscribeMCPEvents(ctx)
+
+ for {
+ select {
+ case event, ok := <-subCh:
+ if !ok {
+ slog.Debug("MCPEvents subscription channel closed")
+ return
+ }
+ switch event.Payload.Type {
+ case MCPEventToolsListChanged:
+ name := event.Payload.Name
+ c, ok := mcpClients.Get(name)
+ if !ok {
+ slog.Warn("MCP client not found for tools update", "name", name)
+ continue
+ }
+ cfg := config.Get()
+ tools, err := getTools(ctx, name, a.permissions, c, cfg.WorkingDir())
+ if err != nil {
+ slog.Error("error listing tools", "error", err)
+ updateMCPState(name, MCPStateError, err, nil, 0)
+ _ = c.Close()
+ continue
+ }
+ updateMcpTools(name, tools)
+ a.mcpTools.Reset(maps.Collect(mcpTools.Seq2()))
+ updateMCPState(name, MCPStateConnected, nil, c, a.mcpTools.Len())
+ default:
+ continue
+ }
+ case <-ctx.Done():
+ slog.Debug("MCPEvents subscription cancelled")
+ return
+ }
+ }
+ }()
+
+ a.cleanupFuncs = append(a.cleanupFuncs, cancel)
+}
diff --git a/internal/llm/agent/errors.go b/internal/llm/agent/errors.go
new file mode 100644
index 0000000000000000000000000000000000000000..0e2f983d64b42b93ad3a51f32ce0335b0374a613
--- /dev/null
+++ b/internal/llm/agent/errors.go
@@ -0,0 +1,15 @@
+package agent
+
+import (
+ "context"
+ "errors"
+)
+
+var (
+ ErrRequestCancelled = errors.New("request canceled by user")
+ ErrSessionBusy = errors.New("session is currently processing another request")
+)
+
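+// isCancelledErr reports whether err represents a cancellation, either
+// explicitly by the user or via context cancellation.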
+func isCancelledErr(err error) bool {
+ return errors.Is(err, context.Canceled) || errors.Is(err, ErrRequestCancelled)
+}
diff --git a/internal/llm/agent/event.go b/internal/llm/agent/event.go
new file mode 100644
index 0000000000000000000000000000000000000000..8642d9990dc31689292abe9f2b39e685462f158e
--- /dev/null
+++ b/internal/llm/agent/event.go
@@ -0,0 +1,53 @@
+package agent
+
+import (
+ "time"
+
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/event"
+ "github.com/charmbracelet/crush/internal/llm/provider"
+)
+
+func (a *agent) eventPromptSent(sessionID string) {
+ event.PromptSent(
+ a.eventCommon(sessionID)...,
+ )
+}
+
+func (a *agent) eventPromptResponded(sessionID string, duration time.Duration) {
+ event.PromptResponded(
+ append(
+ a.eventCommon(sessionID),
+ "prompt duration pretty", duration.String(),
+ "prompt duration in seconds", int64(duration.Seconds()),
+ )...,
+ )
+}
+
+func (a *agent) eventTokensUsed(sessionID string, usage provider.TokenUsage, cost float64) {
+ event.TokensUsed(
+ append(
+ a.eventCommon(sessionID),
+ "input tokens", usage.InputTokens,
+ "output tokens", usage.OutputTokens,
+ "cache read tokens", usage.CacheReadTokens,
+ "cache creation tokens", usage.CacheCreationTokens,
+ "total tokens", usage.InputTokens+usage.OutputTokens+usage.CacheReadTokens+usage.CacheCreationTokens,
+ "cost", cost,
+ )...,
+ )
+}
+
+func (a *agent) eventCommon(sessionID string) []any {
+ cfg := config.Get()
+ currentModel := cfg.Models[cfg.Agents["coder"].Model]
+
+ return []any{
+ "session id", sessionID,
+ "provider", currentModel.Provider,
+ "model", currentModel.Model,
+ "reasoning effort", currentModel.ReasoningEffort,
+ "thinking mode", currentModel.Think,
+ "yolo mode", a.permissions.SkipRequests(),
+ }
+}
diff --git a/internal/llm/agent/mcp-tools.go b/internal/llm/agent/mcp-tools.go
index bb50231da028e714c783f50cc7ebd8a1f4b595db..181f32b7280faf3eb36040d2ebecf3f892350f53 100644
--- a/internal/llm/agent/mcp-tools.go
+++ b/internal/llm/agent/mcp-tools.go
@@ -8,13 +8,13 @@ import (
"fmt"
"log/slog"
"maps"
- "slices"
"strings"
"sync"
"time"
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/csync"
+ "github.com/charmbracelet/crush/internal/home"
"github.com/charmbracelet/crush/internal/llm/tools"
"github.com/charmbracelet/crush/internal/permission"
"github.com/charmbracelet/crush/internal/pubsub"
@@ -53,7 +53,8 @@ func (s MCPState) String() string {
type MCPEventType string
const (
- MCPEventStateChanged MCPEventType = "state_changed"
+ MCPEventStateChanged MCPEventType = "state_changed"
+ MCPEventToolsListChanged MCPEventType = "tools_list_changed"
)
// MCPEvent represents an event in the MCP system
@@ -76,11 +77,12 @@ type MCPClientInfo struct {
}
var (
- mcpToolsOnce sync.Once
- mcpTools []tools.BaseTool
- mcpClients = csync.NewMap[string, *client.Client]()
- mcpStates = csync.NewMap[string, MCPClientInfo]()
- mcpBroker = pubsub.NewBroker[MCPEvent]()
+ mcpToolsOnce sync.Once
+ mcpTools = csync.NewMap[string, tools.BaseTool]()
+ mcpClient2Tools = csync.NewMap[string, []tools.BaseTool]()
+ mcpClients = csync.NewMap[string, *client.Client]()
+ mcpStates = csync.NewMap[string, MCPClientInfo]()
+ mcpBroker = pubsub.NewBroker[MCPEvent]()
)
type McpTool struct {
@@ -148,18 +150,20 @@ func getOrRenewClient(ctx context.Context, name string) (*client.Client, error)
return nil, fmt.Errorf("mcp '%s' not available", name)
}
- m := config.Get().MCP[name]
+ cfg := config.Get()
+ m := cfg.MCP[name]
state, _ := mcpStates.Get(name)
- pingCtx, cancel := context.WithTimeout(ctx, mcpTimeout(m))
+ timeout := mcpTimeout(m)
+ pingCtx, cancel := context.WithTimeout(ctx, timeout)
defer cancel()
err := c.Ping(pingCtx)
if err == nil {
return c, nil
}
- updateMCPState(name, MCPStateError, err, nil, state.ToolCount)
+ updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, state.ToolCount)
- c, err = createAndInitializeClient(ctx, name, m)
+ c, err = createAndInitializeClient(ctx, name, m, cfg.Resolver())
if err != nil {
return nil, err
}
@@ -174,7 +178,7 @@ func (b *McpTool) Run(ctx context.Context, params tools.ToolCall) (tools.ToolRes
if sessionID == "" || messageID == "" {
return tools.ToolResponse{}, fmt.Errorf("session ID and message ID are required for creating a new file")
}
- permissionDescription := fmt.Sprintf("execute %s with the following parameters: %s", b.Info().Name, params.Input)
+ permissionDescription := fmt.Sprintf("execute %s with the following parameters:", b.Info().Name)
p := b.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
@@ -193,14 +197,10 @@ func (b *McpTool) Run(ctx context.Context, params tools.ToolCall) (tools.ToolRes
return runTool(ctx, b.mcpName, b.tool.Name, params.Input)
}
-func getTools(ctx context.Context, name string, permissions permission.Service, c *client.Client, workingDir string) []tools.BaseTool {
+func getTools(ctx context.Context, name string, permissions permission.Service, c *client.Client, workingDir string) ([]tools.BaseTool, error) {
result, err := c.ListTools(ctx, mcp.ListToolsRequest{})
if err != nil {
- slog.Error("error listing tools", "error", err)
- updateMCPState(name, MCPStateError, err, nil, 0)
- c.Close()
- mcpClients.Del(name)
- return nil
+ return nil, err
}
mcpTools := make([]tools.BaseTool, 0, len(result.Tools))
for _, tool := range result.Tools {
@@ -211,7 +211,7 @@ func getTools(ctx context.Context, name string, permissions permission.Service,
workingDir: workingDir,
})
}
- return mcpTools
+ return mcpTools, nil
}
// SubscribeMCPEvents returns a channel for MCP events
@@ -238,8 +238,12 @@ func updateMCPState(name string, state MCPState, err error, client *client.Clien
Client: client,
ToolCount: toolCount,
}
- if state == MCPStateConnected {
+ switch state {
+ case MCPStateConnected:
info.ConnectedAt = time.Now()
+ case MCPStateError:
+ updateMcpTools(name, nil)
+ mcpClients.Del(name)
}
mcpStates.Set(name, info)
@@ -253,12 +257,20 @@ func updateMCPState(name string, state MCPState, err error, client *client.Clien
})
}
+// publishMCPEventToolsListChanged publishes a tool list changed event
+func publishMCPEventToolsListChanged(name string) {
+ mcpBroker.Publish(pubsub.UpdatedEvent, MCPEvent{
+ Type: MCPEventToolsListChanged,
+ Name: name,
+ })
+}
+
// CloseMCPClients closes all MCP clients. This should be called during application shutdown.
func CloseMCPClients() error {
var errs []error
- for c := range mcpClients.Seq() {
+ for name, c := range mcpClients.Seq2() {
if err := c.Close(); err != nil {
- errs = append(errs, err)
+ errs = append(errs, fmt.Errorf("close mcp: %s: %w", name, err))
}
}
mcpBroker.Shutdown()
@@ -275,10 +287,8 @@ var mcpInitRequest = mcp.InitializeRequest{
},
}
-func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *config.Config) []tools.BaseTool {
+func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *config.Config) {
var wg sync.WaitGroup
- result := csync.NewSlice[tools.BaseTool]()
-
// Initialize states for all configured MCPs
for name, m := range cfg.MCP {
if m.Disabled {
@@ -311,59 +321,111 @@ func doGetMCPTools(ctx context.Context, permissions permission.Service, cfg *con
ctx, cancel := context.WithTimeout(ctx, mcpTimeout(m))
defer cancel()
- c, err := createAndInitializeClient(ctx, name, m)
+
+ c, err := createAndInitializeClient(ctx, name, m, cfg.Resolver())
if err != nil {
return
}
+
mcpClients.Set(name, c)
- tools := getTools(ctx, name, permissions, c, cfg.WorkingDir())
+ tools, err := getTools(ctx, name, permissions, c, cfg.WorkingDir())
+ if err != nil {
+ slog.Error("error listing tools", "error", err)
+ updateMCPState(name, MCPStateError, err, nil, 0)
+ c.Close()
+ return
+ }
+
+ updateMcpTools(name, tools)
+ mcpClients.Set(name, c)
updateMCPState(name, MCPStateConnected, nil, c, len(tools))
- result.Append(tools...)
}(name, m)
}
wg.Wait()
- return slices.Collect(result.Seq())
}
-func createAndInitializeClient(ctx context.Context, name string, m config.MCPConfig) (*client.Client, error) {
- c, err := createMcpClient(m)
+// updateMcpTools updates the global mcpTools and mcpClient2Tools maps
+func updateMcpTools(mcpName string, tools []tools.BaseTool) {
+ if len(tools) == 0 {
+ mcpClient2Tools.Del(mcpName)
+ } else {
+ mcpClient2Tools.Set(mcpName, tools)
+ }
+ for _, tools := range mcpClient2Tools.Seq2() {
+ for _, t := range tools {
+ mcpTools.Set(t.Name(), t)
+ }
+ }
+}
+
+func createAndInitializeClient(ctx context.Context, name string, m config.MCPConfig, resolver config.VariableResolver) (*client.Client, error) {
+ c, err := createMcpClient(name, m, resolver)
if err != nil {
updateMCPState(name, MCPStateError, err, nil, 0)
slog.Error("error creating mcp client", "error", err, "name", name)
return nil, err
}
- // Only call Start() for non-stdio clients, as stdio clients auto-start
- if m.Type != config.MCPStdio {
- if err := c.Start(ctx); err != nil {
- updateMCPState(name, MCPStateError, err, nil, 0)
- slog.Error("error starting mcp client", "error", err, "name", name)
- _ = c.Close()
- return nil, err
+
+ c.OnNotification(func(n mcp.JSONRPCNotification) {
+ slog.Debug("Received MCP notification", "name", name, "notification", n)
+ switch n.Method {
+ case "notifications/tools/list_changed":
+ publishMCPEventToolsListChanged(name)
+ default:
+ slog.Debug("Unhandled MCP notification", "name", name, "method", n.Method)
}
+ })
+
+ // XXX: ideally we should be able to use context.WithTimeout here, but
+ // the SSE MCP client will start failing once that context is canceled.
+ timeout := mcpTimeout(m)
+ mcpCtx, cancel := context.WithCancel(ctx)
+ cancelTimer := time.AfterFunc(timeout, cancel)
+
+ if err := c.Start(mcpCtx); err != nil {
+ updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
+ slog.Error("error starting mcp client", "error", err, "name", name)
+ _ = c.Close()
+ cancel()
+ return nil, err
}
- if _, err := c.Initialize(ctx, mcpInitRequest); err != nil {
- updateMCPState(name, MCPStateError, err, nil, 0)
+
+ if _, err := c.Initialize(mcpCtx, mcpInitRequest); err != nil {
+ updateMCPState(name, MCPStateError, maybeTimeoutErr(err, timeout), nil, 0)
slog.Error("error initializing mcp client", "error", err, "name", name)
_ = c.Close()
+ cancel()
return nil, err
}
+ cancelTimer.Stop()
slog.Info("Initialized mcp client", "name", name)
return c, nil
}
-func createMcpClient(m config.MCPConfig) (*client.Client, error) {
+func maybeTimeoutErr(err error, timeout time.Duration) error {
+ if errors.Is(err, context.Canceled) {
+ return fmt.Errorf("timed out after %s", timeout)
+ }
+ return err
+}
+
+func createMcpClient(name string, m config.MCPConfig, resolver config.VariableResolver) (*client.Client, error) {
switch m.Type {
case config.MCPStdio:
- if strings.TrimSpace(m.Command) == "" {
+ command, err := resolver.ResolveValue(m.Command)
+ if err != nil {
+ return nil, fmt.Errorf("invalid mcp command: %w", err)
+ }
+ if strings.TrimSpace(command) == "" {
return nil, fmt.Errorf("mcp stdio config requires a non-empty 'command' field")
}
return client.NewStdioMCPClientWithOptions(
- m.Command,
+ home.Long(command),
m.ResolvedEnv(),
m.Args,
- transport.WithCommandLogger(mcpLogger{}),
+ transport.WithCommandLogger(mcpLogger{name: name}),
)
case config.MCPHttp:
if strings.TrimSpace(m.URL) == "" {
@@ -372,7 +434,7 @@ func createMcpClient(m config.MCPConfig) (*client.Client, error) {
return client.NewStreamableHttpClient(
m.URL,
transport.WithHTTPHeaders(m.ResolvedHeaders()),
- transport.WithHTTPLogger(mcpLogger{}),
+ transport.WithHTTPLogger(mcpLogger{name: name}),
)
case config.MCPSse:
if strings.TrimSpace(m.URL) == "" {
@@ -381,7 +443,7 @@ func createMcpClient(m config.MCPConfig) (*client.Client, error) {
return client.NewSSEMCPClient(
m.URL,
client.WithHeaders(m.ResolvedHeaders()),
- transport.WithSSELogger(mcpLogger{}),
+ transport.WithSSELogger(mcpLogger{name: name}),
)
default:
return nil, fmt.Errorf("unsupported mcp type: %s", m.Type)
@@ -389,10 +451,15 @@ func createMcpClient(m config.MCPConfig) (*client.Client, error) {
}
// for MCP's clients.
-type mcpLogger struct{}
+type mcpLogger struct{ name string }
+
+func (l mcpLogger) Errorf(format string, v ...any) {
+ slog.Error(fmt.Sprintf(format, v...), "name", l.name)
+}
-func (l mcpLogger) Errorf(format string, v ...any) { slog.Error(fmt.Sprintf(format, v...)) }
-func (l mcpLogger) Infof(format string, v ...any) { slog.Info(fmt.Sprintf(format, v...)) }
+func (l mcpLogger) Infof(format string, v ...any) {
+ slog.Info(fmt.Sprintf(format, v...), "name", l.name)
+}
func mcpTimeout(m config.MCPConfig) time.Duration {
return time.Duration(cmp.Or(m.Timeout, 15)) * time.Second
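As the XXX note in createAndInitializeClient explains, a plain context.WithTimeout would later break the SSE transport, which keeps using the start context after initialization. The sketch below isolates the pattern the hunk uses instead: a cancelable context armed with time.AfterFunc, disarmed once startup finishes. The starter interface and fakeClient are hypothetical stand-ins, not the MCP client API.

```go
package main

import (
	"context"
	"fmt"
	"time"
)

// starter stands in for a client that must be started and initialized with a
// context it will keep using for its whole lifetime.
type starter interface {
	Start(ctx context.Context) error
	Initialize(ctx context.Context) error
}

// startWithStartupTimeout cancels ctx only if startup exceeds timeout; once
// initialization succeeds the timer is stopped, so the long-lived context is
// never killed by the startup deadline.
func startWithStartupTimeout(ctx context.Context, s starter, timeout time.Duration) error {
	ctx, cancel := context.WithCancel(ctx)
	timer := time.AfterFunc(timeout, cancel)

	if err := s.Start(ctx); err != nil {
		cancel()
		return fmt.Errorf("start: %w", err)
	}
	if err := s.Initialize(ctx); err != nil {
		cancel()
		return fmt.Errorf("initialize: %w", err)
	}
	// Intentionally no cancel on success: the client keeps using ctx.
	timer.Stop()
	return nil
}

// fakeClient simulates a client whose initialization takes a little while.
type fakeClient struct{ delay time.Duration }

func (f fakeClient) Start(ctx context.Context) error { return ctx.Err() }

func (f fakeClient) Initialize(ctx context.Context) error {
	time.Sleep(f.delay)
	return ctx.Err()
}

func main() {
	err := startWithStartupTimeout(context.Background(), fakeClient{delay: 10 * time.Millisecond}, time.Second)
	fmt.Println("startup error:", err)
}
```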
diff --git a/internal/llm/prompt/coder.go b/internal/llm/prompt/coder.go
index bf3eb5ea47d877c8adf21816900d7acb1434aa82..57ed088b22de03fe875ad0822f159b35eb36a834 100644
--- a/internal/llm/prompt/coder.go
+++ b/internal/llm/prompt/coder.go
@@ -45,9 +45,6 @@ var anthropicCoderPrompt []byte
//go:embed gemini.md
var geminiCoderPrompt []byte
-//go:embed openai.md
-var openaiCoderPrompt []byte
-
//go:embed v2.md
var coderV2Prompt []byte
@@ -56,7 +53,7 @@ func getEnvironmentInfo() string {
isGit := isGitRepo(cwd)
platform := runtime.GOOS
date := time.Now().Format("1/2/2006")
- output, _ := tools.ListDirectoryTree(cwd, nil)
+ output, _, _ := tools.ListDirectoryTree(cwd, tools.LSParams{})
return fmt.Sprintf(`Here is useful information about the environment you are running in:
Working directory: %s
diff --git a/internal/llm/prompt/openai.md b/internal/llm/prompt/openai.md
deleted file mode 100644
index 1c69d40152cd380993f2032e0b3967b370be90fc..0000000000000000000000000000000000000000
--- a/internal/llm/prompt/openai.md
+++ /dev/null
@@ -1,96 +0,0 @@
-Please resolve the user's task by editing and testing the code files in your current code execution session.
-You are a deployed coding agent.
-Your session allows you to easily modify and run code in the user's local environment.
-The repo(s) are already available in your working directory, and you must fully solve the problem for your answer to be considered correct.
-
-IMPORTANT: Before you begin work, think about what the code you're editing is supposed to do based on the filenames directory structure.
-
-# Memory
-
-If the current working directory contains a file called CRUSH.md, it will be automatically added to your context. This file serves multiple purposes:
-
-1. Storing frequently used bash commands (build, test, lint, etc.) so you can use them without searching each time
-2. Recording the user's code style preferences (naming conventions, preferred libraries, etc.)
-3. Maintaining useful information about the codebase structure and organization
-
-When you spend time searching for commands to typecheck, lint, build, or test, you should ask the user if it's okay to add those commands to CRUSH.md. Similarly, when learning about code style preferences or important codebase information, ask if it's okay to add that to CRUSH.md so you can remember it for next time.
-
-You MUST adhere to the following criteria when executing the task:
-
-- Working on the repo(s) in the current environment is allowed, even if they are proprietary.
-- Analyzing code for vulnerabilities is allowed.
-- Showing user code and tool call details is allowed.
-- User instructions may overwrite the _CODING GUIDELINES_ section in this developer message.
-- Do not use `ls -R` `find`, or `grep` - these are slow in large repos. Use the Agent tool for searching instead.
-- Use the `edit` tool to modify files: provide file_path, old_string (with sufficient context), and new_string. The edit tool requires:
- - Absolute file paths (starting with /)
- - Unique old_string matches with 3-5 lines of context before and after
- - Exact whitespace and indentation matching
- - For new files: provide file_path and new_string, leave old_string empty
- - For deleting content: provide file_path and old_string, leave new_string empty
-
-# Following conventions
-
-When making changes to files, first understand the file's code conventions. Mimic code style, use existing libraries and utilities, and follow existing patterns.
-
-- NEVER assume that a given library is available, even if it is well known. Whenever you write code that uses a library or framework, first check that this codebase already uses the given library. For example, you might look at neighboring files, or check the package.json (or cargo.toml, and so on depending on the language).
-- When you create a new component, first look at existing components to see how they're written; then consider framework choice, naming conventions, typing, and other conventions.
-- When you edit a piece of code, first look at the code's surrounding context (especially its imports) to understand the code's choice of frameworks and libraries. Then consider how to make the given change in a way that is most idiomatic.
-- Always follow security best practices. Never introduce code that exposes or logs secrets and keys. Never commit secrets or keys to the repository.
-
-# Code style
-
-- IMPORTANT: DO NOT ADD **_ANY_** COMMENTS unless asked
-
-- If completing the user's task requires writing or modifying files:
- - Your code and final answer should follow these _CODING GUIDELINES_:
- - Fix the problem at the root cause rather than applying surface-level patches, when possible.
- - Avoid unneeded complexity in your solution.
- - Ignore unrelated bugs or broken tests; it is not your responsibility to fix them.
- - Update documentation as necessary.
- - Keep changes consistent with the style of the existing codebase. Changes should be minimal and focused on the task.
- - Use `git log` and `git blame` to search the history of the codebase if additional context is required.
- - NEVER add copyright or license headers unless specifically requested.
- - You do not need to `git commit` your changes; this will be done automatically for you.
- - If there is a .pre-commit-config.yaml, use `pre-commit run --files ...` to check that your changes pass the pre-commit checks. However, do not fix pre-existing errors on lines you didn't touch.
- - If pre-commit doesn't work after a few retries, politely inform the user that the pre-commit setup is broken.
- - Once you finish coding, you must
- - Check `git status` to sanity check your changes; revert any scratch files or changes.
- - Remove all inline comments you added as much as possible, even if they look normal. Check using `git diff`. Inline comments must be generally avoided, unless active maintainers of the repo, after long careful study of the code and the issue, will still misinterpret the code without the comments.
- - Check if you accidentally add copyright or license headers. If so, remove them.
- - Try to run pre-commit if it is available.
- - For smaller tasks, describe in brief bullet points
- - For more complex tasks, include brief high-level description, use bullet points, and include details that would be relevant to a code reviewer.
-
-# Doing tasks
-
-The user will primarily request you perform software engineering tasks. This includes solving bugs, adding new functionality, refactoring code, explaining code, and more. For these tasks the following steps are recommended:
-
-1. Use the available search tools to understand the codebase and the user's query.
-2. Implement the solution using all tools available to you
-3. Verify the solution if possible with tests. NEVER assume specific test framework or test script. Check the README or search codebase to determine the testing approach.
-4. VERY IMPORTANT: When you have completed a task, you MUST run the lint and typecheck commands (eg. npm run lint, npm run typecheck, ruff, etc.) if they were provided to you to ensure your code is correct. If you are unable to find the correct command, ask the user for the command to run and if they supply it, proactively suggest writing it to CRUSH.md so that you will know to run it next time.
-
-NEVER commit changes unless the user explicitly asks you to. It is VERY IMPORTANT to only commit when explicitly asked, otherwise the user will feel that you are being too proactive.
-
-# Tool usage policy
-
-- When doing file search, prefer to use the Agent tool in order to reduce context usage.
-- IMPORTANT: All tools are executed in parallel when multiple tool calls are sent in a single message. Only send multiple tool calls when they are safe to run in parallel (no dependencies between them).
-- IMPORTANT: The user does not see the full output of the tool responses, so if you need the output of the tool for the response make sure to summarize it for the user.
-
-# Proactiveness
-
-You are allowed to be proactive, but only when the user asks you to do something. You should strive to strike a balance between:
-
-1. Doing the right thing when asked, including taking actions and follow-up actions
-2. Not surprising the user with actions you take without asking
- For example, if the user asks you how to approach something, you should do your best to answer their question first, and not immediately jump into taking actions.
-3. Do not add additional code explanation summary unless requested by the user. After working on a file, just stop, rather than providing an explanation of what you did.
-
-- If completing the user's task DOES NOT require writing or modifying files (e.g., the user asks a question about the code base):
- - Respond in a friendly tone as a remote teammate, who is knowledgeable, capable and eager to help with coding.
-- When your task involves writing or modifying files:
- - Do NOT tell the user to "save the file" or "copy the code into a file" if you already created or modified the file using `edit`. Instead, reference the file as already saved.
- - Do NOT show the full contents of large files you have already written, unless the user explicitly asks for them.
-- NEVER use emojis in your responses
diff --git a/internal/llm/provider/anthropic.go b/internal/llm/provider/anthropic.go
index 3fffd6b35fe6ee8b6a765e2e5b815ad36a5b6a55..981ff4590fd7db92288ff11b3d8f607e594cb0fd 100644
--- a/internal/llm/provider/anthropic.go
+++ b/internal/llm/provider/anthropic.go
@@ -7,6 +7,7 @@ import (
"fmt"
"io"
"log/slog"
+ "net/http"
"regexp"
"strconv"
"strings"
@@ -175,7 +176,7 @@ func (a *anthropicClient) convertMessages(messages []message.Message) (anthropic
anthropicMessages = append(anthropicMessages, anthropic.NewUserMessage(results...))
}
}
- return
+ return anthropicMessages
}
func (a *anthropicClient) convertTools(tools []tools.BaseTool) []anthropic.ToolUnionParam {
@@ -492,17 +493,23 @@ func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, err
return false, 0, fmt.Errorf("maximum retry attempts reached for rate limit: %d retries", maxRetries)
}
- if apiErr.StatusCode == 401 {
+ if apiErr.StatusCode == http.StatusUnauthorized {
+ prev := a.providerOptions.apiKey
+ // in case the key comes from a script, we try to re-evaluate it.
a.providerOptions.apiKey, err = config.Get().Resolve(a.providerOptions.config.APIKey)
if err != nil {
return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
}
+ // if it didn't change, do not retry.
+ if prev == a.providerOptions.apiKey {
+ return false, 0, err
+ }
a.client = createAnthropicClient(a.providerOptions, a.tp)
return true, 0, nil
}
// Handle context limit exceeded error (400 Bad Request)
- if apiErr.StatusCode == 400 {
+ if apiErr.StatusCode == http.StatusBadRequest {
if adjusted, ok := a.handleContextLimitError(apiErr); ok {
a.adjustedMaxTokens = adjusted
slog.Debug("Adjusted max_tokens due to context limit", "new_max_tokens", adjusted)
@@ -511,7 +518,8 @@ func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, err
}
isOverloaded := strings.Contains(apiErr.Error(), "overloaded") || strings.Contains(apiErr.Error(), "rate limit exceeded")
- if apiErr.StatusCode != 429 && apiErr.StatusCode != 529 && !isOverloaded {
+ // 529 (unofficial): The service is overloaded
+ if apiErr.StatusCode != http.StatusTooManyRequests && apiErr.StatusCode != 529 && !isOverloaded {
return false, 0, err
}
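The 401 handling above (and the matching changes in gemini.go and openai.go below) re-resolves the configured API key, which may come from a script, and retries only when the resolved value actually changed. A compact sketch of that decision; resolve is a hypothetical callback standing in for the config resolver, and retryAfterAuthError is an illustrative name.

```go
package main

import (
	"errors"
	"fmt"
)

// retryAfterAuthError re-resolves the API key and reports whether a retry is
// worthwhile: only when resolution succeeds and yields a different key.
func retryAfterAuthError(current string, resolve func() (string, error), authErr error) (newKey string, retry bool, err error) {
	newKey, err = resolve()
	if err != nil {
		return "", false, fmt.Errorf("failed to resolve API key: %w", err)
	}
	if newKey == current {
		// Same key as before: retrying would just fail again.
		return current, false, authErr
	}
	return newKey, true, nil
}

func main() {
	authErr := errors.New("401 unauthorized")

	// Key did not change: no retry, surface the original error.
	_, retry, err := retryAfterAuthError("old-key", func() (string, error) { return "old-key", nil }, authErr)
	fmt.Println(retry, err)

	// Key was rotated by the external command: retry with the new value.
	key, retry, _ := retryAfterAuthError("old-key", func() (string, error) { return "new-key", nil }, authErr)
	fmt.Println(key, retry)
}
```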
diff --git a/internal/llm/provider/gemini.go b/internal/llm/provider/gemini.go
index 9d5164973a5ad86b4c0dee001e54b46b838b89e6..a846d8d582524bb6bf9c8ed31e3796ec8d94b419 100644
--- a/internal/llm/provider/gemini.go
+++ b/internal/llm/provider/gemini.go
@@ -44,6 +44,14 @@ func createGeminiClient(opts providerClientOptions) (*genai.Client, error) {
APIKey: opts.apiKey,
Backend: genai.BackendGeminiAPI,
}
+ if opts.baseURL != "" {
+ resolvedBaseURL, err := config.Get().Resolve(opts.baseURL)
+ if err == nil && resolvedBaseURL != "" {
+ cc.HTTPOptions = genai.HTTPOptions{
+ BaseURL: resolvedBaseURL,
+ }
+ }
+ }
if config.Get().Options.Debug {
cc.HTTPClient = log.NewHTTPClient()
}
@@ -62,9 +70,8 @@ func (g *geminiClient) convertMessages(messages []message.Message) []*genai.Cont
var parts []*genai.Part
parts = append(parts, &genai.Part{Text: msg.Content().String()})
for _, binaryContent := range msg.BinaryContent() {
- imageFormat := strings.Split(binaryContent.MIMEType, "/")
parts = append(parts, &genai.Part{InlineData: &genai.Blob{
- MIMEType: imageFormat[1],
+ MIMEType: binaryContent.MIMEType,
Data: binaryContent.Data,
}})
}
@@ -102,6 +109,7 @@ func (g *geminiClient) convertMessages(messages []message.Message) []*genai.Cont
}
case message.Tool:
+ var toolParts []*genai.Part
for _, result := range msg.ToolResults() {
response := map[string]any{"result": result.Content}
parsed, err := parseJSONToMap(result.Content)
@@ -121,16 +129,17 @@ func (g *geminiClient) convertMessages(messages []message.Message) []*genai.Cont
}
}
- history = append(history, &genai.Content{
- Parts: []*genai.Part{
- {
- FunctionResponse: &genai.FunctionResponse{
- Name: toolCall.Name,
- Response: response,
- },
- },
+ toolParts = append(toolParts, &genai.Part{
+ FunctionResponse: &genai.FunctionResponse{
+ Name: toolCall.Name,
+ Response: response,
},
- Role: genai.RoleModel,
+ })
+ }
+ if len(toolParts) > 0 {
+ history = append(history, &genai.Content{
+ Parts: toolParts,
+ Role: genai.RoleUser,
})
}
}
@@ -373,17 +382,7 @@ func (g *geminiClient) stream(ctx context.Context, messages []message.Message, t
Finished: true,
}
- isNew := true
- for _, existing := range toolCalls {
- if existing.Name == newCall.Name && existing.Input == newCall.Input {
- isNew = false
- break
- }
- }
-
- if isNew {
- toolCalls = append(toolCalls, newCall)
- }
+ toolCalls = append(toolCalls, newCall)
}
}
} else {
@@ -441,10 +440,16 @@ func (g *geminiClient) shouldRetry(attempts int, err error) (bool, int64, error)
// Check for token expiration (401 Unauthorized)
if contains(errMsg, "unauthorized", "invalid api key", "api key expired") {
+ prev := g.providerOptions.apiKey
+ // in case the key comes from a script, we try to re-evaluate it.
g.providerOptions.apiKey, err = config.Get().Resolve(g.providerOptions.config.APIKey)
if err != nil {
return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
}
+ // if it didn't change, do not retry.
+ if prev == g.providerOptions.apiKey {
+ return false, 0, err
+ }
g.client, err = createGeminiClient(g.providerOptions)
if err != nil {
return false, 0, fmt.Errorf("failed to create Gemini client after API key refresh: %w", err)
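The tool-result conversion above now batches every FunctionResponse part of a tool message into a single user-role content entry instead of appending one model-role entry per result. The sketch below shows the same restructuring with placeholder types; toolResult, part, and content are illustrative, not the genai SDK's types.

```go
package main

import "fmt"

// Placeholder types standing in for the SDK's part/content structures.
type toolResult struct {
	Name   string
	Output string
}

type part struct {
	FunctionName string
	Response     string
}

type content struct {
	Role  string
	Parts []part
}

// convertToolResults collects every result into one user-role content entry,
// rather than appending a separate entry per result.
func convertToolResults(results []toolResult) []content {
	var parts []part
	for _, r := range results {
		parts = append(parts, part{FunctionName: r.Name, Response: r.Output})
	}
	if len(parts) == 0 {
		return nil
	}
	return []content{{Role: "user", Parts: parts}}
}

func main() {
	history := convertToolResults([]toolResult{
		{Name: "ls", Output: `{"result":"main.go"}`},
		{Name: "grep", Output: `{"result":"3 matches"}`},
	})
	fmt.Printf("%d content entries, %d parts\n", len(history), len(history[0].Parts))
}
```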
diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index eb5a84867aecf0a76b30a7c022ccb14bf6a2139a..3e92e077b3156ddccc186e0b104b7db174290c18 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -7,6 +7,7 @@ import (
"fmt"
"io"
"log/slog"
+ "net/http"
"strings"
"time"
@@ -178,7 +179,7 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
}
}
- return
+ return openaiMessages
}
func (o *openaiClient) convertTools(tools []tools.BaseTool) []openai.ChatCompletionToolParam {
@@ -340,6 +341,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
toolCalls := make([]message.ToolCall, 0)
msgToolCalls := make(map[int64]openai.ChatCompletionMessageToolCall)
toolMap := make(map[string]openai.ChatCompletionMessageToolCall)
+ toolCallIDMap := make(map[string]string)
for openaiStream.Next() {
chunk := openaiStream.Current()
// Kujtim: this is an issue with openrouter qwen, its sending -1 for the tool index
@@ -367,6 +369,16 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
currentContent += choice.Delta.Content
} else if len(choice.Delta.ToolCalls) > 0 {
toolCall := choice.Delta.ToolCalls[0]
+ if strings.HasPrefix(toolCall.ID, "functions.") {
+ exID, ok := toolCallIDMap[toolCall.ID]
+ if !ok {
+ newID := uuid.NewString()
+ toolCallIDMap[toolCall.ID] = newID
+ toolCall.ID = newID
+ } else {
+ toolCall.ID = exID
+ }
+ }
newToolCall := false
if existingToolCall, ok := msgToolCalls[toolCall.Index]; ok { // tool call exists
if toolCall.ID != "" && toolCall.ID != existingToolCall.ID {
@@ -472,7 +484,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
select {
case <-ctx.Done():
// context cancelled
- if ctx.Err() == nil {
+ if ctx.Err() != nil {
eventChan <- ProviderEvent{Type: EventError, Error: ctx.Err()}
}
close(eventChan)
@@ -502,20 +514,34 @@ func (o *openaiClient) shouldRetry(attempts int, err error) (bool, int64, error)
retryAfterValues := []string{}
if errors.As(err, &apiErr) {
// Check for token expiration (401 Unauthorized)
- if apiErr.StatusCode == 401 {
+ if apiErr.StatusCode == http.StatusUnauthorized {
+ prev := o.providerOptions.apiKey
+ // in case the key comes from a script, we try to re-evaluate it.
o.providerOptions.apiKey, err = config.Get().Resolve(o.providerOptions.config.APIKey)
if err != nil {
return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
}
+ // if it didn't change, do not retry.
+ if prev == o.providerOptions.apiKey {
+ return false, 0, err
+ }
o.client = createOpenAIClient(o.providerOptions)
return true, 0, nil
}
- if apiErr.StatusCode != 429 && apiErr.StatusCode != 500 {
+ if apiErr.StatusCode == http.StatusTooManyRequests {
+ // Check if this is an insufficient quota error (permanent)
+ if apiErr.Type == "insufficient_quota" || apiErr.Code == "insufficient_quota" {
+ return false, 0, fmt.Errorf("OpenAI quota exceeded: %s. Please check your plan and billing details", apiErr.Message)
+ }
+ // Other 429 errors (rate limiting) can be retried
+ } else if apiErr.StatusCode != http.StatusInternalServerError {
return false, 0, err
}
- retryAfterValues = apiErr.Response.Header.Values("Retry-After")
+ if apiErr.Response != nil {
+ retryAfterValues = apiErr.Response.Header.Values("Retry-After")
+ }
}
if apiErr != nil {
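The shouldRetry change above separates permanent quota exhaustion (type or code "insufficient_quota") from ordinary, retryable 429 rate limiting. A standalone classifier in the same spirit; apiError is a local stand-in for the SDK error type, and classify429 is an illustrative helper, not the project's API.

```go
package main

import "fmt"

// apiError is a stand-in for the provider SDK's error type.
type apiError struct {
	StatusCode int
	Type       string
	Code       string
	Message    string
}

// classify429 reports whether a 429 should be retried. Quota exhaustion is
// permanent, so it returns a terminal error instead of signaling a retry.
func classify429(err apiError) (retry bool, terminal error) {
	if err.StatusCode != 429 {
		return false, fmt.Errorf("not a 429: status %d", err.StatusCode)
	}
	if err.Type == "insufficient_quota" || err.Code == "insufficient_quota" {
		return false, fmt.Errorf("quota exceeded: %s", err.Message)
	}
	// Plain rate limiting: back off and retry.
	return true, nil
}

func main() {
	retry, err := classify429(apiError{StatusCode: 429, Type: "insufficient_quota", Message: "quota exhausted"})
	fmt.Println(retry, err)

	retry, err = classify429(apiError{StatusCode: 429, Type: "rate_limit_exceeded"})
	fmt.Println(retry, err)
}
```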
diff --git a/internal/llm/provider/openai_test.go b/internal/llm/provider/openai_test.go
index 8088ba22b4cd49b26130cd3812e8705e8dfe1cba..52b0a20c9316d67ba987ccc5051aa2f6d321aff4 100644
--- a/internal/llm/provider/openai_test.go
+++ b/internal/llm/provider/openai_test.go
@@ -6,6 +6,7 @@ import (
"net/http"
"net/http/httptest"
"os"
+ "strings"
"testing"
"time"
@@ -88,3 +89,78 @@ func TestOpenAIClientStreamChoices(t *testing.T) {
}
}
}
+
+func TestOpenAIClient429InsufficientQuotaError(t *testing.T) {
+ client := &openaiClient{
+ providerOptions: providerClientOptions{
+ modelType: config.SelectedModelTypeLarge,
+ apiKey: "test-key",
+ systemMessage: "test",
+ config: config.ProviderConfig{
+ ID: "test-openai",
+ APIKey: "test-key",
+ },
+ model: func(config.SelectedModelType) catwalk.Model {
+ return catwalk.Model{
+ ID: "test-model",
+ Name: "test-model",
+ }
+ },
+ },
+ }
+
+ // Test insufficient_quota error should not retry
+ apiErr := &openai.Error{
+ StatusCode: 429,
+ Message: "You exceeded your current quota, please check your plan and billing details. For more information on this error, read the docs: https://platform.openai.com/docs/guides/error-codes/api-errors.",
+ Type: "insufficient_quota",
+ Code: "insufficient_quota",
+ }
+
+ retry, _, err := client.shouldRetry(1, apiErr)
+ if retry {
+ t.Error("Expected shouldRetry to return false for insufficient_quota error, but got true")
+ }
+ if err == nil {
+ t.Error("Expected shouldRetry to return an error for insufficient_quota, but got nil")
+ }
+ if err != nil && !strings.Contains(err.Error(), "quota") {
+ t.Errorf("Expected error message to mention quota, got: %v", err)
+ }
+}
+
+func TestOpenAIClient429RateLimitError(t *testing.T) {
+ client := &openaiClient{
+ providerOptions: providerClientOptions{
+ modelType: config.SelectedModelTypeLarge,
+ apiKey: "test-key",
+ systemMessage: "test",
+ config: config.ProviderConfig{
+ ID: "test-openai",
+ APIKey: "test-key",
+ },
+ model: func(config.SelectedModelType) catwalk.Model {
+ return catwalk.Model{
+ ID: "test-model",
+ Name: "test-model",
+ }
+ },
+ },
+ }
+
+ // Test regular rate limit error should retry
+ apiErr := &openai.Error{
+ StatusCode: 429,
+ Message: "Rate limit reached for requests",
+ Type: "rate_limit_exceeded",
+ Code: "rate_limit_exceeded",
+ }
+
+ retry, _, err := client.shouldRetry(1, apiErr)
+ if !retry {
+ t.Error("Expected shouldRetry to return true for rate_limit_exceeded error, but got false")
+ }
+ if err != nil {
+ t.Errorf("Expected shouldRetry to return nil error for rate_limit_exceeded, but got: %v", err)
+ }
+}
diff --git a/internal/llm/provider/provider.go b/internal/llm/provider/provider.go
index 28562f2f484a75c445d9eaa21ce90af4ef5ca613..0dada9d8b1e353801fde43b1d9ebb1fc6eaa0a1e 100644
--- a/internal/llm/provider/provider.go
+++ b/internal/llm/provider/provider.go
@@ -13,7 +13,7 @@ import (
type EventType string
-const maxRetries = 8
+const maxRetries = 3
const (
EventContentStart EventType = "content_start"
@@ -98,7 +98,7 @@ func (p *baseProvider[C]) cleanMessages(messages []message.Message) (cleaned []m
}
cleaned = append(cleaned, msg)
}
- return
+ return cleaned
}
func (p *baseProvider[C]) SendMessages(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (*ProviderResponse, error) {
diff --git a/internal/llm/tools/bash.go b/internal/llm/tools/bash.go
index 6b55820632029e84f9381faa5ca2bd25734abeee..7f91ecb78b3d79380d029a1d58bb62083cdf27fe 100644
--- a/internal/llm/tools/bash.go
+++ b/internal/llm/tools/bash.go
@@ -1,12 +1,16 @@
package tools
import (
+ "bytes"
"context"
+ _ "embed"
"encoding/json"
"fmt"
+ "html/template"
"strings"
"time"
+ "github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/permission"
"github.com/charmbracelet/crush/internal/shell"
)
@@ -30,6 +34,7 @@ type BashResponseMetadata struct {
type bashTool struct {
permissions permission.Service
workingDir string
+ attribution *config.Attribution
}
const (
@@ -41,6 +46,22 @@ const (
BashNoOutput = "no output"
)
+//go:embed bash.md
+var bashDescription []byte
+
+var bashDescriptionTpl = template.Must(
+ template.New("bashDescription").
+ Parse(string(bashDescription)),
+)
+
+type bashDescriptionData struct {
+ BannedCommands string
+ MaxOutputLength int
+ AttributionStep string
+ AttributionExample string
+ PRAttribution string
+}
+
var bannedCommands = []string{
// Network/Download tools
"alias",
@@ -114,162 +135,65 @@ var bannedCommands = []string{
"ufw",
}
-func bashDescription() string {
+func (b *bashTool) bashDescription() string {
bannedCommandsStr := strings.Join(bannedCommands, ", ")
- return fmt.Sprintf(`Executes a given bash command in a persistent shell session with optional timeout, ensuring proper handling and security measures.
-
-CROSS-PLATFORM SHELL SUPPORT:
-* This tool uses a shell interpreter (mvdan/sh) that mimics the Bash language,
- so you should use Bash syntax in all platforms, including Windows.
- The most common shell builtins and core utils are available in Windows as
- well.
-* Make sure to use forward slashes (/) as path separators in commands, even on
- Windows. Example: "ls C:/foo/bar" instead of "ls C:\foo\bar".
-
-Before executing the command, please follow these steps:
-
-1. Directory Verification:
- - If the command will create new directories or files, first use the LS tool to verify the parent directory exists and is the correct location
- - For example, before running "mkdir foo/bar", first use LS to check that "foo" exists and is the intended parent directory
-
-2. Security Check:
- - For security and to limit the threat of a prompt injection attack, some commands are limited or banned. If you use a disallowed command, you will receive an error message explaining the restriction. Explain the error to the User.
- - Verify that the command is not one of the banned commands: %s.
-
-3. Command Execution:
- - After ensuring proper quoting, execute the command.
- - Capture the output of the command.
-
-4. Output Processing:
- - If the output exceeds %d characters, output will be truncated before being returned to you.
- - Prepare the output for display to the user.
-
-5. Return Result:
- - Provide the processed output of the command.
- - If any errors occurred during execution, include those in the output.
- - The result will also have metadata like the cwd (current working directory) at the end, included with tags.
-
-Usage notes:
-- The command argument is required.
-- You can specify an optional timeout in milliseconds (up to 600000ms / 10 minutes). If not specified, commands will timeout after 30 minutes.
-- VERY IMPORTANT: You MUST avoid using search commands like 'find' and 'grep'. Instead use Grep, Glob, or Agent tools to search. You MUST avoid read tools like 'cat', 'head', 'tail', and 'ls', and use FileRead and LS tools to read files.
-- When issuing multiple commands, use the ';' or '&&' operator to separate them. DO NOT use newlines (newlines are ok in quoted strings).
-- IMPORTANT: All commands share the same shell session. Shell state (environment variables, virtual environments, current directory, etc.) persist between commands. For example, if you set an environment variable as part of a command, the environment variable will persist for subsequent commands.
-- Try to maintain your current working directory throughout the session by using absolute paths and avoiding usage of 'cd'. You may use 'cd' if the User explicitly requests it.
-
-pytest /foo/bar/tests
-
-
-cd /foo/bar && pytest tests
-
-
-# Committing changes with git
-
-When the user asks you to create a new git commit, follow these steps carefully:
-
-1. Start with a single message that contains exactly three tool_use blocks that do the following (it is VERY IMPORTANT that you send these tool_use blocks in a single message, otherwise it will feel slow to the user!):
- - Run a git status command to see all untracked files.
- - Run a git diff command to see both staged and unstaged changes that will be committed.
- - Run a git log command to see recent commit messages, so that you can follow this repository's commit message style.
-
-2. Use the git context at the start of this conversation to determine which files are relevant to your commit. Add relevant untracked files to the staging area. Do not commit files that were already modified at the start of this conversation, if they are not relevant to your commit.
-
-3. Analyze all staged changes (both previously staged and newly added) and draft a commit message. Wrap your analysis process in tags:
-
-
-- List the files that have been changed or added
-- Summarize the nature of the changes (eg. new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)
-- Brainstorm the purpose or motivation behind these changes
-- Do not use tools to explore code, beyond what is available in the git context
-- Assess the impact of these changes on the overall project
-- Check for any sensitive information that shouldn't be committed
-- Draft a concise (1-2 sentences) commit message that focuses on the "why" rather than the "what"
-- Ensure your language is clear, concise, and to the point
-- Ensure the message accurately reflects the changes and their purpose (i.e. "add" means a wholly new feature, "update" means an enhancement to an existing feature, "fix" means a bug fix, etc.)
-- Ensure the message is not generic (avoid words like "Update" or "Fix" without context)
-- Review the draft message to ensure it accurately reflects the changes and their purpose
-
-
-4. Create the commit with a message ending with:
-💘 Generated with Crush
-Co-Authored-By: Crush
-
-- In order to ensure good formatting, ALWAYS pass the commit message via a HEREDOC, a la this example:
-
+
+ // Build attribution text based on settings
+ var attributionStep, attributionExample, prAttribution string
+
+ // Default to true if attribution is nil (backward compatibility)
+ generatedWith := b.attribution == nil || b.attribution.GeneratedWith
+ coAuthoredBy := b.attribution == nil || b.attribution.CoAuthoredBy
+
+ // Build PR attribution
+ if generatedWith {
+ prAttribution = "💘 Generated with Crush"
+ }
+
+ if generatedWith || coAuthoredBy {
+ var attributionParts []string
+ if generatedWith {
+ attributionParts = append(attributionParts, "💘 Generated with Crush")
+ }
+ if coAuthoredBy {
+ attributionParts = append(attributionParts, "Co-Authored-By: Crush ")
+ }
+
+ if len(attributionParts) > 0 {
+ attributionStep = fmt.Sprintf("4. Create the commit with a message ending with:\n%s", strings.Join(attributionParts, "\n"))
+
+ attributionText := strings.Join(attributionParts, "\n ")
+ attributionExample = fmt.Sprintf(`
git commit -m "$(cat <<'EOF'
Commit message here.
- 💘 Generated with Crush
- Co-Authored-By: 💘 Crush
+ %s
EOF
- )"
-
-
-5. If the commit fails due to pre-commit hook changes, retry the commit ONCE to include these automated changes. If it fails again, it usually means a pre-commit hook is preventing the commit. If the commit succeeds but you notice that files were modified by the pre-commit hook, you MUST amend your commit to include them.
-
-6. Finally, run git status to make sure the commit succeeded.
-
-Important notes:
-- When possible, combine the "git add" and "git commit" commands into a single "git commit -am" command, to speed things up
-- However, be careful not to stage files (e.g. with 'git add .') for commits that aren't part of the change, they may have untracked files they want to keep around, but not commit.
-- NEVER update the git config
-- DO NOT push to the remote repository
-- IMPORTANT: Never use git commands with the -i flag (like git rebase -i or git add -i) since they require interactive input which is not supported.
-- If there are no changes to commit (i.e., no untracked files and no modifications), do not create an empty commit
-- Ensure your commit message is meaningful and concise. It should explain the purpose of the changes, not just describe them.
-- Return an empty response - the user will see the git output directly
-
-# Creating pull requests
-Use the gh command via the Bash tool for ALL GitHub-related tasks including working with issues, pull requests, checks, and releases. If given a Github URL use the gh command to get the information needed.
-
-IMPORTANT: When the user asks you to create a pull request, follow these steps carefully:
-
-1. Understand the current state of the branch. Remember to send a single message that contains multiple tool_use blocks (it is VERY IMPORTANT that you do this in a single message, otherwise it will feel slow to the user!):
- - Run a git status command to see all untracked files.
- - Run a git diff command to see both staged and unstaged changes that will be committed.
- - Check if the current branch tracks a remote branch and is up to date with the remote, so you know if you need to push to the remote
- - Run a git log command and 'git diff main...HEAD' to understand the full commit history for the current branch (from the time it diverged from the 'main' branch.)
-
-2. Create new branch if needed
-
-3. Commit changes if needed
-
-4. Push to remote with -u flag if needed
-
-5. Analyze all changes that will be included in the pull request, making sure to look at all relevant commits (not just the latest commit, but all commits that will be included in the pull request!), and draft a pull request summary. Wrap your analysis process in tags:
-
-
-- List the commits since diverging from the main branch
-- Summarize the nature of the changes (eg. new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)
-- Brainstorm the purpose or motivation behind these changes
-- Assess the impact of these changes on the overall project
-- Do not use tools to explore code, beyond what is available in the git context
-- Check for any sensitive information that shouldn't be committed
-- Draft a concise (1-2 bullet points) pull request summary that focuses on the "why" rather than the "what"
-- Ensure the summary accurately reflects all changes since diverging from the main branch
-- Ensure your language is clear, concise, and to the point
-- Ensure the summary accurately reflects the changes and their purpose (ie. "add" means a wholly new feature, "update" means an enhancement to an existing feature, "fix" means a bug fix, etc.)
-- Ensure the summary is not generic (avoid words like "Update" or "Fix" without context)
-- Review the draft summary to ensure it accurately reflects the changes and their purpose
-
-
-6. Create PR using gh pr create with the format below. Use a HEREDOC to pass the body to ensure correct formatting.
-
-gh pr create --title "the pr title" --body "$(cat <<'EOF'
-## Summary
-<1-3 bullet points>
-
-## Test plan
-[Checklist of TODOs for testing the pull request...]
-
-💘 Generated with Crush
-EOF
-)"
-
-
-Important:
-- Return an empty response - the user will see the gh output directly
-- Never update git config`, bannedCommandsStr, MaxOutputLength)
+)"`, attributionText)
+ }
+ }
+
+ if attributionStep == "" {
+ attributionStep = "4. Create the commit with your commit message."
+ attributionExample = `
+git commit -m "$(cat <<'EOF'
+ Commit message here.
+ EOF
+)"`
+ }
+
+ var out bytes.Buffer
+ if err := bashDescriptionTpl.Execute(&out, bashDescriptionData{
+ BannedCommands: bannedCommandsStr,
+ MaxOutputLength: MaxOutputLength,
+ AttributionStep: attributionStep,
+ AttributionExample: attributionExample,
+ PRAttribution: prAttribution,
+ }); err != nil {
+ // this should never happen.
+ panic("failed to execute bash description template: " + err.Error())
+ }
+ return out.String()
}
func blockFuncs() []shell.BlockFunc {
@@ -304,7 +228,7 @@ func blockFuncs() []shell.BlockFunc {
}
}
-func NewBashTool(permission permission.Service, workingDir string) BaseTool {
+func NewBashTool(permission permission.Service, workingDir string, attribution *config.Attribution) BaseTool {
// Set up command blocking on the persistent shell
persistentShell := shell.GetPersistentShell(workingDir)
persistentShell.SetBlockFuncs(blockFuncs())
@@ -312,6 +236,7 @@ func NewBashTool(permission permission.Service, workingDir string) BaseTool {
return &bashTool{
permissions: permission,
workingDir: workingDir,
+ attribution: attribution,
}
}
@@ -322,7 +247,7 @@ func (b *bashTool) Name() string {
func (b *bashTool) Info() ToolInfo {
return ToolInfo{
Name: BashToolName,
- Description: bashDescription(),
+ Description: b.bashDescription(),
Parameters: map[string]any{
"command": map[string]any{
"type": "string",
diff --git a/internal/llm/tools/bash.md b/internal/llm/tools/bash.md
new file mode 100644
index 0000000000000000000000000000000000000000..464b2749fb0205729caafbccc2dde57ffe267cd1
--- /dev/null
+++ b/internal/llm/tools/bash.md
@@ -0,0 +1,161 @@
+Executes a given bash command in a persistent shell session with optional timeout, ensuring proper handling and security measures.
+
+CROSS-PLATFORM SHELL SUPPORT:
+
+- This tool uses a shell interpreter (mvdan/sh) that mimics the Bash language,
+ so you should use Bash syntax in all platforms, including Windows.
+ The most common shell builtins and core utils are available in Windows as
+ well.
+- Make sure to use forward slashes (/) as path separators in commands, even on
+ Windows. Example: "ls C:/foo/bar" instead of "ls C:\foo\bar".
+
+Before executing the command, please follow these steps:
+
+1. Directory Verification:
+
+- If the command will create new directories or files, first use the LS tool to verify the parent directory exists and is the correct location
+- For example, before running "mkdir foo/bar", first use LS to check that "foo" exists and is the intended parent directory
+
+2. Security Check:
+
+- For security and to limit the threat of a prompt injection attack, some commands are limited or banned. If you use a disallowed command, you will receive an error message explaining the restriction. Explain the error to the User.
+- Verify that the command is not one of the banned commands: {{ .BannedCommands }}.
+
+3. Command Execution:
+
+- After ensuring proper quoting, execute the command.
+- Capture the output of the command.
+
+4. Output Processing:
+
+- If the output exceeds {{ .MaxOutputLength }} characters, output will be truncated before being returned to you.
+- Prepare the output for display to the user.
+
+5. Return Result:
+
+- Provide the processed output of the command.
+- If any errors occurred during execution, include those in the output.
+- The result will also have metadata like the cwd (current working directory) at the end, included with tags.
+
+Usage notes:
+
+- The command argument is required.
+- You can specify an optional timeout in milliseconds (up to 600000ms / 10 minutes). If not specified, commands will timeout after 30 minutes.
+- VERY IMPORTANT: You MUST avoid using search commands like 'find' and 'grep'. Instead use Grep, Glob, or Agent tools to search. You MUST avoid read tools like 'cat', 'head', 'tail', and 'ls', and use FileRead and LS tools to read files.
+- When issuing multiple commands, use the ';' or '&&' operator to separate them. DO NOT use newlines (newlines are ok in quoted strings).
+- IMPORTANT: All commands share the same shell session. Shell state (environment variables, virtual environments, current directory, etc.) persist between commands. For example, if you set an environment variable as part of a command, the environment variable will persist for subsequent commands.
+- Try to maintain your current working directory throughout the session by using absolute paths and avoiding usage of 'cd'. You may use 'cd' if the User explicitly requests it.
+
+ pytest /foo/bar/tests
+
+
+ cd /foo/bar && pytest tests
+
+
+# Committing changes with git
+
+When the user asks you to create a new git commit, follow these steps carefully:
+
+1. Start with a single message that contains exactly three tool_use blocks that do the following (it is VERY IMPORTANT that you send these tool_use blocks in a single message, otherwise it will feel slow to the user!):
+
+- Run a git status command to see all untracked files.
+- Run a git diff command to see both staged and unstaged changes that will be committed.
+- Run a git log command to see recent commit messages, so that you can follow this repository's commit message style.
+
+2. Use the git context at the start of this conversation to determine which files are relevant to your commit. Add relevant untracked files to the staging area. Do not commit files that were already modified at the start of this conversation, if they are not relevant to your commit.
+
+3. Analyze all staged changes (both previously staged and newly added) and draft a commit message. Wrap your analysis process in tags:
+
+
+
+- List the files that have been changed or added
+- Summarize the nature of the changes (eg. new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)
+- Brainstorm the purpose or motivation behind these changes
+- Do not use tools to explore code, beyond what is available in the git context
+- Assess the impact of these changes on the overall project
+- Check for any sensitive information that shouldn't be committed
+- Draft a concise (1-2 sentences) commit message that focuses on the "why" rather than the "what"
+- Ensure your language is clear, concise, and to the point
+- Ensure the message accurately reflects the changes and their purpose (i.e. "add" means a wholly new feature, "update" means an enhancement to an existing feature, "fix" means a bug fix, etc.)
+- Ensure the message is not generic (avoid words like "Update" or "Fix" without context)
+- Review the draft message to ensure it accurately reflects the changes and their purpose
+
+
+{{ .AttributionStep }}
+
+- In order to ensure good formatting, ALWAYS pass the commit message via a HEREDOC, a la this example:
+ {{ .AttributionExample }}
+
+5. If the commit fails due to pre-commit hook changes, retry the commit ONCE to include these automated changes. If it fails again, it usually means a pre-commit hook is preventing the commit. If the commit succeeds but you notice that files were modified by the pre-commit hook, you MUST amend your commit to include them.
+
+6. Finally, run git status to make sure the commit succeeded.
+
+Important notes:
+
+- When possible, combine the "git add" and "git commit" commands into a single "git commit -am" command, to speed things up
+- However, be careful not to stage files (e.g. with 'git add .') for commits that aren't part of the change, they may have untracked files they want to keep around, but not commit.
+- NEVER update the git config
+- DO NOT push to the remote repository
+- IMPORTANT: Never use git commands with the -i flag (like git rebase -i or git add -i) since they require interactive input which is not supported.
+- If there are no changes to commit (i.e., no untracked files and no modifications), do not create an empty commit
+- Ensure your commit message is meaningful and concise. It should explain the purpose of the changes, not just describe them.
+- Return an empty response - the user will see the git output directly
+
+# Creating pull requests
+
+Use the gh command via the Bash tool for ALL GitHub-related tasks including working with issues, pull requests, checks, and releases. If given a Github URL use the gh command to get the information needed.
+
+IMPORTANT: When the user asks you to create a pull request, follow these steps carefully:
+
+1. Understand the current state of the branch. Remember to send a single message that contains multiple tool_use blocks (it is VERY IMPORTANT that you do this in a single message, otherwise it will feel slow to the user!):
+
+- Run a git status command to see all untracked files.
+- Run a git diff command to see both staged and unstaged changes that will be committed.
+- Check if the current branch tracks a remote branch and is up to date with the remote, so you know if you need to push to the remote
+- Run a git log command and 'git diff main...HEAD' to understand the full commit history for the current branch (from the time it diverged from the 'main' branch.)
+
+2. Create new branch if needed
+
+3. Commit changes if needed
+
+4. Push to remote with -u flag if needed
+
+5. Analyze all changes that will be included in the pull request, making sure to look at all relevant commits (not just the latest commit, but all commits that will be included in the pull request!), and draft a pull request summary. Wrap your analysis process in tags:
+
+
+
+- List the commits since diverging from the main branch
+- Summarize the nature of the changes (eg. new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)
+- Brainstorm the purpose or motivation behind these changes
+- Assess the impact of these changes on the overall project
+- Do not use tools to explore code, beyond what is available in the git context
+- Check for any sensitive information that shouldn't be committed
+- Draft a concise (1-2 bullet points) pull request summary that focuses on the "why" rather than the "what"
+- Ensure the summary accurately reflects all changes since diverging from the main branch
+- Ensure your language is clear, concise, and to the point
+- Ensure the summary accurately reflects the changes and their purpose (ie. "add" means a wholly new feature, "update" means an enhancement to an existing feature, "fix" means a bug fix, etc.)
+- Ensure the summary is not generic (avoid words like "Update" or "Fix" without context)
+- Review the draft summary to ensure it accurately reflects the changes and their purpose
+
+
+6. Create PR using gh pr create with the format below. Use a HEREDOC to pass the body to ensure correct formatting.
+
+ gh pr create --title "the pr title" --body "$(cat <<'EOF'
+
+## Summary
+
+<1-3 bullet points>
+
+## Test plan
+
+[Checklist of TODOs for testing the pull request...]
+
+{{ .PRAttribution }}
+EOF
+)"
+
+
+Important:
+
+- Return an empty response - the user will see the gh output directly
+- Never update git config
diff --git a/internal/llm/tools/diagnostics.go b/internal/llm/tools/diagnostics.go
index b6773a8cf9de28b71cafca6fb45d3e2cb69d8c0a..8e0c332cef76e40d5e24e74ed3260b95aab8b04b 100644
--- a/internal/llm/tools/diagnostics.go
+++ b/internal/llm/tools/diagnostics.go
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"log/slog"
@@ -9,43 +10,25 @@ import (
"strings"
"time"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
type DiagnosticsParams struct {
FilePath string `json:"file_path"`
}
+
type diagnosticsTool struct {
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
}
-const (
- DiagnosticsToolName = "diagnostics"
- diagnosticsDescription = `Get diagnostics for a file and/or project.
-WHEN TO USE THIS TOOL:
-- Use when you need to check for errors or warnings in your code
-- Helpful for debugging and ensuring code quality
-- Good for getting a quick overview of issues in a file or project
-HOW TO USE:
-- Provide a path to a file to get diagnostics for that file
-- Leave the path empty to get diagnostics for the entire project
-- Results are displayed in a structured format with severity levels
-FEATURES:
-- Displays errors, warnings, and hints
-- Groups diagnostics by severity
-- Provides detailed information about each diagnostic
-LIMITATIONS:
-- Results are limited to the diagnostics provided by the LSP clients
-- May not cover all possible issues in the code
-- Does not provide suggestions for fixing issues
-TIPS:
-- Use in conjunction with other tools for a comprehensive code review
-- Combine with the LSP client for real-time diagnostics
-`
-)
+const DiagnosticsToolName = "diagnostics"
+
+//go:embed diagnostics.md
+var diagnosticsDescription []byte
-func NewDiagnosticsTool(lspClients map[string]*lsp.Client) BaseTool {
+func NewDiagnosticsTool(lspClients *csync.Map[string, *lsp.Client]) BaseTool {
return &diagnosticsTool{
lspClients,
}
@@ -58,7 +41,7 @@ func (b *diagnosticsTool) Name() string {
func (b *diagnosticsTool) Info() ToolInfo {
return ToolInfo{
Name: DiagnosticsToolName,
- Description: diagnosticsDescription,
+ Description: string(diagnosticsDescription),
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
@@ -75,227 +58,148 @@ func (b *diagnosticsTool) Run(ctx context.Context, call ToolCall) (ToolResponse,
return NewTextErrorResponse(fmt.Sprintf("error parsing parameters: %s", err)), nil
}
- lsps := b.lspClients
-
- if len(lsps) == 0 {
+ if b.lspClients.Len() == 0 {
return NewTextErrorResponse("no LSP clients available"), nil
}
-
- if params.FilePath != "" {
- notifyLspOpenFile(ctx, params.FilePath, lsps)
- waitForLspDiagnostics(ctx, params.FilePath, lsps)
- }
-
- output := getDiagnostics(params.FilePath, lsps)
-
+ notifyLSPs(ctx, b.lspClients, params.FilePath)
+ output := getDiagnostics(params.FilePath, b.lspClients)
return NewTextResponse(output), nil
}
-func notifyLspOpenFile(ctx context.Context, filePath string, lsps map[string]*lsp.Client) {
- for _, client := range lsps {
- err := client.OpenFile(ctx, filePath)
- if err != nil {
+func notifyLSPs(ctx context.Context, lsps *csync.Map[string, *lsp.Client], filepath string) {
+ if filepath == "" {
+ return
+ }
+ for client := range lsps.Seq() {
+ if !client.HandlesFile(filepath) {
continue
}
+ _ = client.OpenFileOnDemand(ctx, filepath)
+ _ = client.NotifyChange(ctx, filepath)
+ client.WaitForDiagnostics(ctx, 5*time.Second)
}
}
-func waitForLspDiagnostics(ctx context.Context, filePath string, lsps map[string]*lsp.Client) {
- if len(lsps) == 0 {
- return
- }
-
- diagChan := make(chan struct{}, 1)
-
- for _, client := range lsps {
- originalDiags := client.GetDiagnostics()
-
- handler := func(params json.RawMessage) {
- lsp.HandleDiagnostics(client, params)
- var diagParams protocol.PublishDiagnosticsParams
- if err := json.Unmarshal(params, &diagParams); err != nil {
- return
- }
+func getDiagnostics(filePath string, lsps *csync.Map[string, *lsp.Client]) string {
+ fileDiagnostics := []string{}
+ projectDiagnostics := []string{}
- path, err := diagParams.URI.Path()
+ for lspName, client := range lsps.Seq2() {
+ for location, diags := range client.GetDiagnostics() {
+ path, err := location.Path()
if err != nil {
- slog.Error("Failed to convert diagnostic URI to path", "uri", diagParams.URI, "error", err)
- return
+ slog.Error("Failed to convert diagnostic location URI to path", "uri", location, "error", err)
+ continue
}
-
- if path == filePath || hasDiagnosticsChanged(client.GetDiagnostics(), originalDiags) {
- select {
- case diagChan <- struct{}{}:
- default:
+ isCurrentFile := path == filePath
+ for _, diag := range diags {
+ formattedDiag := formatDiagnostic(path, diag, lspName)
+ if isCurrentFile {
+ fileDiagnostics = append(fileDiagnostics, formattedDiag)
+ } else {
+ projectDiagnostics = append(projectDiagnostics, formattedDiag)
}
}
}
+ }
- client.RegisterNotificationHandler("textDocument/publishDiagnostics", handler)
+ sortDiagnostics(fileDiagnostics)
+ sortDiagnostics(projectDiagnostics)
- if client.IsFileOpen(filePath) {
- err := client.NotifyChange(ctx, filePath)
- if err != nil {
- continue
- }
- } else {
- err := client.OpenFile(ctx, filePath)
- if err != nil {
- continue
- }
- }
- }
+ var output strings.Builder
+ writeDiagnostics(&output, "file_diagnostics", fileDiagnostics)
+ writeDiagnostics(&output, "project_diagnostics", projectDiagnostics)
- select {
- case <-diagChan:
- case <-time.After(5 * time.Second):
- case <-ctx.Done():
+ if len(fileDiagnostics) > 0 || len(projectDiagnostics) > 0 {
+ fileErrors := countSeverity(fileDiagnostics, "Error")
+ fileWarnings := countSeverity(fileDiagnostics, "Warn")
+ projectErrors := countSeverity(projectDiagnostics, "Error")
+ projectWarnings := countSeverity(projectDiagnostics, "Warn")
+ output.WriteString("\n\n")
+ fmt.Fprintf(&output, "Current file: %d errors, %d warnings\n", fileErrors, fileWarnings)
+ fmt.Fprintf(&output, "Project: %d errors, %d warnings\n", projectErrors, projectWarnings)
+ output.WriteString("\n")
}
-}
-func hasDiagnosticsChanged(current, original map[protocol.DocumentURI][]protocol.Diagnostic) bool {
- for uri, diags := range current {
- origDiags, exists := original[uri]
- if !exists || len(diags) != len(origDiags) {
- return true
- }
- }
- return false
+ out := output.String()
+ slog.Info("Diagnostics", "output", fmt.Sprintf("%q", out))
+ return out
}
-func getDiagnostics(filePath string, lsps map[string]*lsp.Client) string {
- fileDiagnostics := []string{}
- projectDiagnostics := []string{}
-
- formatDiagnostic := func(pth string, diagnostic protocol.Diagnostic, source string) string {
- severity := "Info"
- switch diagnostic.Severity {
- case protocol.SeverityError:
- severity = "Error"
- case protocol.SeverityWarning:
- severity = "Warn"
- case protocol.SeverityHint:
- severity = "Hint"
- }
-
- location := fmt.Sprintf("%s:%d:%d", pth, diagnostic.Range.Start.Line+1, diagnostic.Range.Start.Character+1)
-
- sourceInfo := ""
- if diagnostic.Source != "" {
- sourceInfo = diagnostic.Source
- } else if source != "" {
- sourceInfo = source
- }
-
- codeInfo := ""
- if diagnostic.Code != nil {
- codeInfo = fmt.Sprintf("[%v]", diagnostic.Code)
- }
-
- tagsInfo := ""
- if len(diagnostic.Tags) > 0 {
- tags := []string{}
- for _, tag := range diagnostic.Tags {
- switch tag {
- case protocol.Unnecessary:
- tags = append(tags, "unnecessary")
- case protocol.Deprecated:
- tags = append(tags, "deprecated")
- }
- }
- if len(tags) > 0 {
- tagsInfo = fmt.Sprintf(" (%s)", strings.Join(tags, ", "))
- }
- }
-
- return fmt.Sprintf("%s: %s [%s]%s%s %s",
- severity,
- location,
- sourceInfo,
- codeInfo,
- tagsInfo,
- diagnostic.Message)
+func writeDiagnostics(output *strings.Builder, tag string, in []string) {
+ if len(in) == 0 {
+ return
}
-
- for lspName, client := range lsps {
- diagnostics := client.GetDiagnostics()
- if len(diagnostics) > 0 {
- for location, diags := range diagnostics {
- path, err := location.Path()
- if err != nil {
- slog.Error("Failed to convert diagnostic location URI to path", "uri", location, "error", err)
- continue
- }
- isCurrentFile := path == filePath
-
- for _, diag := range diags {
- formattedDiag := formatDiagnostic(path, diag, lspName)
-
- if isCurrentFile {
- fileDiagnostics = append(fileDiagnostics, formattedDiag)
- } else {
- projectDiagnostics = append(projectDiagnostics, formattedDiag)
- }
- }
- }
- }
+ output.WriteString("\n<" + tag + ">\n")
+ if len(in) > 10 {
+ output.WriteString(strings.Join(in[:10], "\n"))
+ fmt.Fprintf(output, "\n... and %d more diagnostics", len(in)-10)
+ } else {
+ output.WriteString(strings.Join(in, "\n"))
}
+ output.WriteString("\n" + tag + ">\n")
+}
- sort.Slice(fileDiagnostics, func(i, j int) bool {
- iIsError := strings.HasPrefix(fileDiagnostics[i], "Error")
- jIsError := strings.HasPrefix(fileDiagnostics[j], "Error")
+func sortDiagnostics(in []string) []string {
+ sort.Slice(in, func(i, j int) bool {
+ iIsError := strings.HasPrefix(in[i], "Error")
+ jIsError := strings.HasPrefix(in[j], "Error")
if iIsError != jIsError {
return iIsError // Errors come first
}
- return fileDiagnostics[i] < fileDiagnostics[j] // Then alphabetically
+ return in[i] < in[j] // Then alphabetically
})
+ return in
+}
- sort.Slice(projectDiagnostics, func(i, j int) bool {
- iIsError := strings.HasPrefix(projectDiagnostics[i], "Error")
- jIsError := strings.HasPrefix(projectDiagnostics[j], "Error")
- if iIsError != jIsError {
- return iIsError
- }
- return projectDiagnostics[i] < projectDiagnostics[j]
- })
+func formatDiagnostic(pth string, diagnostic protocol.Diagnostic, source string) string {
+ severity := "Info"
+ switch diagnostic.Severity {
+ case protocol.SeverityError:
+ severity = "Error"
+ case protocol.SeverityWarning:
+ severity = "Warn"
+ case protocol.SeverityHint:
+ severity = "Hint"
+ }
- var output strings.Builder
+ location := fmt.Sprintf("%s:%d:%d", pth, diagnostic.Range.Start.Line+1, diagnostic.Range.Start.Character+1)
- if len(fileDiagnostics) > 0 {
- output.WriteString("\n\n")
- if len(fileDiagnostics) > 10 {
- output.WriteString(strings.Join(fileDiagnostics[:10], "\n"))
- fmt.Fprintf(&output, "\n... and %d more diagnostics", len(fileDiagnostics)-10)
- } else {
- output.WriteString(strings.Join(fileDiagnostics, "\n"))
- }
- output.WriteString("\n\n")
+ sourceInfo := ""
+ if diagnostic.Source != "" {
+ sourceInfo = diagnostic.Source
+ } else if source != "" {
+ sourceInfo = source
}
- if len(projectDiagnostics) > 0 {
- output.WriteString("\n\n")
- if len(projectDiagnostics) > 10 {
- output.WriteString(strings.Join(projectDiagnostics[:10], "\n"))
- fmt.Fprintf(&output, "\n... and %d more diagnostics", len(projectDiagnostics)-10)
- } else {
- output.WriteString(strings.Join(projectDiagnostics, "\n"))
- }
- output.WriteString("\n\n")
+ codeInfo := ""
+ if diagnostic.Code != nil {
+ codeInfo = fmt.Sprintf("[%v]", diagnostic.Code)
}
- if len(fileDiagnostics) > 0 || len(projectDiagnostics) > 0 {
- fileErrors := countSeverity(fileDiagnostics, "Error")
- fileWarnings := countSeverity(fileDiagnostics, "Warn")
- projectErrors := countSeverity(projectDiagnostics, "Error")
- projectWarnings := countSeverity(projectDiagnostics, "Warn")
-
- output.WriteString("\n\n")
- fmt.Fprintf(&output, "Current file: %d errors, %d warnings\n", fileErrors, fileWarnings)
- fmt.Fprintf(&output, "Project: %d errors, %d warnings\n", projectErrors, projectWarnings)
- output.WriteString("\n")
+ tagsInfo := ""
+ if len(diagnostic.Tags) > 0 {
+ tags := []string{}
+ for _, tag := range diagnostic.Tags {
+ switch tag {
+ case protocol.Unnecessary:
+ tags = append(tags, "unnecessary")
+ case protocol.Deprecated:
+ tags = append(tags, "deprecated")
+ }
+ }
+ if len(tags) > 0 {
+ tagsInfo = fmt.Sprintf(" (%s)", strings.Join(tags, ", "))
+ }
}
- return output.String()
+ return fmt.Sprintf("%s: %s [%s]%s%s %s",
+ severity,
+ location,
+ sourceInfo,
+ codeInfo,
+ tagsInfo,
+ diagnostic.Message)
}
func countSeverity(diagnostics []string, severity string) int {
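The refactor above replaces the notification-handler/wait loop with a synchronous read of each client's cached diagnostics, grouped into tag-delimited file and project sections capped at 10 entries each. Below is a self-contained sketch that mirrors the writeDiagnostics helper (with the `</...>` closing tag) so the expected output shape is visible; the sample diagnostic strings are made up to match the formatDiagnostic layout.

```go
package main

import (
	"fmt"
	"strings"
)

// writeDiagnostics mirrors the helper above: it wraps formatted diagnostics
// in <tag>...</tag> markers and truncates each section at 10 entries.
func writeDiagnostics(output *strings.Builder, tag string, in []string) {
	if len(in) == 0 {
		return
	}
	output.WriteString("\n<" + tag + ">\n")
	if len(in) > 10 {
		output.WriteString(strings.Join(in[:10], "\n"))
		fmt.Fprintf(output, "\n... and %d more diagnostics", len(in)-10)
	} else {
		output.WriteString(strings.Join(in, "\n"))
	}
	output.WriteString("\n</" + tag + ">\n")
}

func main() {
	var out strings.Builder
	writeDiagnostics(&out, "file_diagnostics", []string{
		"Error: main.go:10:2 [gopls] undefined: foo",
		"Warn: main.go:14:5 [gopls] unused variable bar",
	})
	fmt.Print(out.String())
	// Printed output (surrounded by blank lines):
	// <file_diagnostics>
	// Error: main.go:10:2 [gopls] undefined: foo
	// Warn: main.go:14:5 [gopls] unused variable bar
	// </file_diagnostics>
}
```

Capping each section keeps the tool response small even when a project carries hundreds of diagnostics.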
diff --git a/internal/llm/tools/diagnostics.md b/internal/llm/tools/diagnostics.md
new file mode 100644
index 0000000000000000000000000000000000000000..3f00d66039ade58615d46372bbf7f3cd6e619dcf
--- /dev/null
+++ b/internal/llm/tools/diagnostics.md
@@ -0,0 +1,30 @@
+Get diagnostics for a file and/or project.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to check for errors or warnings in your code
+- Helpful for debugging and ensuring code quality
+- Good for getting a quick overview of issues in a file or project
+
+HOW TO USE:
+
+- Provide a path to a file to get diagnostics for that file
+- Leave the path empty to get diagnostics for the entire project
+- Results are displayed in a structured format with severity levels
+
+FEATURES:
+
+- Displays errors, warnings, and hints
+- Groups diagnostics by severity
+- Provides detailed information about each diagnostic
+
+LIMITATIONS:
+
+- Results are limited to the diagnostics provided by the LSP clients
+- May not cover all possible issues in the code
+- Does not provide suggestions for fixing issues
+
+TIPS:
+
+- Use in conjunction with other tools for a comprehensive code review
+- Combine with the LSP client for real-time diagnostics
diff --git a/internal/llm/tools/download.go b/internal/llm/tools/download.go
index fc0c33a846305d002df2bd6e21a54cbe088a511e..63092cf24a0dbb98e7aef58e9d4ee867525ae945 100644
--- a/internal/llm/tools/download.go
+++ b/internal/llm/tools/download.go
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"io"
@@ -32,38 +33,10 @@ type downloadTool struct {
workingDir string
}
-const (
- DownloadToolName = "download"
- downloadToolDescription = `Downloads binary data from a URL and saves it to a local file.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to download files, images, or other binary data from URLs
-- Helpful for downloading assets, documents, or any file type
-- Useful for saving remote content locally for processing or storage
-
-HOW TO USE:
-- Provide the URL to download from
-- Specify the local file path where the content should be saved
-- Optionally set a timeout for the request
-
-FEATURES:
-- Downloads any file type (binary or text)
-- Automatically creates parent directories if they don't exist
-- Handles large files efficiently with streaming
-- Sets reasonable timeouts to prevent hanging
-- Validates input parameters before making requests
-
-LIMITATIONS:
-- Maximum file size is 100MB
-- Only supports HTTP and HTTPS protocols
-- Cannot handle authentication or cookies
-- Some websites may block automated requests
-- Will overwrite existing files without warning
-
-TIPS:
-- Use absolute paths or paths relative to the working directory
-- Set appropriate timeouts for large files or slow connections`
-)
+const DownloadToolName = "download"
+
+//go:embed download.md
+var downloadDescription []byte
func NewDownloadTool(permissions permission.Service, workingDir string) BaseTool {
return &downloadTool{
@@ -87,7 +60,7 @@ func (t *downloadTool) Name() string {
func (t *downloadTool) Info() ToolInfo {
return ToolInfo{
Name: DownloadToolName,
- Description: downloadToolDescription,
+ Description: string(downloadDescription),
Parameters: map[string]any{
"url": map[string]any{
"type": "string",
diff --git a/internal/llm/tools/download.md b/internal/llm/tools/download.md
new file mode 100644
index 0000000000000000000000000000000000000000..4a9516100dfa2c38f3a4bac588793964569e78b3
--- /dev/null
+++ b/internal/llm/tools/download.md
@@ -0,0 +1,34 @@
+Downloads binary data from a URL and saves it to a local file.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to download files, images, or other binary data from URLs
+- Helpful for downloading assets, documents, or any file type
+- Useful for saving remote content locally for processing or storage
+
+HOW TO USE:
+
+- Provide the URL to download from
+- Specify the local file path where the content should be saved
+- Optionally set a timeout for the request
+
+FEATURES:
+
+- Downloads any file type (binary or text)
+- Automatically creates parent directories if they don't exist
+- Handles large files efficiently with streaming
+- Sets reasonable timeouts to prevent hanging
+- Validates input parameters before making requests
+
+LIMITATIONS:
+
+- Maximum file size is 100MB
+- Only supports HTTP and HTTPS protocols
+- Cannot handle authentication or cookies
+- Some websites may block automated requests
+- Will overwrite existing files without warning
+
+TIPS:
+
+- Use absolute paths or paths relative to the working directory
+- Set appropriate timeouts for large files or slow connections
diff --git a/internal/llm/tools/edit.go b/internal/llm/tools/edit.go
index 8cc3154ebab98ca34a49715d48b110caabc4ffe3..ae4322e08da087127a3238a1e3edf6c1a9a2e37a 100644
--- a/internal/llm/tools/edit.go
+++ b/internal/llm/tools/edit.go
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"log/slog"
@@ -10,6 +11,7 @@ import (
"strings"
"time"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/diff"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/history"
@@ -39,72 +41,18 @@ type EditResponseMetadata struct {
}
type editTool struct {
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
permissions permission.Service
files history.Service
workingDir string
}
-const (
- EditToolName = "edit"
- editDescription = `Edits files by replacing text, creating new files, or deleting content. For moving or renaming files, use the Bash tool with the 'mv' command instead. For larger file edits, use the FileWrite tool to overwrite files.
+const EditToolName = "edit"
-Before using this tool:
+//go:embed edit.md
+var editDescription []byte
-1. Use the FileRead tool to understand the file's contents and context
-
-2. Verify the directory path is correct (only applicable when creating new files):
- - Use the LS tool to verify the parent directory exists and is the correct location
-
-To make a file edit, provide the following:
-1. file_path: The absolute path to the file to modify (must be absolute, not relative)
-2. old_string: The text to replace (must be unique within the file, and must match the file contents exactly, including all whitespace and indentation)
-3. new_string: The edited text to replace the old_string
-4. replace_all: Replace all occurrences of old_string (default false)
-
-Special cases:
-- To create a new file: provide file_path and new_string, leave old_string empty
-- To delete content: provide file_path and old_string, leave new_string empty
-
-The tool will replace ONE occurrence of old_string with new_string in the specified file by default. Set replace_all to true to replace all occurrences.
-
-CRITICAL REQUIREMENTS FOR USING THIS TOOL:
-
-1. UNIQUENESS: When replace_all is false (default), the old_string MUST uniquely identify the specific instance you want to change. This means:
- - Include AT LEAST 3-5 lines of context BEFORE the change point
- - Include AT LEAST 3-5 lines of context AFTER the change point
- - Include all whitespace, indentation, and surrounding code exactly as it appears in the file
-
-2. SINGLE INSTANCE: When replace_all is false, this tool can only change ONE instance at a time. If you need to change multiple instances:
- - Set replace_all to true to replace all occurrences at once
- - Or make separate calls to this tool for each instance
- - Each call must uniquely identify its specific instance using extensive context
-
-3. VERIFICATION: Before using this tool:
- - Check how many instances of the target text exist in the file
- - If multiple instances exist and replace_all is false, gather enough context to uniquely identify each one
- - Plan separate tool calls for each instance or use replace_all
-
-WARNING: If you do not follow these requirements:
- - The tool will fail if old_string matches multiple locations and replace_all is false
- - The tool will fail if old_string doesn't match exactly (including whitespace)
- - You may change the wrong instance if you don't include enough context
-
-When making edits:
- - Ensure the edit results in idiomatic, correct code
- - Do not leave the code in a broken state
- - Always use absolute file paths (starting with /)
-
-WINDOWS NOTES:
-- File paths should use forward slashes (/) for cross-platform compatibility
-- On Windows, absolute paths start with drive letters (C:/) but forward slashes work throughout
-- File permissions are handled automatically by the Go runtime
-- Always assumes \n for line endings. The tool will handle \r\n conversion automatically if needed.
-
-Remember: when making multiple file edits in a row to the same file, you should prefer to send all edits in a single message with multiple calls to this tool, rather than multiple messages with a single call each.`
-)
-
-func NewEditTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service, workingDir string) BaseTool {
+func NewEditTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, files history.Service, workingDir string) BaseTool {
return &editTool{
lspClients: lspClients,
permissions: permissions,
@@ -120,7 +68,7 @@ func (e *editTool) Name() string {
func (e *editTool) Info() ToolInfo {
return ToolInfo{
Name: EditToolName,
- Description: editDescription,
+ Description: string(editDescription),
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
@@ -184,7 +132,8 @@ func (e *editTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error)
return response, nil
}
- waitForLspDiagnostics(ctx, params.FilePath, e.lspClients)
+ notifyLSPs(ctx, e.lspClients, params.FilePath)
+
text := fmt.Sprintf("\n%s\n\n", response.Content)
text += getDiagnostics(params.FilePath, e.lspClients)
response.Content = text
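Several tools now take a *csync.Map[string, *lsp.Client] instead of a plain map, so LSP clients can be added or removed concurrently while tools iterate. internal/csync is not shown in this diff; the sketch below is a hypothetical stand-in built on sync.RWMutex and Go 1.23 iterators, meant only to illustrate the Seq2-style iteration used in the diagnostics changes above (the real type may differ).

```go
package main

import (
	"fmt"
	"iter"
	"sync"
)

// Map is a minimal, hypothetical sketch of a concurrency-safe map in the
// spirit of internal/csync; the actual implementation may differ.
type Map[K comparable, V any] struct {
	mu sync.RWMutex
	m  map[K]V
}

func NewMap[K comparable, V any]() *Map[K, V] {
	return &Map[K, V]{m: make(map[K]V)}
}

func (c *Map[K, V]) Set(k K, v V) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.m[k] = v
}

// Seq2 yields key/value pairs under a read lock, matching the
// `for name, client := range lsps.Seq2()` usage seen earlier in this diff.
func (c *Map[K, V]) Seq2() iter.Seq2[K, V] {
	return func(yield func(K, V) bool) {
		c.mu.RLock()
		defer c.mu.RUnlock()
		for k, v := range c.m {
			if !yield(k, v) {
				return
			}
		}
	}
}

func main() {
	m := NewMap[string, int]()
	m.Set("gopls", 1)
	for name, id := range m.Seq2() {
		fmt.Println(name, id) // gopls 1
	}
}
```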
diff --git a/internal/llm/tools/edit.md b/internal/llm/tools/edit.md
new file mode 100644
index 0000000000000000000000000000000000000000..2f9ec73883f3a68546e7b14bd53a12f9f69cc430
--- /dev/null
+++ b/internal/llm/tools/edit.md
@@ -0,0 +1,60 @@
+Edits files by replacing text, creating new files, or deleting content. For moving or renaming files, use the Bash tool with the 'mv' command instead. For larger file edits, use the FileWrite tool to overwrite files.
+
+Before using this tool:
+
+1. Use the FileRead tool to understand the file's contents and context
+
+2. Verify the directory path is correct (only applicable when creating new files):
+ - Use the LS tool to verify the parent directory exists and is the correct location
+
+To make a file edit, provide the following:
+
+1. file_path: The absolute path to the file to modify (must be absolute, not relative)
+2. old_string: The text to replace (must be unique within the file, and must match the file contents exactly, including all whitespace and indentation)
+3. new_string: The edited text to replace the old_string
+4. replace_all: Replace all occurrences of old_string (default false)
+
+Special cases:
+
+- To create a new file: provide file_path and new_string, leave old_string empty
+- To delete content: provide file_path and old_string, leave new_string empty
+
+The tool will replace ONE occurrence of old_string with new_string in the specified file by default. Set replace_all to true to replace all occurrences.
+
+CRITICAL REQUIREMENTS FOR USING THIS TOOL:
+
+1. UNIQUENESS: When replace_all is false (default), the old_string MUST uniquely identify the specific instance you want to change. This means:
+ - Include AT LEAST 3-5 lines of context BEFORE the change point
+ - Include AT LEAST 3-5 lines of context AFTER the change point
+ - Include all whitespace, indentation, and surrounding code exactly as it appears in the file
+
+2. SINGLE INSTANCE: When replace_all is false, this tool can only change ONE instance at a time. If you need to change multiple instances:
+ - Set replace_all to true to replace all occurrences at once
+ - Or make separate calls to this tool for each instance
+ - Each call must uniquely identify its specific instance using extensive context
+
+3. VERIFICATION: Before using this tool:
+ - Check how many instances of the target text exist in the file
+ - If multiple instances exist and replace_all is false, gather enough context to uniquely identify each one
+ - Plan separate tool calls for each instance or use replace_all
+
+WARNING: If you do not follow these requirements:
+
+- The tool will fail if old_string matches multiple locations and replace_all is false
+- The tool will fail if old_string doesn't match exactly (including whitespace)
+- You may change the wrong instance if you don't include enough context
+
+When making edits:
+
+- Ensure the edit results in idiomatic, correct code
+- Do not leave the code in a broken state
+- Always use absolute file paths (starting with /)
+
+WINDOWS NOTES:
+
+- File paths should use forward slashes (/) for cross-platform compatibility
+- On Windows, absolute paths start with drive letters (C:/) but forward slashes work throughout
+- File permissions are handled automatically by the Go runtime
+- Always assumes \n for line endings. The tool will handle \r\n conversion automatically if needed.
+
+Remember: when making multiple file edits in a row to the same file, you should prefer to send all edits in a single message with multiple calls to this tool, rather than multiple messages with a single call each.
diff --git a/internal/llm/tools/fetch.go b/internal/llm/tools/fetch.go
index 156dbff7edd5747c4e758fc09cf94a5230c50deb..e9c5732462651cd9e31337f91f4f4d91a36e70c1 100644
--- a/internal/llm/tools/fetch.go
+++ b/internal/llm/tools/fetch.go
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"io"
@@ -33,38 +34,10 @@ type fetchTool struct {
workingDir string
}
-const (
- FetchToolName = "fetch"
- fetchToolDescription = `Fetches content from a URL and returns it in the specified format.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to download content from a URL
-- Helpful for retrieving documentation, API responses, or web content
-- Useful for getting external information to assist with tasks
-
-HOW TO USE:
-- Provide the URL to fetch content from
-- Specify the desired output format (text, markdown, or html)
-- Optionally set a timeout for the request
-
-FEATURES:
-- Supports three output formats: text, markdown, and html
-- Automatically handles HTTP redirects
-- Sets reasonable timeouts to prevent hanging
-- Validates input parameters before making requests
-
-LIMITATIONS:
-- Maximum response size is 5MB
-- Only supports HTTP and HTTPS protocols
-- Cannot handle authentication or cookies
-- Some websites may block automated requests
-
-TIPS:
-- Use text format for plain text content or simple API responses
-- Use markdown format for content that should be rendered with formatting
-- Use html format when you need the raw HTML structure
-- Set appropriate timeouts for potentially slow websites`
-)
+const FetchToolName = "fetch"
+
+//go:embed fetch.md
+var fetchDescription []byte
func NewFetchTool(permissions permission.Service, workingDir string) BaseTool {
return &fetchTool{
@@ -88,7 +61,7 @@ func (t *fetchTool) Name() string {
func (t *fetchTool) Info() ToolInfo {
return ToolInfo{
Name: FetchToolName,
- Description: fetchToolDescription,
+ Description: string(fetchDescription),
Parameters: map[string]any{
"url": map[string]any{
"type": "string",
diff --git a/internal/llm/tools/fetch.md b/internal/llm/tools/fetch.md
new file mode 100644
index 0000000000000000000000000000000000000000..ffb7bbaf968e4ae4e338ab1997211dc89a9234d1
--- /dev/null
+++ b/internal/llm/tools/fetch.md
@@ -0,0 +1,34 @@
+Fetches content from a URL and returns it in the specified format.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to download content from a URL
+- Helpful for retrieving documentation, API responses, or web content
+- Useful for getting external information to assist with tasks
+
+HOW TO USE:
+
+- Provide the URL to fetch content from
+- Specify the desired output format (text, markdown, or html)
+- Optionally set a timeout for the request
+
+FEATURES:
+
+- Supports three output formats: text, markdown, and html
+- Automatically handles HTTP redirects
+- Sets reasonable timeouts to prevent hanging
+- Validates input parameters before making requests
+
+LIMITATIONS:
+
+- Maximum response size is 5MB
+- Only supports HTTP and HTTPS protocols
+- Cannot handle authentication or cookies
+- Some websites may block automated requests
+
+TIPS:
+
+- Use text format for plain text content or simple API responses
+- Use markdown format for content that should be rendered with formatting
+- Use html format when you need the raw HTML structure
+- Set appropriate timeouts for potentially slow websites
diff --git a/internal/llm/tools/glob.go b/internal/llm/tools/glob.go
index 1d558cc7d65c8f7d766f8251073416248442c25c..85207c28ea3408ab4a3867e983766d35c9474b47 100644
--- a/internal/llm/tools/glob.go
+++ b/internal/llm/tools/glob.go
@@ -3,6 +3,7 @@ package tools
import (
"bytes"
"context"
+ _ "embed"
"encoding/json"
"fmt"
"log/slog"
@@ -14,48 +15,10 @@ import (
"github.com/charmbracelet/crush/internal/fsext"
)
-const (
- GlobToolName = "glob"
- globDescription = `Fast file pattern matching tool that finds files by name and pattern, returning matching paths sorted by modification time (newest first).
-
-WHEN TO USE THIS TOOL:
-- Use when you need to find files by name patterns or extensions
-- Great for finding specific file types across a directory structure
-- Useful for discovering files that match certain naming conventions
-
-HOW TO USE:
-- Provide a glob pattern to match against file paths
-- Optionally specify a starting directory (defaults to current working directory)
-- Results are sorted with most recently modified files first
-
-GLOB PATTERN SYNTAX:
-- '*' matches any sequence of non-separator characters
-- '**' matches any sequence of characters, including separators
-- '?' matches any single non-separator character
-- '[...]' matches any character in the brackets
-- '[!...]' matches any character not in the brackets
-
-COMMON PATTERN EXAMPLES:
-- '*.js' - Find all JavaScript files in the current directory
-- '**/*.js' - Find all JavaScript files in any subdirectory
-- 'src/**/*.{ts,tsx}' - Find all TypeScript files in the src directory
-- '*.{html,css,js}' - Find all HTML, CSS, and JS files
-
-LIMITATIONS:
-- Results are limited to 100 files (newest first)
-- Does not search file contents (use Grep tool for that)
-- Hidden files (starting with '.') are skipped
-
-WINDOWS NOTES:
-- Path separators are handled automatically (both / and \ work)
-- Uses ripgrep (rg) command if available, otherwise falls back to built-in Go implementation
-
-TIPS:
-- Patterns should use forward slashes (/) for cross-platform compatibility
-- For the most useful results, combine with the Grep tool: first find files with Glob, then search their contents with Grep
-- When doing iterative exploration that may require multiple rounds of searching, consider using the Agent tool instead
-- Always check if results are truncated and refine your search pattern if needed`
-)
+const GlobToolName = "glob"
+
+//go:embed glob.md
+var globDescription []byte
type GlobParams struct {
Pattern string `json:"pattern"`
@@ -84,7 +47,7 @@ func (g *globTool) Name() string {
func (g *globTool) Info() ToolInfo {
return ToolInfo{
Name: GlobToolName,
- Description: globDescription,
+ Description: string(globDescription),
Parameters: map[string]any{
"pattern": map[string]any{
"type": "string",
diff --git a/internal/llm/tools/glob.md b/internal/llm/tools/glob.md
new file mode 100644
index 0000000000000000000000000000000000000000..1d73d42c9bc59372368965f8c04ef0c068d3deca
--- /dev/null
+++ b/internal/llm/tools/glob.md
@@ -0,0 +1,46 @@
+Fast file pattern matching tool that finds files by name and pattern, returning matching paths sorted by modification time (newest first).
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to find files by name patterns or extensions
+- Great for finding specific file types across a directory structure
+- Useful for discovering files that match certain naming conventions
+
+HOW TO USE:
+
+- Provide a glob pattern to match against file paths
+- Optionally specify a starting directory (defaults to current working directory)
+- Results are sorted with most recently modified files first
+
+GLOB PATTERN SYNTAX:
+
+- '\*' matches any sequence of non-separator characters
+- '\*\*' matches any sequence of characters, including separators
+- '?' matches any single non-separator character
+- '[...]' matches any character in the brackets
+- '[!...]' matches any character not in the brackets
+
+COMMON PATTERN EXAMPLES:
+
+- '\*.js' - Find all JavaScript files in the current directory
+- '\*\*/\*.js' - Find all JavaScript files in any subdirectory
+- 'src/\*\*/\*.{ts,tsx}' - Find all TypeScript files in the src directory
+- '\*.{html,css,js}' - Find all HTML, CSS, and JS files
+
+LIMITATIONS:
+
+- Results are limited to 100 files (newest first)
+- Does not search file contents (use Grep tool for that)
+- Hidden files (starting with '.') are skipped
+
+WINDOWS NOTES:
+
+- Path separators are handled automatically (both / and \ work)
+- Uses ripgrep (rg) command if available, otherwise falls back to built-in Go implementation
+
+TIPS:
+
+- Patterns should use forward slashes (/) for cross-platform compatibility
+- For the most useful results, combine with the Grep tool: first find files with Glob, then search their contents with Grep
+- When doing iterative exploration that may require multiple rounds of searching, consider using the Agent tool instead
+- Always check if results are truncated and refine your search pattern if needed
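For reference against the pattern syntax above: Go's standard path.Match covers the single-star, '?', and bracket forms but not '\*\*', which is why the tool shells out to ripgrep or uses its own walker for recursive patterns. A small illustration (not the tool's actual implementation):

```go
package main

import (
	"fmt"
	"path"
)

func main() {
	// '*' matches any sequence of non-separator characters, so it never
	// crosses '/'; recursive '**' matching is not part of path.Match.
	ok, _ := path.Match("*.js", "app.js")
	fmt.Println(ok) // true
	ok, _ = path.Match("*.js", "src/app.js")
	fmt.Println(ok) // false: '*' does not cross the '/' separator
	ok, _ = path.Match("file[0-9].go", "file3.go")
	fmt.Println(ok) // true: bracket classes work as described above
}
```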
diff --git a/internal/llm/tools/grep.go b/internal/llm/tools/grep.go
index 1160fc287088f960d15fa1bf847eb13f77e84b92..cbf50360b9355c05797690678a99d1310b19556f 100644
--- a/internal/llm/tools/grep.go
+++ b/internal/llm/tools/grep.go
@@ -3,6 +3,7 @@ package tools
import (
"bufio"
"context"
+ _ "embed"
"encoding/json"
"fmt"
"io"
@@ -92,55 +93,10 @@ type grepTool struct {
workingDir string
}
-const (
- GrepToolName = "grep"
- grepDescription = `Fast content search tool that finds files containing specific text or patterns, returning matching file paths sorted by modification time (newest first).
-
-WHEN TO USE THIS TOOL:
-- Use when you need to find files containing specific text or patterns
-- Great for searching code bases for function names, variable declarations, or error messages
-- Useful for finding all files that use a particular API or pattern
-
-HOW TO USE:
-- Provide a regex pattern to search for within file contents
-- Set literal_text=true if you want to search for the exact text with special characters (recommended for non-regex users)
-- Optionally specify a starting directory (defaults to current working directory)
-- Optionally provide an include pattern to filter which files to search
-- Results are sorted with most recently modified files first
-
-REGEX PATTERN SYNTAX (when literal_text=false):
-- Supports standard regular expression syntax
-- 'function' searches for the literal text "function"
-- 'log\..*Error' finds text starting with "log." and ending with "Error"
-- 'import\s+.*\s+from' finds import statements in JavaScript/TypeScript
-
-COMMON INCLUDE PATTERN EXAMPLES:
-- '*.js' - Only search JavaScript files
-- '*.{ts,tsx}' - Only search TypeScript files
-- '*.go' - Only search Go files
-
-LIMITATIONS:
-- Results are limited to 100 files (newest first)
-- Performance depends on the number of files being searched
-- Very large binary files may be skipped
-- Hidden files (starting with '.') are skipped
-
-IGNORE FILE SUPPORT:
-- Respects .gitignore patterns to skip ignored files and directories
-- Respects .crushignore patterns for additional ignore rules
-- Both ignore files are automatically detected in the search root directory
-
-CROSS-PLATFORM NOTES:
-- Uses ripgrep (rg) command if available for better performance
-- Falls back to built-in Go implementation if ripgrep is not available
-- File paths are normalized automatically for cross-platform compatibility
-
-TIPS:
-- For faster, more targeted searches, first use Glob to find relevant files, then use Grep
-- When doing iterative exploration that may require multiple rounds of searching, consider using the Agent tool instead
-- Always check if results are truncated and refine your search pattern if needed
-- Use literal_text=true when searching for exact text containing special characters like dots, parentheses, etc.`
-)
+const GrepToolName = "grep"
+
+//go:embed grep.md
+var grepDescription []byte
func NewGrepTool(workingDir string) BaseTool {
return &grepTool{
@@ -155,7 +111,7 @@ func (g *grepTool) Name() string {
func (g *grepTool) Info() ToolInfo {
return ToolInfo{
Name: GrepToolName,
- Description: grepDescription,
+ Description: string(grepDescription),
Parameters: map[string]any{
"pattern": map[string]any{
"type": "string",
@@ -303,18 +259,16 @@ func searchWithRipgrep(ctx context.Context, pattern, path, include string) ([]gr
continue
}
- // Parse ripgrep output format: file:line:content
- parts := strings.SplitN(line, ":", 3)
- if len(parts) < 3 {
+ // Parse ripgrep output using null separation
+ filePath, lineNumStr, lineText, ok := parseRipgrepLine(line)
+ if !ok {
continue
}
- filePath := parts[0]
- lineNum, err := strconv.Atoi(parts[1])
+ lineNum, err := strconv.Atoi(lineNumStr)
if err != nil {
continue
}
- lineText := parts[2]
fileInfo, err := os.Stat(filePath)
if err != nil {
@@ -332,6 +286,33 @@ func searchWithRipgrep(ctx context.Context, pattern, path, include string) ([]gr
return matches, nil
}
+// parseRipgrepLine parses ripgrep output with null separation to handle Windows paths
+func parseRipgrepLine(line string) (filePath, lineNum, lineText string, ok bool) {
+ // Split on null byte first to separate filename from rest
+ parts := strings.SplitN(line, "\x00", 2)
+ if len(parts) != 2 {
+ return "", "", "", false
+ }
+
+ filePath = parts[0]
+ remainder := parts[1]
+
+ // Now split the remainder on first colon: "linenum:content"
+ colonIndex := strings.Index(remainder, ":")
+ if colonIndex == -1 {
+ return "", "", "", false
+ }
+
+ lineNumStr := remainder[:colonIndex]
+ lineText = remainder[colonIndex+1:]
+
+ if _, err := strconv.Atoi(lineNumStr); err != nil {
+ return "", "", "", false
+ }
+
+ return filePath, lineNumStr, lineText, true
+}
+
func searchFilesWithRegex(pattern, rootPath, include string) ([]grepMatch, error) {
matches := []grepMatch{}
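To make the null-separation change concrete: with `-0`, ripgrep prints the file path, a NUL byte, then `line:content`. Previously the first colon was used as the separator, so a Windows drive letter such as `C:` was mistaken for the file field and the line number failed to parse. A self-contained run of the parser above on a made-up output line:

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseRipgrepLine is copied from the change above: the file path is split
// off at the NUL byte, then "line:content" is split at the first colon.
func parseRipgrepLine(line string) (filePath, lineNum, lineText string, ok bool) {
	parts := strings.SplitN(line, "\x00", 2)
	if len(parts) != 2 {
		return "", "", "", false
	}
	filePath = parts[0]
	remainder := parts[1]
	colonIndex := strings.Index(remainder, ":")
	if colonIndex == -1 {
		return "", "", "", false
	}
	lineNumStr := remainder[:colonIndex]
	lineText = remainder[colonIndex+1:]
	if _, err := strconv.Atoi(lineNumStr); err != nil {
		return "", "", "", false
	}
	return filePath, lineNumStr, lineText, true
}

func main() {
	// Hypothetical `rg -H -n -0` output line with a Windows-style path.
	line := "C:/work/app/main.go" + "\x00" + "42:\tfmt.Println(\"hi\")"
	path, num, text, ok := parseRipgrepLine(line)
	fmt.Printf("ok=%v path=%s line=%s text=%q\n", ok, path, num, text)
	// ok=true path=C:/work/app/main.go line=42 text="\tfmt.Println(\"hi\")"
}
```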
diff --git a/internal/llm/tools/grep.md b/internal/llm/tools/grep.md
new file mode 100644
index 0000000000000000000000000000000000000000..c17a0ad1b8531a4fb9fd73b56e89973c749f91b1
--- /dev/null
+++ b/internal/llm/tools/grep.md
@@ -0,0 +1,54 @@
+Fast content search tool that finds files containing specific text or patterns, returning matching file paths sorted by modification time (newest first).
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to find files containing specific text or patterns
+- Great for searching code bases for function names, variable declarations, or error messages
+- Useful for finding all files that use a particular API or pattern
+
+HOW TO USE:
+
+- Provide a regex pattern to search for within file contents
+- Set literal_text=true if you want to search for the exact text with special characters (recommended for non-regex users)
+- Optionally specify a starting directory (defaults to current working directory)
+- Optionally provide an include pattern to filter which files to search
+- Results are sorted with most recently modified files first
+
+REGEX PATTERN SYNTAX (when literal_text=false):
+
+- Supports standard regular expression syntax
+- 'function' searches for the literal text "function"
+- 'log\..\*Error' finds text starting with "log." and ending with "Error"
+- 'import\s+.\*\s+from' finds import statements in JavaScript/TypeScript
+
+COMMON INCLUDE PATTERN EXAMPLES:
+
+- '\*.js' - Only search JavaScript files
+- '\*.{ts,tsx}' - Only search TypeScript files
+- '\*.go' - Only search Go files
+
+LIMITATIONS:
+
+- Results are limited to 100 files (newest first)
+- Performance depends on the number of files being searched
+- Very large binary files may be skipped
+- Hidden files (starting with '.') are skipped
+
+IGNORE FILE SUPPORT:
+
+- Respects .gitignore patterns to skip ignored files and directories
+- Respects .crushignore patterns for additional ignore rules
+- Both ignore files are automatically detected in the search root directory
+
+CROSS-PLATFORM NOTES:
+
+- Uses ripgrep (rg) command if available for better performance
+- Falls back to built-in Go implementation if ripgrep is not available
+- File paths are normalized automatically for cross-platform compatibility
+
+TIPS:
+
+- For faster, more targeted searches, first use Glob to find relevant files, then use Grep
+- When doing iterative exploration that may require multiple rounds of searching, consider using the Agent tool instead
+- Always check if results are truncated and refine your search pattern if needed
+- Use literal_text=true when searching for exact text containing special characters like dots, parentheses, etc.
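The literal_text flag exists because characters like '.', '(' and '*' are regex metacharacters, and escaping them by hand is error-prone. A hedged sketch of the idea using the standard library (the tool's actual escaping may differ):

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// regexp.MustCompile("log.Printf(") would panic: '(' opens a group.
	// Quoting the metacharacters first makes an exact-text search safe,
	// which is what literal_text=true is for.
	exact := regexp.QuoteMeta("log.Printf(")
	fmt.Println(exact) // log\.Printf\(

	re := regexp.MustCompile(exact)
	fmt.Println(re.MatchString(`log.Printf("hello")`)) // true
	fmt.Println(re.MatchString(`logxPrintf("hello")`)) // false: '.' is now literal
}
```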
diff --git a/internal/llm/tools/ls.go b/internal/llm/tools/ls.go
index 2546dd77a6b64faa24f54cf604710d568ffe9c5b..305f7f10249594ff06ac008a8bf81145d7d834de 100644
--- a/internal/llm/tools/ls.go
+++ b/internal/llm/tools/ls.go
@@ -1,13 +1,16 @@
package tools
import (
+ "cmp"
"context"
+ _ "embed"
"encoding/json"
"fmt"
"os"
"path/filepath"
"strings"
+ "github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/permission"
)
@@ -15,11 +18,13 @@ import (
type LSParams struct {
Path string `json:"path"`
Ignore []string `json:"ignore"`
+ Depth int `json:"depth"`
}
type LSPermissionsParams struct {
Path string `json:"path"`
Ignore []string `json:"ignore"`
+ Depth int `json:"depth"`
}
type TreeNode struct {
@@ -40,44 +45,13 @@ type lsTool struct {
}
const (
- LSToolName = "ls"
- MaxLSFiles = 1000
- lsDescription = `Directory listing tool that shows files and subdirectories in a tree structure, helping you explore and understand the project organization.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to explore the structure of a directory
-- Helpful for understanding the organization of a project
-- Good first step when getting familiar with a new codebase
-
-HOW TO USE:
-- Provide a path to list (defaults to current working directory)
-- Optionally specify glob patterns to ignore
-- Results are displayed in a tree structure
-
-FEATURES:
-- Displays a hierarchical view of files and directories
-- Automatically skips hidden files/directories (starting with '.')
-- Skips common system directories like __pycache__
-- Can filter out files matching specific patterns
-
-LIMITATIONS:
-- Results are limited to 1000 files
-- Very large directories will be truncated
-- Does not show file sizes or permissions
-- Cannot recursively list all directories in a large project
-
-WINDOWS NOTES:
-- Hidden file detection uses Unix convention (files starting with '.')
-- Windows-specific hidden files (with hidden attribute) are not automatically skipped
-- Common Windows directories like System32, Program Files are not in default ignore list
-- Path separators are handled automatically (both / and \ work)
-
-TIPS:
-- Use Glob tool for finding files by name patterns instead of browsing
-- Use Grep tool for searching file contents
-- Combine with other tools for more effective exploration`
+ LSToolName = "ls"
+ maxLSFiles = 1000
)
+//go:embed ls.md
+var lsDescription []byte
+
func NewLsTool(permissions permission.Service, workingDir string) BaseTool {
return &lsTool{
workingDir: workingDir,
@@ -92,12 +66,16 @@ func (l *lsTool) Name() string {
func (l *lsTool) Info() ToolInfo {
return ToolInfo{
Name: LSToolName,
- Description: lsDescription,
+ Description: string(lsDescription),
Parameters: map[string]any{
"path": map[string]any{
"type": "string",
"description": "The path to the directory to list (defaults to current working directory)",
},
+ "depth": map[string]any{
+ "type": "integer",
+ "description": "The maximum depth to traverse",
+ },
"ignore": map[string]any{
"type": "array",
"description": "List of glob patterns to ignore",
@@ -116,13 +94,7 @@ func (l *lsTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
return NewTextErrorResponse(fmt.Sprintf("error parsing parameters: %s", err)), nil
}
- searchPath := params.Path
- if searchPath == "" {
- searchPath = l.workingDir
- }
-
- var err error
- searchPath, err = fsext.Expand(searchPath)
+ searchPath, err := fsext.Expand(cmp.Or(params.Path, l.workingDir))
if err != nil {
return ToolResponse{}, fmt.Errorf("error expanding path: %w", err)
}
@@ -167,44 +139,49 @@ func (l *lsTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
}
}
- output, err := ListDirectoryTree(searchPath, params.Ignore)
+ output, metadata, err := ListDirectoryTree(searchPath, params)
if err != nil {
return ToolResponse{}, err
}
- // Get file count for metadata
- files, truncated, err := fsext.ListDirectory(searchPath, params.Ignore, MaxLSFiles)
- if err != nil {
- return ToolResponse{}, fmt.Errorf("error listing directory for metadata: %w", err)
- }
-
return WithResponseMetadata(
NewTextResponse(output),
- LSResponseMetadata{
- NumberOfFiles: len(files),
- Truncated: truncated,
- },
+ metadata,
), nil
}
-func ListDirectoryTree(searchPath string, ignore []string) (string, error) {
+func ListDirectoryTree(searchPath string, params LSParams) (string, LSResponseMetadata, error) {
if _, err := os.Stat(searchPath); os.IsNotExist(err) {
- return "", fmt.Errorf("path does not exist: %s", searchPath)
+ return "", LSResponseMetadata{}, fmt.Errorf("path does not exist: %s", searchPath)
}
- files, truncated, err := fsext.ListDirectory(searchPath, ignore, MaxLSFiles)
+ ls := config.Get().Tools.Ls
+ depth, limit := ls.Limits()
+ maxFiles := min(limit, maxLSFiles)
+ files, truncated, err := fsext.ListDirectory(
+ searchPath,
+ params.Ignore,
+ cmp.Or(params.Depth, depth),
+ maxFiles,
+ )
if err != nil {
- return "", fmt.Errorf("error listing directory: %w", err)
+ return "", LSResponseMetadata{}, fmt.Errorf("error listing directory: %w", err)
}
+ metadata := LSResponseMetadata{
+ NumberOfFiles: len(files),
+ Truncated: truncated,
+ }
tree := createFileTree(files, searchPath)
- output := printTree(tree, searchPath)
+ var output string
if truncated {
- output = fmt.Sprintf("There are more than %d files in the directory. Use a more specific path or use the Glob tool to find specific files. The first %d files and directories are included below:\n\n%s", MaxLSFiles, MaxLSFiles, output)
+ output = fmt.Sprintf("There are more than %d files in the directory. Use a more specific path or use the Glob tool to find specific files. The first %[1]d files and directories are included below.\n", maxFiles)
}
-
- return output, nil
+ if depth > 0 {
+ output = fmt.Sprintf("The directory tree is shown up to a depth of %d. Use a higher depth and a specific path to see more levels.\n", cmp.Or(params.Depth, depth))
+ }
+ return output + "\n" + printTree(tree, searchPath), metadata, nil
}
func createFileTree(sortedPaths []string, rootPath string) []*TreeNode {
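The new listing code leans on cmp.Or (Go 1.22+) to fall back to defaults when the caller leaves path or depth empty; a minimal illustration with hypothetical values:

```go
package main

import (
	"cmp"
	"fmt"
)

func main() {
	// cmp.Or returns its first non-zero argument, so an empty path falls
	// back to the working directory and a zero depth falls back to the
	// configured limit, as in the Run and ListDirectoryTree changes above.
	requestedPath := ""                // caller left the path empty
	workingDir := "/home/user/project" // hypothetical working directory
	fmt.Println(cmp.Or(requestedPath, workingDir)) // /home/user/project

	requestedDepth := 0   // caller did not set a depth
	configuredDepth := 3  // hypothetical configured default
	fmt.Println(cmp.Or(requestedDepth, configuredDepth)) // 3
}
```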
diff --git a/internal/llm/tools/ls.md b/internal/llm/tools/ls.md
new file mode 100644
index 0000000000000000000000000000000000000000..798ddcc90f9659032870a3b893129a7ec2edad50
--- /dev/null
+++ b/internal/llm/tools/ls.md
@@ -0,0 +1,40 @@
+Directory listing tool that shows files and subdirectories in a tree structure, helping you explore and understand the project organization.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to explore the structure of a directory
+- Helpful for understanding the organization of a project
+- Good first step when getting familiar with a new codebase
+
+HOW TO USE:
+
+- Provide a path to list (defaults to current working directory)
+- Optionally specify glob patterns to ignore
+- Results are displayed in a tree structure
+
+FEATURES:
+
+- Displays a hierarchical view of files and directories
+- Automatically skips hidden files/directories (starting with '.')
+- Skips common system directories like \_\_pycache\_\_
+- Can filter out files matching specific patterns
+
+LIMITATIONS:
+
+- Results are limited to 1000 files
+- Very large directories will be truncated
+- Does not show file sizes or permissions
+- Cannot recursively list all directories in a large project
+
+WINDOWS NOTES:
+
+- Hidden file detection uses Unix convention (files starting with '.')
+- Windows-specific hidden files (with hidden attribute) are not automatically skipped
+- Common Windows directories like System32, Program Files are not in default ignore list
+- Path separators are handled automatically (both / and \ work)
+
+TIPS:
+
+- Use Glob tool for finding files by name patterns instead of browsing
+- Use Grep tool for searching file contents
+- Combine with other tools for more effective exploration
diff --git a/internal/llm/tools/multiedit.go b/internal/llm/tools/multiedit.go
index 50070ca613464a280e53ab964155b8d1e205dde5..1d6e415ceb8cc10bcc32788cddbbdc40fc781907 100644
--- a/internal/llm/tools/multiedit.go
+++ b/internal/llm/tools/multiedit.go
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"log/slog"
@@ -10,6 +11,7 @@ import (
"strings"
"time"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/diff"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/history"
@@ -43,59 +45,18 @@ type MultiEditResponseMetadata struct {
}
type multiEditTool struct {
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
permissions permission.Service
files history.Service
workingDir string
}
-const (
- MultiEditToolName = "multiedit"
- multiEditDescription = `This is a tool for making multiple edits to a single file in one operation. It is built on top of the Edit tool and allows you to perform multiple find-and-replace operations efficiently. Prefer this tool over the Edit tool when you need to make multiple edits to the same file.
-
-Before using this tool:
-
-1. Use the Read tool to understand the file's contents and context
-
-2. Verify the directory path is correct
-
-To make multiple file edits, provide the following:
-1. file_path: The absolute path to the file to modify (must be absolute, not relative)
-2. edits: An array of edit operations to perform, where each edit contains:
- - old_string: The text to replace (must match the file contents exactly, including all whitespace and indentation)
- - new_string: The edited text to replace the old_string
- - replace_all: Replace all occurrences of old_string. This parameter is optional and defaults to false.
-
-IMPORTANT:
-- All edits are applied in sequence, in the order they are provided
-- Each edit operates on the result of the previous edit
-- All edits must be valid for the operation to succeed - if any edit fails, none will be applied
-- This tool is ideal when you need to make several changes to different parts of the same file
-
-CRITICAL REQUIREMENTS:
-1. All edits follow the same requirements as the single Edit tool
-2. The edits are atomic - either all succeed or none are applied
-3. Plan your edits carefully to avoid conflicts between sequential operations
-
-WARNING:
-- The tool will fail if edits.old_string doesn't match the file contents exactly (including whitespace)
-- The tool will fail if edits.old_string and edits.new_string are the same
-- Since edits are applied in sequence, ensure that earlier edits don't affect the text that later edits are trying to find
-
-When making edits:
-- Ensure all edits result in idiomatic, correct code
-- Do not leave the code in a broken state
-- Always use absolute file paths (starting with /)
-- Only use emojis if the user explicitly requests it. Avoid adding emojis to files unless asked.
-- Use replace_all for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance.
-
-If you want to create a new file, use:
-- A new file path, including dir name if needed
-- First edit: empty old_string and the new file's contents as new_string
-- Subsequent edits: normal edit operations on the created content`
-)
+const MultiEditToolName = "multiedit"
+
+//go:embed multiedit.md
+var multieditDescription []byte
-func NewMultiEditTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service, workingDir string) BaseTool {
+func NewMultiEditTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, files history.Service, workingDir string) BaseTool {
return &multiEditTool{
lspClients: lspClients,
permissions: permissions,
@@ -111,7 +72,7 @@ func (m *multiEditTool) Name() string {
func (m *multiEditTool) Info() ToolInfo {
return ToolInfo{
Name: MultiEditToolName,
- Description: multiEditDescription,
+ Description: string(multieditDescription),
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
@@ -188,8 +149,10 @@ func (m *multiEditTool) Run(ctx context.Context, call ToolCall) (ToolResponse, e
return response, nil
}
+ // Notify LSP clients about the change
+ notifyLSPs(ctx, m.lspClients, params.FilePath)
+
// Wait for LSP diagnostics and add them to the response
- waitForLspDiagnostics(ctx, params.FilePath, m.lspClients)
text := fmt.Sprintf("\n%s\n\n", response.Content)
text += getDiagnostics(params.FilePath, m.lspClients)
response.Content = text
diff --git a/internal/llm/tools/multiedit.md b/internal/llm/tools/multiedit.md
new file mode 100644
index 0000000000000000000000000000000000000000..4cc64f7a82c46140b79491a897fa8b691e29cc40
--- /dev/null
+++ b/internal/llm/tools/multiedit.md
@@ -0,0 +1,48 @@
+This is a tool for making multiple edits to a single file in one operation. It is built on top of the Edit tool and allows you to perform multiple find-and-replace operations efficiently. Prefer this tool over the Edit tool when you need to make multiple edits to the same file.
+
+Before using this tool:
+
+1. Use the Read tool to understand the file's contents and context
+
+2. Verify the directory path is correct
+
+To make multiple file edits, provide the following:
+
+1. file_path: The absolute path to the file to modify (must be absolute, not relative)
+2. edits: An array of edit operations to perform, where each edit contains:
+ - old_string: The text to replace (must match the file contents exactly, including all whitespace and indentation)
+ - new_string: The edited text to replace the old_string
+ - replace_all: Replace all occurrences of old_string. This parameter is optional and defaults to false.
+
+IMPORTANT:
+
+- All edits are applied in sequence, in the order they are provided
+- Each edit operates on the result of the previous edit
+- All edits must be valid for the operation to succeed - if any edit fails, none will be applied
+- This tool is ideal when you need to make several changes to different parts of the same file
+
+CRITICAL REQUIREMENTS:
+
+1. All edits follow the same requirements as the single Edit tool
+2. The edits are atomic - either all succeed or none are applied
+3. Plan your edits carefully to avoid conflicts between sequential operations
+
+WARNING:
+
+- The tool will fail if edits.old_string doesn't match the file contents exactly (including whitespace)
+- The tool will fail if edits.old_string and edits.new_string are the same
+- Since edits are applied in sequence, ensure that earlier edits don't affect the text that later edits are trying to find
+
+When making edits:
+
+- Ensure all edits result in idiomatic, correct code
+- Do not leave the code in a broken state
+- Always use absolute file paths (starting with /)
+- Only use emojis if the user explicitly requests it. Avoid adding emojis to files unless asked.
+- Use replace_all for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance.
+
+If you want to create a new file, use:
+
+- A new file path, including dir name if needed
+- First edit: empty old_string and the new file's contents as new_string
+- Subsequent edits: normal edit operations on the created content
diff --git a/internal/llm/tools/rg.go b/internal/llm/tools/rg.go
index 40ab7f2f520697659e3ef092a7ff3e96b2c3c47c..8809b57c8db30b4ac1ed6c070df5a7218c59e233 100644
--- a/internal/llm/tools/rg.go
+++ b/internal/llm/tools/rg.go
@@ -42,8 +42,8 @@ func getRgSearchCmd(ctx context.Context, pattern, path, include string) *exec.Cm
if name == "" {
return nil
}
- // Use -n to show line numbers and include the matched line
- args := []string{"-H", "-n", pattern}
+ // Use -n to show line numbers, -0 for null separation to handle Windows paths
+ args := []string{"-H", "-n", "-0", pattern}
if include != "" {
args = append(args, "--glob", include)
}
diff --git a/internal/llm/tools/sourcegraph.go b/internal/llm/tools/sourcegraph.go
index fcc9bb57428b45a4620417775c61acb380b4d7ad..aea6b1ae8f0d52d10c083debf5bd3780957f261a 100644
--- a/internal/llm/tools/sourcegraph.go
+++ b/internal/llm/tools/sourcegraph.go
@@ -3,6 +3,7 @@ package tools
import (
"bytes"
"context"
+ _ "embed"
"encoding/json"
"fmt"
"io"
@@ -27,103 +28,10 @@ type sourcegraphTool struct {
client *http.Client
}
-const (
- SourcegraphToolName = "sourcegraph"
- sourcegraphToolDescription = `Search code across public repositories using Sourcegraph's GraphQL API.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to find code examples or implementations across public repositories
-- Helpful for researching how others have solved similar problems
-- Useful for discovering patterns and best practices in open source code
-
-HOW TO USE:
-- Provide a search query using Sourcegraph's query syntax
-- Optionally specify the number of results to return (default: 10)
-- Optionally set a timeout for the request
-
-QUERY SYNTAX:
-- Basic search: "fmt.Println" searches for exact matches
-- File filters: "file:.go fmt.Println" limits to Go files
-- Repository filters: "repo:^github\.com/golang/go$ fmt.Println" limits to specific repos
-- Language filters: "lang:go fmt.Println" limits to Go code
-- Boolean operators: "fmt.Println AND log.Fatal" for combined terms
-- Regular expressions: "fmt\.(Print|Printf|Println)" for pattern matching
-- Quoted strings: "\"exact phrase\"" for exact phrase matching
-- Exclude filters: "-file:test" or "-repo:forks" to exclude matches
-
-ADVANCED FILTERS:
-- Repository filters:
- * "repo:name" - Match repositories with name containing "name"
- * "repo:^github\.com/org/repo$" - Exact repository match
- * "repo:org/repo@branch" - Search specific branch
- * "repo:org/repo rev:branch" - Alternative branch syntax
- * "-repo:name" - Exclude repositories
- * "fork:yes" or "fork:only" - Include or only show forks
- * "archived:yes" or "archived:only" - Include or only show archived repos
- * "visibility:public" or "visibility:private" - Filter by visibility
-
-- File filters:
- * "file:\.js$" - Files with .js extension
- * "file:internal/" - Files in internal directory
- * "-file:test" - Exclude test files
- * "file:has.content(Copyright)" - Files containing "Copyright"
- * "file:has.contributor([email protected])" - Files with specific contributor
-
-- Content filters:
- * "content:\"exact string\"" - Search for exact string
- * "-content:\"unwanted\"" - Exclude files with unwanted content
- * "case:yes" - Case-sensitive search
-
-- Type filters:
- * "type:symbol" - Search for symbols (functions, classes, etc.)
- * "type:file" - Search file content only
- * "type:path" - Search filenames only
- * "type:diff" - Search code changes
- * "type:commit" - Search commit messages
-
-- Commit/diff search:
- * "after:\"1 month ago\"" - Commits after date
- * "before:\"2023-01-01\"" - Commits before date
- * "author:name" - Commits by author
- * "message:\"fix bug\"" - Commits with message
-
-- Result selection:
- * "select:repo" - Show only repository names
- * "select:file" - Show only file paths
- * "select:content" - Show only matching content
- * "select:symbol" - Show only matching symbols
-
-- Result control:
- * "count:100" - Return up to 100 results
- * "count:all" - Return all results
- * "timeout:30s" - Set search timeout
-
-EXAMPLES:
-- "file:.go context.WithTimeout" - Find Go code using context.WithTimeout
-- "lang:typescript useState type:symbol" - Find TypeScript React useState hooks
-- "repo:^github\.com/kubernetes/kubernetes$ pod list type:file" - Find Kubernetes files related to pod listing
-- "repo:sourcegraph/sourcegraph$ after:\"3 months ago\" type:diff database" - Recent changes to database code
-- "file:Dockerfile (alpine OR ubuntu) -content:alpine:latest" - Dockerfiles with specific base images
-- "repo:has.path(\.py) file:requirements.txt tensorflow" - Python projects using TensorFlow
-
-BOOLEAN OPERATORS:
-- "term1 AND term2" - Results containing both terms
-- "term1 OR term2" - Results containing either term
-- "term1 NOT term2" - Results with term1 but not term2
-- "term1 and (term2 or term3)" - Grouping with parentheses
-
-LIMITATIONS:
-- Only searches public repositories
-- Rate limits may apply
-- Complex queries may take longer to execute
-- Maximum of 20 results per query
-
-TIPS:
-- Use specific file extensions to narrow results
-- Add repo: filters for more targeted searches
-- Use type:symbol to find function/method definitions
-- Use type:file to find relevant files`
-)
+const SourcegraphToolName = "sourcegraph"
+
+//go:embed sourcegraph.md
+var sourcegraphDescription []byte
func NewSourcegraphTool() BaseTool {
return &sourcegraphTool{
@@ -145,7 +53,7 @@ func (t *sourcegraphTool) Name() string {
func (t *sourcegraphTool) Info() ToolInfo {
return ToolInfo{
Name: SourcegraphToolName,
- Description: sourcegraphToolDescription,
+ Description: string(sourcegraphDescription),
Parameters: map[string]any{
"query": map[string]any{
"type": "string",
diff --git a/internal/llm/tools/sourcegraph.md b/internal/llm/tools/sourcegraph.md
new file mode 100644
index 0000000000000000000000000000000000000000..ec6610d7af429454783282061252a5b40191f5e5
--- /dev/null
+++ b/internal/llm/tools/sourcegraph.md
@@ -0,0 +1,102 @@
+Search code across public repositories using Sourcegraph's GraphQL API.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to find code examples or implementations across public repositories
+- Helpful for researching how others have solved similar problems
+- Useful for discovering patterns and best practices in open source code
+
+HOW TO USE:
+
+- Provide a search query using Sourcegraph's query syntax
+- Optionally specify the number of results to return (default: 10)
+- Optionally set a timeout for the request
+
+QUERY SYNTAX:
+
+- Basic search: "fmt.Println" searches for exact matches
+- File filters: "file:.go fmt.Println" limits to Go files
+- Repository filters: "repo:^github\.com/golang/go$ fmt.Println" limits to specific repos
+- Language filters: "lang:go fmt.Println" limits to Go code
+- Boolean operators: "fmt.Println AND log.Fatal" for combined terms
+- Regular expressions: "fmt\.(Print|Printf|Println)" for pattern matching
+- Quoted strings: "\"exact phrase\"" for exact phrase matching
+- Exclude filters: "-file:test" or "-repo:forks" to exclude matches
+
+ADVANCED FILTERS:
+
+- Repository filters:
+ - "repo:name" - Match repositories with name containing "name"
+ - "repo:^github\.com/org/repo$" - Exact repository match
+ - "repo:org/repo@branch" - Search specific branch
+ - "repo:org/repo rev:branch" - Alternative branch syntax
+ - "-repo:name" - Exclude repositories
+ - "fork:yes" or "fork:only" - Include or only show forks
+ - "archived:yes" or "archived:only" - Include or only show archived repos
+ - "visibility:public" or "visibility:private" - Filter by visibility
+
+- File filters:
+ - "file:\.js$" - Files with .js extension
+ - "file:internal/" - Files in internal directory
+ - "-file:test" - Exclude test files
+ - "file:has.content(Copyright)" - Files containing "Copyright"
+ - "file:has.contributor([email protected])" - Files with specific contributor
+
+- Content filters:
+ - "content:\"exact string\"" - Search for exact string
+ - "-content:\"unwanted\"" - Exclude files with unwanted content
+ - "case:yes" - Case-sensitive search
+
+- Type filters:
+ - "type:symbol" - Search for symbols (functions, classes, etc.)
+ - "type:file" - Search file content only
+ - "type:path" - Search filenames only
+ - "type:diff" - Search code changes
+ - "type:commit" - Search commit messages
+
+- Commit/diff search:
+ - "after:\"1 month ago\"" - Commits after date
+ - "before:\"2023-01-01\"" - Commits before date
+ - "author:name" - Commits by author
+ - "message:\"fix bug\"" - Commits with message
+
+- Result selection:
+ - "select:repo" - Show only repository names
+ - "select:file" - Show only file paths
+ - "select:content" - Show only matching content
+ - "select:symbol" - Show only matching symbols
+
+- Result control:
+ - "count:100" - Return up to 100 results
+ - "count:all" - Return all results
+ - "timeout:30s" - Set search timeout
+
+EXAMPLES:
+
+- "file:.go context.WithTimeout" - Find Go code using context.WithTimeout
+- "lang:typescript useState type:symbol" - Find TypeScript React useState hooks
+- "repo:^github\.com/kubernetes/kubernetes$ pod list type:file" - Find Kubernetes files related to pod listing
+- "repo:sourcegraph/sourcegraph$ after:\"3 months ago\" type:diff database" - Recent changes to database code
+- "file:Dockerfile (alpine OR ubuntu) -content:alpine:latest" - Dockerfiles with specific base images
+- "repo:has.path(\.py) file:requirements.txt tensorflow" - Python projects using TensorFlow
+
+BOOLEAN OPERATORS:
+
+- "term1 AND term2" - Results containing both terms
+- "term1 OR term2" - Results containing either term
+- "term1 NOT term2" - Results with term1 but not term2
+- "term1 and (term2 or term3)" - Grouping with parentheses
+
+LIMITATIONS:
+
+- Only searches public repositories
+- Rate limits may apply
+- Complex queries may take longer to execute
+- Maximum of 20 results per query
+
+TIPS:
+
+- Use specific file extensions to narrow results
+- Add repo: filters for more targeted searches
+- Use type:symbol to find function/method definitions
+- Use type:file to find relevant files
diff --git a/internal/llm/tools/view.go b/internal/llm/tools/view.go
index ee1fd6614b3fee0a0c3d65c433bb6d9e1dd6489c..c3824bf1b1cc11dc22c57d60eb72654999a5e970 100644
--- a/internal/llm/tools/view.go
+++ b/internal/llm/tools/view.go
@@ -3,6 +3,7 @@ package tools
import (
"bufio"
"context"
+ _ "embed"
"encoding/json"
"fmt"
"io"
@@ -11,10 +12,14 @@ import (
"strings"
"unicode/utf8"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/lsp"
"github.com/charmbracelet/crush/internal/permission"
)
+//go:embed view.md
+var viewDescription []byte
+
type ViewParams struct {
FilePath string `json:"file_path"`
Offset int `json:"offset"`
@@ -28,7 +33,7 @@ type ViewPermissionsParams struct {
}
type viewTool struct {
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
workingDir string
permissions permission.Service
}
@@ -43,45 +48,9 @@ const (
MaxReadSize = 250 * 1024
DefaultReadLimit = 2000
MaxLineLength = 2000
- viewDescription = `File viewing tool that reads and displays the contents of files with line numbers, allowing you to examine code, logs, or text data.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to read the contents of a specific file
-- Helpful for examining source code, configuration files, or log files
-- Perfect for looking at text-based file formats
-
-HOW TO USE:
-- Provide the path to the file you want to view
-- Optionally specify an offset to start reading from a specific line
-- Optionally specify a limit to control how many lines are read
-- Do not use this for directories use the ls tool instead
-
-FEATURES:
-- Displays file contents with line numbers for easy reference
-- Can read from any position in a file using the offset parameter
-- Handles large files by limiting the number of lines read
-- Automatically truncates very long lines for better display
-- Suggests similar file names when the requested file isn't found
-
-LIMITATIONS:
-- Maximum file size is 250KB
-- Default reading limit is 2000 lines
-- Lines longer than 2000 characters are truncated
-- Cannot display binary files or images
-- Images can be identified but not displayed
-
-WINDOWS NOTES:
-- Handles both Windows (CRLF) and Unix (LF) line endings automatically
-- File paths work with both forward slashes (/) and backslashes (\)
-- Text encoding is detected automatically for most common formats
-
-TIPS:
-- Use with Glob tool to first find files you want to view
-- For code exploration, first use Grep to find relevant files, then View to examine them
-- When viewing large files, use the offset parameter to read specific sections`
)
-func NewViewTool(lspClients map[string]*lsp.Client, permissions permission.Service, workingDir string) BaseTool {
+func NewViewTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, workingDir string) BaseTool {
return &viewTool{
lspClients: lspClients,
workingDir: workingDir,
@@ -96,7 +65,7 @@ func (v *viewTool) Name() string {
func (v *viewTool) Info() ToolInfo {
return ToolInfo{
Name: ViewToolName,
- Description: viewDescription,
+ Description: string(viewDescription),
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
@@ -233,7 +202,7 @@ func (v *viewTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error)
return ToolResponse{}, fmt.Errorf("error reading file: %w", err)
}
- notifyLspOpenFile(ctx, filePath, v.lspClients)
+ notifyLSPs(ctx, v.lspClients, filePath)
output := "\n"
// Format the output with line numbers
output += addLineNumbers(content, params.Offset+1)
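The view tool now holds its LSP clients in the internal csync.Map instead of a plain map. That type is not part of this patch; the sketch below only illustrates what a generic, mutex-guarded map with the Get/Set/Del calls seen in these hunks typically looks like, and is not the real csync implementation (which, per later hunks, also exposes a Seq2 iterator):

```go
// Illustrative concurrent map sketch; not the repository's csync.Map.
package csyncsketch

import "sync"

type Map[K comparable, V any] struct {
	mu sync.RWMutex
	m  map[K]V
}

func NewMap[K comparable, V any]() *Map[K, V] {
	return &Map[K, V]{m: make(map[K]V)}
}

// Get returns the value and whether the key was present.
func (c *Map[K, V]) Get(k K) (V, bool) {
	c.mu.RLock()
	defer c.mu.RUnlock()
	v, ok := c.m[k]
	return v, ok
}

// Set stores a value under the key.
func (c *Map[K, V]) Set(k K, v V) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.m[k] = v
}

// Del removes the key if present.
func (c *Map[K, V]) Del(k K) {
	c.mu.Lock()
	defer c.mu.Unlock()
	delete(c.m, k)
}
```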
diff --git a/internal/llm/tools/view.md b/internal/llm/tools/view.md
new file mode 100644
index 0000000000000000000000000000000000000000..2179b77817b3e9a98ca7077bf8430abdde597392
--- /dev/null
+++ b/internal/llm/tools/view.md
@@ -0,0 +1,42 @@
+File viewing tool that reads and displays the contents of files with line numbers, allowing you to examine code, logs, or text data.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to read the contents of a specific file
+- Helpful for examining source code, configuration files, or log files
+- Perfect for looking at text-based file formats
+
+HOW TO USE:
+
+- Provide the path to the file you want to view
+- Optionally specify an offset to start reading from a specific line
+- Optionally specify a limit to control how many lines are read
+- Do not use this for directories; use the ls tool instead
+
+FEATURES:
+
+- Displays file contents with line numbers for easy reference
+- Can read from any position in a file using the offset parameter
+- Handles large files by limiting the number of lines read
+- Automatically truncates very long lines for better display
+- Suggests similar file names when the requested file isn't found
+
+LIMITATIONS:
+
+- Maximum file size is 250KB
+- Default reading limit is 2000 lines
+- Lines longer than 2000 characters are truncated
+- Cannot display binary files or images
+- Images can be identified but not displayed
+
+WINDOWS NOTES:
+
+- Handles both Windows (CRLF) and Unix (LF) line endings automatically
+- File paths work with both forward slashes (/) and backslashes (\)
+- Text encoding is detected automatically for most common formats
+
+TIPS:
+
+- Use with Glob tool to first find files you want to view
+- For code exploration, first use Grep to find relevant files, then View to examine them
+- When viewing large files, use the offset parameter to read specific sections
diff --git a/internal/llm/tools/write.go b/internal/llm/tools/write.go
index d719337a971cb09babd47753444d761586806fdd..36ea13d2dd6e1930eb645ab036c950b9a4d6fdc8 100644
--- a/internal/llm/tools/write.go
+++ b/internal/llm/tools/write.go
@@ -2,6 +2,7 @@ package tools
import (
"context"
+ _ "embed"
"encoding/json"
"fmt"
"log/slog"
@@ -10,6 +11,7 @@ import (
"strings"
"time"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/diff"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/history"
@@ -18,6 +20,9 @@ import (
"github.com/charmbracelet/crush/internal/permission"
)
+//go:embed write.md
+var writeDescription []byte
+
type WriteParams struct {
FilePath string `json:"file_path"`
Content string `json:"content"`
@@ -30,7 +35,7 @@ type WritePermissionsParams struct {
}
type writeTool struct {
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
permissions permission.Service
files history.Service
workingDir string
@@ -42,43 +47,9 @@ type WriteResponseMetadata struct {
Removals int `json:"removals"`
}
-const (
- WriteToolName = "write"
- writeDescription = `File writing tool that creates or updates files in the filesystem, allowing you to save or modify text content.
-
-WHEN TO USE THIS TOOL:
-- Use when you need to create a new file
-- Helpful for updating existing files with modified content
-- Perfect for saving generated code, configurations, or text data
-
-HOW TO USE:
-- Provide the path to the file you want to write
-- Include the content to be written to the file
-- The tool will create any necessary parent directories
-
-FEATURES:
-- Can create new files or overwrite existing ones
-- Creates parent directories automatically if they don't exist
-- Checks if the file has been modified since last read for safety
-- Avoids unnecessary writes when content hasn't changed
-
-LIMITATIONS:
-- You should read a file before writing to it to avoid conflicts
-- Cannot append to files (rewrites the entire file)
-
-WINDOWS NOTES:
-- File permissions (0o755, 0o644) are Unix-style but work on Windows with appropriate translations
-- Use forward slashes (/) in paths for cross-platform compatibility
-- Windows file attributes and permissions are handled automatically by the Go runtime
-
-TIPS:
-- Use the View tool first to examine existing files before modifying them
-- Use the LS tool to verify the correct location when creating new files
-- Combine with Glob and Grep tools to find and modify multiple files
-- Always include descriptive comments when making changes to existing code`
-)
+const WriteToolName = "write"
-func NewWriteTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service, workingDir string) BaseTool {
+func NewWriteTool(lspClients *csync.Map[string, *lsp.Client], permissions permission.Service, files history.Service, workingDir string) BaseTool {
return &writeTool{
lspClients: lspClients,
permissions: permissions,
@@ -94,7 +65,7 @@ func (w *writeTool) Name() string {
func (w *writeTool) Info() ToolInfo {
return ToolInfo{
Name: WriteToolName,
- Description: writeDescription,
+ Description: string(writeDescription),
Parameters: map[string]any{
"file_path": map[string]any{
"type": "string",
@@ -221,7 +192,8 @@ func (w *writeTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error
recordFileWrite(filePath)
recordFileRead(filePath)
- waitForLspDiagnostics(ctx, filePath, w.lspClients)
+
+ notifyLSPs(ctx, w.lspClients, params.FilePath)
result := fmt.Sprintf("File successfully written: %s", filePath)
result = fmt.Sprintf("\n%s\n", result)
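Both view.go and write.go now call a single notifyLSPs helper, whose body is not included in this patch. Below is a hedged sketch of what such a fan-out could look like, built only from the client methods visible further down (HandlesFile, OpenFileOnDemand, NotifyChange); the real helper may behave differently:

```go
package tools

import (
	"context"

	"github.com/charmbracelet/crush/internal/csync"
	"github.com/charmbracelet/crush/internal/lsp"
)

// notifyLSPs (sketch, not the repository's implementation): fan the changed
// path out to every LSP client that handles this file type.
func notifyLSPs(ctx context.Context, clients *csync.Map[string, *lsp.Client], path string) {
	for _, client := range clients.Seq2() {
		if !client.HandlesFile(path) {
			continue
		}
		// OpenFileOnDemand is a no-op if the file is already open (see client.go below).
		if err := client.OpenFileOnDemand(ctx, path); err != nil {
			continue
		}
		// Tell the server the contents changed; errors are non-fatal here.
		_ = client.NotifyChange(ctx, path)
	}
}
```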
diff --git a/internal/llm/tools/write.md b/internal/llm/tools/write.md
new file mode 100644
index 0000000000000000000000000000000000000000..b8cc811ffe6234e7ebfd34037de57bd483305738
--- /dev/null
+++ b/internal/llm/tools/write.md
@@ -0,0 +1,38 @@
+File writing tool that creates or updates files in the filesystem, allowing you to save or modify text content.
+
+WHEN TO USE THIS TOOL:
+
+- Use when you need to create a new file
+- Helpful for updating existing files with modified content
+- Perfect for saving generated code, configurations, or text data
+
+HOW TO USE:
+
+- Provide the path to the file you want to write
+- Include the content to be written to the file
+- The tool will create any necessary parent directories
+
+FEATURES:
+
+- Can create new files or overwrite existing ones
+- Creates parent directories automatically if they don't exist
+- Checks if the file has been modified since last read for safety
+- Avoids unnecessary writes when content hasn't changed
+
+LIMITATIONS:
+
+- You should read a file before writing to it to avoid conflicts
+- Cannot append to files (rewrites the entire file)
+
+WINDOWS NOTES:
+
+- File permissions (0o755, 0o644) are Unix-style but work on Windows with appropriate translations
+- Use forward slashes (/) in paths for cross-platform compatibility
+- Windows file attributes and permissions are handled automatically by the Go runtime
+
+TIPS:
+
+- Use the View tool first to examine existing files before modifying them
+- Use the LS tool to verify the correct location when creating new files
+- Combine with Glob and Grep tools to find and modify multiple files
+- Always include descriptive comments when making changes to existing code
diff --git a/internal/log/http.go b/internal/log/http.go
index 2c74bd05201ad9cbd8d60c6e3c3db3f637fb99b3..46c4b42af599f1809478a5c3f083c6249a3e13d0 100644
--- a/internal/log/http.go
+++ b/internal/log/http.go
@@ -2,7 +2,6 @@ package log
import (
"bytes"
- "context"
"encoding/json"
"io"
"log/slog"
@@ -13,9 +12,6 @@ import (
// NewHTTPClient creates an HTTP client with debug logging enabled when debug mode is on.
func NewHTTPClient() *http.Client {
- if !slog.Default().Enabled(context.TODO(), slog.LevelDebug) {
- return http.DefaultClient
- }
return &http.Client{
Transport: &HTTPRoundTripLogger{
Transport: http.DefaultTransport,
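With the debug gate removed, NewHTTPClient always wraps http.DefaultTransport in HTTPRoundTripLogger. The logger itself is defined elsewhere in internal/log; the following is only an illustrative http.RoundTripper wrapper reusing the same field name, not the actual implementation:

```go
package logsketch

import (
	"log/slog"
	"net/http"
	"time"
)

// HTTPRoundTripLogger is an illustrative round-trip logger; the real
// internal/log version may log request/response bodies and more detail.
type HTTPRoundTripLogger struct {
	Transport http.RoundTripper
}

// RoundTrip logs each request's method, URL, status, and duration.
func (l *HTTPRoundTripLogger) RoundTrip(req *http.Request) (*http.Response, error) {
	start := time.Now()
	resp, err := l.Transport.RoundTrip(req)
	if err != nil {
		slog.Debug("http request failed", "method", req.Method, "url", req.URL.String(), "error", err)
		return nil, err
	}
	slog.Debug("http request", "method", req.Method, "url", req.URL.String(),
		"status", resp.StatusCode, "duration", time.Since(start))
	return resp, nil
}
```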
diff --git a/internal/log/log.go b/internal/log/log.go
index bf99fe60fa9a5015029af171adfd6b3f9bf5596b..9463c3bd97956da3ab895b8600f79d1c05790844 100644
--- a/internal/log/log.go
+++ b/internal/log/log.go
@@ -9,6 +9,7 @@ import (
"sync/atomic"
"time"
+ "github.com/charmbracelet/crush/internal/event"
"gopkg.in/natefinch/lumberjack.v2"
)
@@ -48,6 +49,8 @@ func Initialized() bool {
func RecoverPanic(name string, cleanup func()) {
if r := recover(); r != nil {
+ event.Error(r, "panic", true, "name", name)
+
// Create a timestamped panic log file
timestamp := time.Now().Format("20060102-150405")
filename := fmt.Sprintf("crush-panic-%s-%s.log", name, timestamp)
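RecoverPanic now also emits an event before writing the crash log. Its usage is unchanged and mirrors the deferred call in the old LSP message-handler goroutine removed further down; a small usage sketch (the panicking body is a placeholder):

```go
package main

import (
	"log/slog"

	"github.com/charmbracelet/crush/internal/log"
)

func main() {
	done := make(chan struct{})
	go func() {
		defer close(done)
		// RecoverPanic must be deferred inside the goroutine it protects; it
		// now also reports the panic as an event before writing crush-panic-*.log.
		defer log.RecoverPanic("worker", func() {
			slog.Error("worker crashed")
		})
		panic("boom") // placeholder to exercise the recovery path
	}()
	<-done
}
```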
diff --git a/internal/lsp/caps.go b/internal/lsp/caps.go
deleted file mode 100644
index 7edc0886f72a92183a8570e45db74218e3aead47..0000000000000000000000000000000000000000
--- a/internal/lsp/caps.go
+++ /dev/null
@@ -1,112 +0,0 @@
-package lsp
-
-import "github.com/charmbracelet/crush/internal/lsp/protocol"
-
-func (c *Client) setCapabilities(caps protocol.ServerCapabilities) {
- c.capsMu.Lock()
- defer c.capsMu.Unlock()
- c.caps = caps
- c.capsSet.Store(true)
-}
-
-func (c *Client) getCapabilities() (protocol.ServerCapabilities, bool) {
- c.capsMu.RLock()
- defer c.capsMu.RUnlock()
- return c.caps, c.capsSet.Load()
-}
-
-func (c *Client) IsMethodSupported(method string) bool {
- // Always allow core lifecycle and generic methods
- switch method {
- case "initialize", "shutdown", "exit", "$/cancelRequest":
- return true
- }
-
- caps, ok := c.getCapabilities()
- if !ok {
- // caps not set yet, be permissive
- return true
- }
-
- switch method {
- case "textDocument/hover":
- return caps.HoverProvider != nil
- case "textDocument/definition":
- return caps.DefinitionProvider != nil
- case "textDocument/references":
- return caps.ReferencesProvider != nil
- case "textDocument/implementation":
- return caps.ImplementationProvider != nil
- case "textDocument/typeDefinition":
- return caps.TypeDefinitionProvider != nil
- case "textDocument/documentColor", "textDocument/colorPresentation":
- return caps.ColorProvider != nil
- case "textDocument/foldingRange":
- return caps.FoldingRangeProvider != nil
- case "textDocument/declaration":
- return caps.DeclarationProvider != nil
- case "textDocument/selectionRange":
- return caps.SelectionRangeProvider != nil
- case "textDocument/prepareCallHierarchy", "callHierarchy/incomingCalls", "callHierarchy/outgoingCalls":
- return caps.CallHierarchyProvider != nil
- case "textDocument/semanticTokens/full", "textDocument/semanticTokens/full/delta", "textDocument/semanticTokens/range":
- return caps.SemanticTokensProvider != nil
- case "textDocument/linkedEditingRange":
- return caps.LinkedEditingRangeProvider != nil
- case "workspace/willCreateFiles":
- return caps.Workspace != nil && caps.Workspace.FileOperations != nil && caps.Workspace.FileOperations.WillCreate != nil
- case "workspace/willRenameFiles":
- return caps.Workspace != nil && caps.Workspace.FileOperations != nil && caps.Workspace.FileOperations.WillRename != nil
- case "workspace/willDeleteFiles":
- return caps.Workspace != nil && caps.Workspace.FileOperations != nil && caps.Workspace.FileOperations.WillDelete != nil
- case "textDocument/moniker":
- return caps.MonikerProvider != nil
- case "textDocument/prepareTypeHierarchy", "typeHierarchy/supertypes", "typeHierarchy/subtypes":
- return caps.TypeHierarchyProvider != nil
- case "textDocument/inlineValue":
- return caps.InlineValueProvider != nil
- case "textDocument/inlayHint", "inlayHint/resolve":
- return caps.InlayHintProvider != nil
- case "textDocument/diagnostic", "workspace/diagnostic":
- return caps.DiagnosticProvider != nil
- case "textDocument/inlineCompletion":
- return caps.InlineCompletionProvider != nil
- case "workspace/textDocumentContent":
- return caps.Workspace != nil && caps.Workspace.TextDocumentContent != nil
- case "textDocument/willSaveWaitUntil":
- if caps.TextDocumentSync == nil {
- return false
- }
- return true
- case "textDocument/completion", "completionItem/resolve":
- return caps.CompletionProvider != nil
- case "textDocument/signatureHelp":
- return caps.SignatureHelpProvider != nil
- case "textDocument/documentHighlight":
- return caps.DocumentHighlightProvider != nil
- case "textDocument/documentSymbol":
- return caps.DocumentSymbolProvider != nil
- case "textDocument/codeAction", "codeAction/resolve":
- return caps.CodeActionProvider != nil
- case "workspace/symbol", "workspaceSymbol/resolve":
- return caps.WorkspaceSymbolProvider != nil
- case "textDocument/codeLens", "codeLens/resolve":
- return caps.CodeLensProvider != nil
- case "textDocument/documentLink", "documentLink/resolve":
- return caps.DocumentLinkProvider != nil
- case "textDocument/formatting":
- return caps.DocumentFormattingProvider != nil
- case "textDocument/rangeFormatting":
- return caps.DocumentRangeFormattingProvider != nil
- case "textDocument/rangesFormatting":
- return caps.DocumentRangeFormattingProvider != nil
- case "textDocument/onTypeFormatting":
- return caps.DocumentOnTypeFormattingProvider != nil
- case "textDocument/rename", "textDocument/prepareRename":
- return caps.RenameProvider != nil
- case "workspace/executeCommand":
- return caps.ExecuteCommandProvider != nil
- default:
- return true
- }
-}
diff --git a/internal/lsp/client.go b/internal/lsp/client.go
index e09a6a446db2f62476e072c79daadd2d832f895b..ff9a3ac9b5249663c151fb2df04a4acb168e4de4 100644
--- a/internal/lsp/client.go
+++ b/internal/lsp/client.go
@@ -1,291 +1,166 @@
package lsp
import (
- "bufio"
"context"
"encoding/json"
"fmt"
- "io"
"log/slog"
"maps"
"os"
- "os/exec"
"path/filepath"
- "slices"
"strings"
- "sync"
"sync/atomic"
"time"
"github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/log"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
+ "github.com/charmbracelet/crush/internal/csync"
+ "github.com/charmbracelet/crush/internal/fsext"
+ "github.com/charmbracelet/crush/internal/home"
+ powernap "github.com/charmbracelet/x/powernap/pkg/lsp"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
+ "github.com/charmbracelet/x/powernap/pkg/transport"
)
type Client struct {
- Cmd *exec.Cmd
- stdin io.WriteCloser
- stdout *bufio.Reader
- stderr io.ReadCloser
-
- // Client name for identification
- name string
+ client *powernap.Client
+ name string
// File types this LSP server handles (e.g., .go, .rs, .py)
fileTypes []string
+ // Configuration for this LSP client
+ config config.LSPConfig
+
// Diagnostic change callback
onDiagnosticsChanged func(name string, count int)
- // Request ID counter
- nextID atomic.Int32
-
- // Response handlers
- handlers map[int32]chan *Message
- handlersMu sync.RWMutex
-
- // Server request handlers
- serverRequestHandlers map[string]ServerRequestHandler
- serverHandlersMu sync.RWMutex
-
- // Notification handlers
- notificationHandlers map[string]NotificationHandler
- notificationMu sync.RWMutex
-
// Diagnostic cache
- diagnostics map[protocol.DocumentURI][]protocol.Diagnostic
- diagnosticsMu sync.RWMutex
+ diagnostics *csync.VersionedMap[protocol.DocumentURI, []protocol.Diagnostic]
// Files are currently opened by the LSP
- openFiles map[string]*OpenFileInfo
- openFilesMu sync.RWMutex
+ openFiles *csync.Map[string, *OpenFileInfo]
// Server state
serverState atomic.Value
-
- // Server capabilities as returned by initialize
- caps protocol.ServerCapabilities
- capsMu sync.RWMutex
- capsSet atomic.Bool
}
-// NewClient creates a new LSP client.
-func NewClient(ctx context.Context, name string, config config.LSPConfig) (*Client, error) {
- cmd := exec.CommandContext(ctx, config.Command, config.Args...)
-
- // Copy env
- cmd.Env = slices.Concat(os.Environ(), config.ResolvedEnv())
-
- stdin, err := cmd.StdinPipe()
+// New creates a new LSP client using the powernap implementation.
+func New(ctx context.Context, name string, config config.LSPConfig, resolver config.VariableResolver) (*Client, error) {
+ // Convert working directory to file URI
+ workDir, err := os.Getwd()
if err != nil {
- return nil, fmt.Errorf("failed to create stdin pipe: %w", err)
+ return nil, fmt.Errorf("failed to get working directory: %w", err)
}
- stdout, err := cmd.StdoutPipe()
+ rootURI := string(protocol.URIFromPath(workDir))
+
+ command, err := resolver.ResolveValue(config.Command)
if err != nil {
- return nil, fmt.Errorf("failed to create stdout pipe: %w", err)
+ return nil, fmt.Errorf("invalid lsp command: %w", err)
+ }
+
+ // Create powernap client config
+ clientConfig := powernap.ClientConfig{
+ Command: home.Long(command),
+ Args: config.Args,
+ RootURI: rootURI,
+ Environment: func() map[string]string {
+ env := make(map[string]string)
+ maps.Copy(env, config.Env)
+ return env
+ }(),
+ Settings: config.Options,
+ InitOptions: config.InitOptions,
+ WorkspaceFolders: []protocol.WorkspaceFolder{
+ {
+ URI: rootURI,
+ Name: filepath.Base(workDir),
+ },
+ },
}
- stderr, err := cmd.StderrPipe()
+ // Create the powernap client
+ powernapClient, err := powernap.NewClient(clientConfig)
if err != nil {
- return nil, fmt.Errorf("failed to create stderr pipe: %w", err)
+ return nil, fmt.Errorf("failed to create lsp client: %w", err)
}
client := &Client{
- Cmd: cmd,
- name: name,
- fileTypes: config.FileTypes,
- stdin: stdin,
- stdout: bufio.NewReader(stdout),
- stderr: stderr,
- handlers: make(map[int32]chan *Message),
- notificationHandlers: make(map[string]NotificationHandler),
- serverRequestHandlers: make(map[string]ServerRequestHandler),
- diagnostics: make(map[protocol.DocumentURI][]protocol.Diagnostic),
- openFiles: make(map[string]*OpenFileInfo),
+ client: powernapClient,
+ name: name,
+ fileTypes: config.FileTypes,
+ diagnostics: csync.NewVersionedMap[protocol.DocumentURI, []protocol.Diagnostic](),
+ openFiles: csync.NewMap[string, *OpenFileInfo](),
+ config: config,
}
// Initialize server state
client.serverState.Store(StateStarting)
- // Start the LSP server process
- if err := cmd.Start(); err != nil {
- return nil, fmt.Errorf("failed to start LSP server: %w", err)
- }
-
- // Handle stderr in a separate goroutine
- go func() {
- scanner := bufio.NewScanner(stderr)
- for scanner.Scan() {
- slog.Error("LSP Server", "err", scanner.Text())
- }
- if err := scanner.Err(); err != nil {
- slog.Error("Error reading", "err", err)
- }
- }()
-
- // Start message handling loop
- go func() {
- defer log.RecoverPanic("LSP-message-handler", func() {
- slog.Error("LSP message handler crashed, LSP functionality may be impaired")
- })
- client.handleMessages()
- }()
-
return client, nil
}
-func (c *Client) RegisterNotificationHandler(method string, handler NotificationHandler) {
- c.notificationMu.Lock()
- defer c.notificationMu.Unlock()
- c.notificationHandlers[method] = handler
-}
-
-func (c *Client) RegisterServerRequestHandler(method string, handler ServerRequestHandler) {
- c.serverHandlersMu.Lock()
- defer c.serverHandlersMu.Unlock()
- c.serverRequestHandlers[method] = handler
-}
-
-func (c *Client) InitializeLSPClient(ctx context.Context, workspaceDir string) (*protocol.InitializeResult, error) {
- initParams := protocol.ParamInitialize{
- WorkspaceFoldersInitializeParams: protocol.WorkspaceFoldersInitializeParams{
- WorkspaceFolders: []protocol.WorkspaceFolder{
- {
- URI: protocol.URI(protocol.URIFromPath(workspaceDir)),
- Name: workspaceDir,
- },
- },
- },
-
- XInitializeParams: protocol.XInitializeParams{
- ProcessID: int32(os.Getpid()),
- ClientInfo: &protocol.ClientInfo{
- Name: "mcp-language-server",
- Version: "0.1.0",
- },
- RootPath: workspaceDir,
- RootURI: protocol.URIFromPath(workspaceDir),
- Capabilities: protocol.ClientCapabilities{
- Workspace: protocol.WorkspaceClientCapabilities{
- Configuration: true,
- DidChangeConfiguration: protocol.DidChangeConfigurationClientCapabilities{
- DynamicRegistration: true,
- },
- DidChangeWatchedFiles: protocol.DidChangeWatchedFilesClientCapabilities{
- DynamicRegistration: true,
- RelativePatternSupport: true,
- },
- },
- TextDocument: protocol.TextDocumentClientCapabilities{
- Synchronization: &protocol.TextDocumentSyncClientCapabilities{
- DynamicRegistration: true,
- DidSave: true,
- },
- Completion: protocol.CompletionClientCapabilities{
- CompletionItem: protocol.ClientCompletionItemOptions{},
- },
- CodeLens: &protocol.CodeLensClientCapabilities{
- DynamicRegistration: true,
- },
- DocumentSymbol: protocol.DocumentSymbolClientCapabilities{},
- CodeAction: protocol.CodeActionClientCapabilities{
- CodeActionLiteralSupport: protocol.ClientCodeActionLiteralOptions{
- CodeActionKind: protocol.ClientCodeActionKindOptions{
- ValueSet: []protocol.CodeActionKind{},
- },
- },
- },
- PublishDiagnostics: protocol.PublishDiagnosticsClientCapabilities{
- VersionSupport: true,
- },
- SemanticTokens: protocol.SemanticTokensClientCapabilities{
- Requests: protocol.ClientSemanticTokensRequestOptions{
- Range: &protocol.Or_ClientSemanticTokensRequestOptions_range{},
- Full: &protocol.Or_ClientSemanticTokensRequestOptions_full{},
- },
- TokenTypes: []string{},
- TokenModifiers: []string{},
- Formats: []protocol.TokenFormat{},
- },
- },
- Window: protocol.WindowClientCapabilities{},
- },
- InitializationOptions: map[string]any{
- "codelenses": map[string]bool{
- "generate": true,
- "regenerate_cgo": true,
- "test": true,
- "tidy": true,
- "upgrade_dependency": true,
- "vendor": true,
- "vulncheck": false,
- },
- },
- },
- }
-
- result, err := c.Initialize(ctx, initParams)
- if err != nil {
- return nil, fmt.Errorf("initialize failed: %w", err)
+// Initialize initializes the LSP client and returns the server capabilities.
+func (c *Client) Initialize(ctx context.Context, workspaceDir string) (*protocol.InitializeResult, error) {
+ if err := c.client.Initialize(ctx, false); err != nil {
+ return nil, fmt.Errorf("failed to initialize the lsp client: %w", err)
+ }
+
+ // Convert powernap capabilities to protocol capabilities
+ caps := c.client.GetCapabilities()
+ protocolCaps := protocol.ServerCapabilities{
+ TextDocumentSync: caps.TextDocumentSync,
+ CompletionProvider: func() *protocol.CompletionOptions {
+ if caps.CompletionProvider != nil {
+ return &protocol.CompletionOptions{
+ TriggerCharacters: caps.CompletionProvider.TriggerCharacters,
+ AllCommitCharacters: caps.CompletionProvider.AllCommitCharacters,
+ ResolveProvider: caps.CompletionProvider.ResolveProvider,
+ }
+ }
+ return nil
+ }(),
}
- c.setCapabilities(result.Capabilities)
-
- if err := c.Initialized(ctx, protocol.InitializedParams{}); err != nil {
- return nil, fmt.Errorf("initialized notification failed: %w", err)
+ result := &protocol.InitializeResult{
+ Capabilities: protocolCaps,
}
- // Register handlers
c.RegisterServerRequestHandler("workspace/applyEdit", HandleApplyEdit)
c.RegisterServerRequestHandler("workspace/configuration", HandleWorkspaceConfiguration)
c.RegisterServerRequestHandler("client/registerCapability", HandleRegisterCapability)
c.RegisterNotificationHandler("window/showMessage", HandleServerMessage)
- c.RegisterNotificationHandler("textDocument/publishDiagnostics", func(params json.RawMessage) {
+ c.RegisterNotificationHandler("textDocument/publishDiagnostics", func(_ context.Context, _ string, params json.RawMessage) {
HandleDiagnostics(c, params)
})
- return &result, nil
+ return result, nil
}
-func (c *Client) Close() error {
+// Close closes the LSP client.
+func (c *Client) Close(ctx context.Context) error {
// Try to close all open files first
- ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+ ctx, cancel := context.WithTimeout(ctx, 5*time.Second)
defer cancel()
- // Attempt to close files but continue shutdown regardless
c.CloseAllFiles(ctx)
- // Close stdin to signal the server
- if err := c.stdin.Close(); err != nil {
- return fmt.Errorf("failed to close stdin: %w", err)
+ // Shutdown and exit the client
+ if err := c.client.Shutdown(ctx); err != nil {
+ slog.Warn("Failed to shutdown LSP client", "error", err)
}
- // Use a channel to handle the Wait with timeout
- done := make(chan error, 1)
- go func() {
- done <- c.Cmd.Wait()
- }()
-
- // Wait for process to exit with timeout
- select {
- case err := <-done:
- return err
- case <-time.After(2 * time.Second):
- // If we timeout, try to kill the process
- if err := c.Cmd.Process.Kill(); err != nil {
- return fmt.Errorf("failed to kill process: %w", err)
- }
- return fmt.Errorf("process killed after timeout")
- }
+ return c.client.Exit()
}
+// ServerState represents the state of an LSP server
type ServerState int
const (
StateStarting ServerState = iota
StateReady
StateError
+ StateDisabled
)
// GetServerState returns the current state of the LSP server
@@ -311,8 +186,7 @@ func (c *Client) SetDiagnosticsCallback(callback func(name string, count int)) {
c.onDiagnosticsChanged = callback
}
-// WaitForServerReady waits for the server to be ready by polling the server
-// with a simple request until it responds successfully or times out
+// WaitForServerReady waits for the server to be ready
func (c *Client) WaitForServerReady(ctx context.Context) error {
cfg := config.Get()
@@ -327,7 +201,7 @@ func (c *Client) WaitForServerReady(ctx context.Context) error {
ticker := time.NewTicker(500 * time.Millisecond)
defer ticker.Stop()
- if cfg.Options.DebugLSP {
+ if cfg != nil && cfg.Options.DebugLSP {
slog.Debug("Waiting for LSP server to be ready...")
}
@@ -339,17 +213,17 @@ func (c *Client) WaitForServerReady(ctx context.Context) error {
c.SetServerState(StateError)
return fmt.Errorf("timeout waiting for LSP server to be ready")
case <-ticker.C:
- // Try a ping method appropriate for this server type
- if err := c.ping(ctx); err != nil {
- if cfg.Options.DebugLSP {
- slog.Debug("LSP server not ready yet", "error", err, "server", c.name)
+ // Check if client is running
+ if !c.client.IsRunning() {
+ if cfg != nil && cfg.Options.DebugLSP {
+ slog.Debug("LSP server not ready yet", "server", c.name)
}
continue
}
- // Server responded successfully
+ // Server is ready
c.SetServerState(StateReady)
- if cfg.Options.DebugLSP {
+ if cfg != nil && cfg.Options.DebugLSP {
slog.Debug("LSP server is ready")
}
return nil
@@ -357,171 +231,13 @@ func (c *Client) WaitForServerReady(ctx context.Context) error {
}
}
-// ServerType represents the type of LSP server
-type ServerType int
-
-const (
- ServerTypeUnknown ServerType = iota
- ServerTypeGo
- ServerTypeTypeScript
- ServerTypeRust
- ServerTypePython
- ServerTypeGeneric
-)
-
-// detectServerType tries to determine what type of LSP server we're dealing with
-func (c *Client) detectServerType() ServerType {
- if c.Cmd == nil {
- return ServerTypeUnknown
- }
-
- cmdPath := strings.ToLower(c.Cmd.Path)
-
- switch {
- case strings.Contains(cmdPath, "gopls"):
- return ServerTypeGo
- case strings.Contains(cmdPath, "typescript") || strings.Contains(cmdPath, "vtsls") || strings.Contains(cmdPath, "tsserver"):
- return ServerTypeTypeScript
- case strings.Contains(cmdPath, "rust-analyzer"):
- return ServerTypeRust
- case strings.Contains(cmdPath, "pyright") || strings.Contains(cmdPath, "pylsp") || strings.Contains(cmdPath, "python"):
- return ServerTypePython
- default:
- return ServerTypeGeneric
- }
-}
-
-// openKeyConfigFiles opens important configuration files that help initialize the server
-func (c *Client) openKeyConfigFiles(ctx context.Context) {
- workDir := config.Get().WorkingDir()
- serverType := c.detectServerType()
-
- var filesToOpen []string
-
- switch serverType {
- case ServerTypeTypeScript:
- // TypeScript servers need these config files to properly initialize
- filesToOpen = []string{
- filepath.Join(workDir, "tsconfig.json"),
- filepath.Join(workDir, "package.json"),
- filepath.Join(workDir, "jsconfig.json"),
- }
-
- // Also find and open a few TypeScript files to help the server initialize
- c.openTypeScriptFiles(ctx, workDir)
- case ServerTypeGo:
- filesToOpen = []string{
- filepath.Join(workDir, "go.mod"),
- filepath.Join(workDir, "go.sum"),
- }
- case ServerTypeRust:
- filesToOpen = []string{
- filepath.Join(workDir, "Cargo.toml"),
- filepath.Join(workDir, "Cargo.lock"),
- }
- }
-
- // Try to open each file, ignoring errors if they don't exist
- for _, file := range filesToOpen {
- if _, err := os.Stat(file); err == nil {
- // File exists, try to open it
- if err := c.OpenFile(ctx, file); err != nil {
- slog.Debug("Failed to open key config file", "file", file, "error", err)
- } else {
- slog.Debug("Opened key config file for initialization", "file", file)
- }
- }
- }
-}
-
-// ping sends a ping request...
-func (c *Client) ping(ctx context.Context) error {
- if _, err := c.Symbol(ctx, protocol.WorkspaceSymbolParams{}); err == nil {
- return nil
- }
- // This is a very lightweight request that should work for most servers
- return c.Notify(ctx, "$/cancelRequest", protocol.CancelParams{ID: "1"})
-}
-
-// openTypeScriptFiles finds and opens TypeScript files to help initialize the server
-func (c *Client) openTypeScriptFiles(ctx context.Context, workDir string) {
- cfg := config.Get()
- filesOpened := 0
- maxFilesToOpen := 5 // Limit to a reasonable number of files
-
- // Find and open TypeScript files
- err := filepath.WalkDir(workDir, func(path string, d os.DirEntry, err error) error {
- if err != nil {
- return err
- }
-
- // Skip directories and non-TypeScript files
- if d.IsDir() {
- // Skip common directories to avoid wasting time
- if shouldSkipDir(path) {
- return filepath.SkipDir
- }
- return nil
- }
-
- // Check if we've opened enough files
- if filesOpened >= maxFilesToOpen {
- return filepath.SkipAll
- }
-
- // Check file extension
- ext := filepath.Ext(path)
- if ext == ".ts" || ext == ".tsx" || ext == ".js" || ext == ".jsx" {
- // Try to open the file
- if err := c.OpenFile(ctx, path); err == nil {
- filesOpened++
- if cfg.Options.DebugLSP {
- slog.Debug("Opened TypeScript file for initialization", "file", path)
- }
- }
- }
-
- return nil
- })
-
- if err != nil && cfg.Options.DebugLSP {
- slog.Debug("Error walking directory for TypeScript files", "error", err)
- }
-
- if cfg.Options.DebugLSP {
- slog.Debug("Opened TypeScript files for initialization", "count", filesOpened)
- }
-}
-
-// shouldSkipDir returns true if the directory should be skipped during file search
-func shouldSkipDir(path string) bool {
- dirName := filepath.Base(path)
-
- // Skip hidden directories
- if strings.HasPrefix(dirName, ".") {
- return true
- }
-
- // Skip common directories that won't contain relevant source files
- skipDirs := map[string]bool{
- "node_modules": true,
- "dist": true,
- "build": true,
- "coverage": true,
- "vendor": true,
- "target": true,
- }
-
- return skipDirs[dirName]
-}
-
+// OpenFileInfo contains information about an open file
type OpenFileInfo struct {
Version int32
URI protocol.DocumentURI
}
-// HandlesFile checks if this LSP client handles the given file based on its
-// extension.
+// HandlesFile checks if this LSP client handles the given file based on its extension.
func (c *Client) HandlesFile(path string) bool {
// If no file types are specified, handle all files (backward compatibility)
if len(c.fileTypes) == 0 {
@@ -529,13 +245,13 @@ func (c *Client) HandlesFile(path string) bool {
}
name := strings.ToLower(filepath.Base(path))
- for _, filetpe := range c.fileTypes {
- suffix := strings.ToLower(filetpe)
+ for _, filetype := range c.fileTypes {
+ suffix := strings.ToLower(filetype)
if !strings.HasPrefix(suffix, ".") {
suffix = "." + suffix
}
if strings.HasSuffix(name, suffix) {
- slog.Debug("handles file", "name", c.name, "file", name, "filetype", filetpe)
+ slog.Debug("handles file", "name", c.name, "file", name, "filetype", filetype)
return true
}
}
@@ -543,6 +259,7 @@ func (c *Client) HandlesFile(path string) bool {
return false
}
+// OpenFile opens a file in the LSP server.
func (c *Client) OpenFile(ctx context.Context, filepath string) error {
if !c.HandlesFile(filepath) {
return nil
@@ -550,12 +267,9 @@ func (c *Client) OpenFile(ctx context.Context, filepath string) error {
uri := string(protocol.URIFromPath(filepath))
- c.openFilesMu.Lock()
- if _, exists := c.openFiles[uri]; exists {
- c.openFilesMu.Unlock()
+ if _, exists := c.openFiles.Get(uri); exists {
return nil // Already open
}
- c.openFilesMu.Unlock()
// Skip files that do not exist or cannot be read
content, err := os.ReadFile(filepath)
@@ -563,29 +277,20 @@ func (c *Client) OpenFile(ctx context.Context, filepath string) error {
return fmt.Errorf("error reading file: %w", err)
}
- params := protocol.DidOpenTextDocumentParams{
- TextDocument: protocol.TextDocumentItem{
- URI: protocol.DocumentURI(uri),
- LanguageID: DetectLanguageID(uri),
- Version: 1,
- Text: string(content),
- },
- }
-
- if err := c.DidOpen(ctx, params); err != nil {
+ // Notify the server about the opened document
+ if err = c.client.NotifyDidOpenTextDocument(ctx, uri, string(DetectLanguageID(uri)), 1, string(content)); err != nil {
return err
}
- c.openFilesMu.Lock()
- c.openFiles[uri] = &OpenFileInfo{
+ c.openFiles.Set(uri, &OpenFileInfo{
Version: 1,
URI: protocol.DocumentURI(uri),
- }
- c.openFilesMu.Unlock()
+ })
return nil
}
+// NotifyChange notifies the server about a file change.
func (c *Client) NotifyChange(ctx context.Context, filepath string) error {
uri := string(protocol.URIFromPath(filepath))
@@ -594,124 +299,61 @@ func (c *Client) NotifyChange(ctx context.Context, filepath string) error {
return fmt.Errorf("error reading file: %w", err)
}
- c.openFilesMu.Lock()
- fileInfo, isOpen := c.openFiles[uri]
+ fileInfo, isOpen := c.openFiles.Get(uri)
if !isOpen {
- c.openFilesMu.Unlock()
return fmt.Errorf("cannot notify change for unopened file: %s", filepath)
}
// Increment version
fileInfo.Version++
- version := fileInfo.Version
- c.openFilesMu.Unlock()
- params := protocol.DidChangeTextDocumentParams{
- TextDocument: protocol.VersionedTextDocumentIdentifier{
- TextDocumentIdentifier: protocol.TextDocumentIdentifier{
- URI: protocol.DocumentURI(uri),
- },
- Version: version,
- },
- ContentChanges: []protocol.TextDocumentContentChangeEvent{
- {
- Value: protocol.TextDocumentContentChangeWholeDocument{
- Text: string(content),
- },
+ // Create change event
+ changes := []protocol.TextDocumentContentChangeEvent{
+ {
+ Value: protocol.TextDocumentContentChangeWholeDocument{
+ Text: string(content),
},
},
}
- return c.DidChange(ctx, params)
-}
-
-func (c *Client) CloseFile(ctx context.Context, filepath string) error {
- cfg := config.Get()
- uri := string(protocol.URIFromPath(filepath))
-
- c.openFilesMu.Lock()
- if _, exists := c.openFiles[uri]; !exists {
- c.openFilesMu.Unlock()
- return nil // Already closed
- }
- c.openFilesMu.Unlock()
-
- params := protocol.DidCloseTextDocumentParams{
- TextDocument: protocol.TextDocumentIdentifier{
- URI: protocol.DocumentURI(uri),
- },
- }
-
- if cfg.Options.DebugLSP {
- slog.Debug("Closing file", "file", filepath)
- }
- if err := c.DidClose(ctx, params); err != nil {
- return err
- }
-
- c.openFilesMu.Lock()
- delete(c.openFiles, uri)
- c.openFilesMu.Unlock()
-
- return nil
+ return c.client.NotifyDidChangeTextDocument(ctx, uri, int(fileInfo.Version), changes)
}
+// IsFileOpen checks if a file is currently open.
func (c *Client) IsFileOpen(filepath string) bool {
uri := string(protocol.URIFromPath(filepath))
- c.openFilesMu.RLock()
- defer c.openFilesMu.RUnlock()
- _, exists := c.openFiles[uri]
+ _, exists := c.openFiles.Get(uri)
return exists
}
-// CloseAllFiles closes all currently open files
+// CloseAllFiles closes all currently open files.
func (c *Client) CloseAllFiles(ctx context.Context) {
cfg := config.Get()
- c.openFilesMu.Lock()
- filesToClose := make([]string, 0, len(c.openFiles))
-
- // First collect all URIs that need to be closed
- for uri := range c.openFiles {
- // Convert URI back to file path using proper URI handling
- filePath, err := protocol.DocumentURI(uri).Path()
- if err != nil {
- slog.Error("Failed to convert URI to path for file closing", "uri", uri, "error", err)
- continue
+ debugLSP := cfg != nil && cfg.Options.DebugLSP
+ for uri := range c.openFiles.Seq2() {
+ if debugLSP {
+ slog.Debug("Closing file", "file", uri)
}
- filesToClose = append(filesToClose, filePath)
- }
- c.openFilesMu.Unlock()
-
- // Then close them all
- for _, filePath := range filesToClose {
- err := c.CloseFile(ctx, filePath)
- if err != nil && cfg.Options.DebugLSP {
- slog.Warn("Error closing file", "file", filePath, "error", err)
+ if err := c.client.NotifyDidCloseTextDocument(ctx, uri); err != nil {
+ slog.Warn("Error closing rile", "uri", uri, "error", err)
+ continue
}
- }
-
- if cfg.Options.DebugLSP {
- slog.Debug("Closed all files", "files", filesToClose)
+ c.openFiles.Del(uri)
}
}
+// GetFileDiagnostics returns diagnostics for a specific file.
func (c *Client) GetFileDiagnostics(uri protocol.DocumentURI) []protocol.Diagnostic {
- c.diagnosticsMu.RLock()
- defer c.diagnosticsMu.RUnlock()
-
- return c.diagnostics[uri]
+ diags, _ := c.diagnostics.Get(uri)
+ return diags
}
-// GetDiagnostics returns all diagnostics for all files
+// GetDiagnostics returns all diagnostics for all files.
func (c *Client) GetDiagnostics() map[protocol.DocumentURI][]protocol.Diagnostic {
- c.diagnosticsMu.RLock()
- defer c.diagnosticsMu.RUnlock()
-
- return maps.Clone(c.diagnostics)
+ return maps.Collect(c.diagnostics.Seq2())
}
-// OpenFileOnDemand opens a file only if it's not already open
-// This is used for lazy-loading files when they're actually needed
+// OpenFileOnDemand opens a file only if it's not already open.
func (c *Client) OpenFileOnDemand(ctx context.Context, filepath string) error {
// Check if the file is already open
if c.IsFileOpen(filepath) {
@@ -722,8 +364,7 @@ func (c *Client) OpenFileOnDemand(ctx context.Context, filepath string) error {
return c.OpenFile(ctx, filepath)
}
-// GetDiagnosticsForFile ensures a file is open and returns its diagnostics
-// This is useful for on-demand diagnostics when using lazy loading
+// GetDiagnosticsForFile ensures a file is open and returns its diagnostics.
func (c *Client) GetDiagnosticsForFile(ctx context.Context, filepath string) ([]protocol.Diagnostic, error) {
documentURI := protocol.URIFromPath(filepath)
@@ -738,16 +379,84 @@ func (c *Client) GetDiagnosticsForFile(ctx context.Context, filepath string) ([]
}
// Get diagnostics
- c.diagnosticsMu.RLock()
- diagnostics := c.diagnostics[documentURI]
- c.diagnosticsMu.RUnlock()
+ diagnostics, _ := c.diagnostics.Get(documentURI)
return diagnostics, nil
}
-// ClearDiagnosticsForURI removes diagnostics for a specific URI from the cache
+// ClearDiagnosticsForURI removes diagnostics for a specific URI from the cache.
func (c *Client) ClearDiagnosticsForURI(uri protocol.DocumentURI) {
- c.diagnosticsMu.Lock()
- defer c.diagnosticsMu.Unlock()
- delete(c.diagnostics, uri)
+ c.diagnostics.Del(uri)
+}
+
+// RegisterNotificationHandler registers a notification handler.
+func (c *Client) RegisterNotificationHandler(method string, handler transport.NotificationHandler) {
+ c.client.RegisterNotificationHandler(method, handler)
+}
+
+// RegisterServerRequestHandler handles server requests.
+func (c *Client) RegisterServerRequestHandler(method string, handler transport.Handler) {
+ c.client.RegisterHandler(method, handler)
+}
+
+// DidChangeWatchedFiles sends a workspace/didChangeWatchedFiles notification to the server.
+func (c *Client) DidChangeWatchedFiles(ctx context.Context, params protocol.DidChangeWatchedFilesParams) error {
+ return c.client.NotifyDidChangeWatchedFiles(ctx, params.Changes)
+}
+
+// openKeyConfigFiles opens important configuration files that help initialize the server.
+func (c *Client) openKeyConfigFiles(ctx context.Context) {
+ wd, err := os.Getwd()
+ if err != nil {
+ return
+ }
+
+ // Try to open each file, ignoring errors if they don't exist
+ for _, file := range c.config.RootMarkers {
+ file = filepath.Join(wd, file)
+ if _, err := os.Stat(file); err == nil {
+ // File exists, try to open it
+ if err := c.OpenFile(ctx, file); err != nil {
+ slog.Debug("Failed to open key config file", "file", file, "error", err)
+ } else {
+ slog.Debug("Opened key config file for initialization", "file", file)
+ }
+ }
+ }
+}
+
+// WaitForDiagnostics waits until diagnostics change or the timeout is reached.
+func (c *Client) WaitForDiagnostics(ctx context.Context, d time.Duration) {
+ ticker := time.NewTicker(200 * time.Millisecond)
+ defer ticker.Stop()
+ timeout := time.After(d)
+ pv := c.diagnostics.Version()
+ for {
+ select {
+ case <-ctx.Done():
+ return
+ case <-timeout:
+ return
+ case <-ticker.C:
+ if pv != c.diagnostics.Version() {
+ return
+ }
+ }
+ }
+}
+
+// HasRootMarkers checks if any of the specified root marker patterns exist in the given directory.
+// Uses glob patterns to match files, allowing for more flexible matching.
+func HasRootMarkers(dir string, rootMarkers []string) bool {
+ if len(rootMarkers) == 0 {
+ return true
+ }
+ for _, pattern := range rootMarkers {
+ // Use fsext.GlobWithDoubleStar to find matches
+ matches, _, err := fsext.GlobWithDoubleStar(pattern, dir, 1)
+ if err == nil && len(matches) > 0 {
+ return true
+ }
+ }
+ return false
}
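WaitForDiagnostics (added near the end of client.go above) polls diagnostics.Version() until the counter moves. csync.VersionedMap is not shown in this patch; the sketch below assumes a write counter bumped on every Set, matching the calls in the diff but not necessarily the real type:

```go
package csyncsketch

import (
	"sync"
	"sync/atomic"
)

// VersionedMap is an illustrative concurrent map whose version counter
// advances on every write; that is what lets WaitForDiagnostics detect change.
type VersionedMap[K comparable, V any] struct {
	mu      sync.RWMutex
	m       map[K]V
	version atomic.Uint64
}

func NewVersionedMap[K comparable, V any]() *VersionedMap[K, V] {
	return &VersionedMap[K, V]{m: make(map[K]V)}
}

// Set stores a value and bumps the version counter.
func (v *VersionedMap[K, V]) Set(k K, val V) {
	v.mu.Lock()
	v.m[k] = val
	v.mu.Unlock()
	v.version.Add(1)
}

// Get returns the value and whether the key was present.
func (v *VersionedMap[K, V]) Get(k K) (V, bool) {
	v.mu.RLock()
	defer v.mu.RUnlock()
	val, ok := v.m[k]
	return val, ok
}

// Version returns the write counter; pollers compare snapshots of it.
func (v *VersionedMap[K, V]) Version() uint64 {
	return v.version.Load()
}
```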
diff --git a/internal/lsp/client_test.go b/internal/lsp/client_test.go
index f97b9bdddba1e0fa5ab22cbe68635b4f1b9b02c3..7cc9f2f4ba230a4c6896e7ccef367a450c1c55c7 100644
--- a/internal/lsp/client_test.go
+++ b/internal/lsp/client_test.go
@@ -1,93 +1,57 @@
package lsp
import (
+ "context"
"testing"
- "github.com/stretchr/testify/require"
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/env"
)
-func TestHandlesFile(t *testing.T) {
- tests := []struct {
- name string
- fileTypes []string
- filepath string
- expected bool
- }{
- {
- name: "no file types specified - handles all files",
- fileTypes: nil,
- filepath: "test.go",
- expected: true,
- },
- {
- name: "empty file types - handles all files",
- fileTypes: []string{},
- filepath: "test.go",
- expected: true,
- },
- {
- name: "matches .go extension",
- fileTypes: []string{".go"},
- filepath: "main.go",
- expected: true,
- },
- {
- name: "matches go extension without dot",
- fileTypes: []string{"go"},
- filepath: "main.go",
- expected: true,
- },
- {
- name: "matches one of multiple extensions",
- fileTypes: []string{".js", ".ts", ".tsx"},
- filepath: "component.tsx",
- expected: true,
- },
- {
- name: "does not match extension",
- fileTypes: []string{".go", ".rs"},
- filepath: "script.sh",
- expected: false,
- },
- {
- name: "matches with full path",
- fileTypes: []string{".sh"},
- filepath: "/usr/local/bin/script.sh",
- expected: true,
- },
- {
- name: "case insensitive matching",
- fileTypes: []string{".GO"},
- filepath: "main.go",
- expected: true,
- },
- {
- name: "bash file types",
- fileTypes: []string{".sh", ".bash", ".zsh", ".ksh"},
- filepath: "script.sh",
- expected: true,
- },
- {
- name: "bash should not handle go files",
- fileTypes: []string{".sh", ".bash", ".zsh", ".ksh"},
- filepath: "main.go",
- expected: false,
- },
- {
- name: "bash should not handle json files",
- fileTypes: []string{".sh", ".bash", ".zsh", ".ksh"},
- filepath: "config.json",
- expected: false,
- },
+func TestClient(t *testing.T) {
+ ctx := context.Background()
+
+ // Create a simple config for testing
+ cfg := config.LSPConfig{
+ Command: "$THE_CMD", // Use echo as a dummy command that won't fail
+ Args: []string{"hello"},
+ FileTypes: []string{"go"},
+ Env: map[string]string{},
+ }
+
+ // Test creating a powernap client - this will likely fail with echo
+ // but we can still test the basic structure
+ client, err := New(ctx, "test", cfg, config.NewEnvironmentVariableResolver(env.NewFromMap(map[string]string{
+ "THE_CMD": "echo",
+ })))
+ if err != nil {
+ // Expected to fail with echo command, skip the rest
+ t.Skipf("Powernap client creation failed as expected with dummy command: %v", err)
+ return
+ }
+
+ // If we get here, test basic interface methods
+ if client.GetName() != "test" {
+ t.Errorf("Expected name 'test', got '%s'", client.GetName())
+ }
+
+ if !client.HandlesFile("test.go") {
+ t.Error("Expected client to handle .go files")
+ }
+
+ if client.HandlesFile("test.py") {
+ t.Error("Expected client to not handle .py files")
+ }
+
+ // Test server state
+ client.SetServerState(StateReady)
+ if client.GetServerState() != StateReady {
+ t.Error("Expected server state to be StateReady")
}
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- client := &Client{
- fileTypes: tt.fileTypes,
- }
- result := client.HandlesFile(tt.filepath)
- require.Equal(t, tt.expected, result)
- })
+ // Clean up - expect this to fail with echo command
+ if err := client.Close(t.Context()); err != nil {
+ // Expected to fail with echo command
+ t.Logf("Close failed as expected with dummy command: %v", err)
}
}
diff --git a/internal/lsp/handlers.go b/internal/lsp/handlers.go
index 72f3018b3da969000672e5b4ba47f73f2b72df97..b386e0780f6f6db6db13be380496c60a6e3c457e 100644
--- a/internal/lsp/handlers.go
+++ b/internal/lsp/handlers.go
@@ -1,22 +1,22 @@
package lsp
import (
+ "context"
"encoding/json"
"log/slog"
"github.com/charmbracelet/crush/internal/config"
-
- "github.com/charmbracelet/crush/internal/lsp/protocol"
"github.com/charmbracelet/crush/internal/lsp/util"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
-// Requests
-
-func HandleWorkspaceConfiguration(params json.RawMessage) (any, error) {
+// HandleWorkspaceConfiguration handles workspace configuration requests
+func HandleWorkspaceConfiguration(_ context.Context, _ string, params json.RawMessage) (any, error) {
return []map[string]any{{}}, nil
}
-func HandleRegisterCapability(params json.RawMessage) (any, error) {
+// HandleRegisterCapability handles capability registration requests
+func HandleRegisterCapability(_ context.Context, _ string, params json.RawMessage) (any, error) {
var registerParams protocol.RegistrationParams
 if err := json.Unmarshal(params, &registerParams); err != nil {
slog.Error("Error unmarshaling registration params", "error", err)
@@ -32,22 +32,20 @@ func HandleRegisterCapability(params json.RawMessage) (any, error) {
slog.Error("Error marshaling registration options", "error", err)
continue
}
-
var options protocol.DidChangeWatchedFilesRegistrationOptions
if err := json.Unmarshal(optionsJSON, &options); err != nil {
slog.Error("Error unmarshaling registration options", "error", err)
continue
}
-
// Store the file watchers registrations
notifyFileWatchRegistration(reg.ID, options.Watchers)
}
}
-
return nil, nil
}
-func HandleApplyEdit(params json.RawMessage) (any, error) {
+// HandleApplyEdit handles workspace edit requests
+func HandleApplyEdit(_ context.Context, _ string, params json.RawMessage) (any, error) {
var edit protocol.ApplyWorkspaceEditParams
if err := json.Unmarshal(params, &edit); err != nil {
return nil, err
@@ -80,21 +78,32 @@ func notifyFileWatchRegistration(id string, watchers []protocol.FileSystemWatche
}
}
-// Notifications
-
-func HandleServerMessage(params json.RawMessage) {
+// HandleServerMessage handles server messages
+func HandleServerMessage(_ context.Context, method string, params json.RawMessage) {
cfg := config.Get()
- var msg struct {
- Type int `json:"type"`
- Message string `json:"message"`
+ if !cfg.Options.DebugLSP {
+ return
}
- if err := json.Unmarshal(params, &msg); err == nil {
- if cfg.Options.DebugLSP {
- slog.Debug("Server message", "type", msg.Type, "message", msg.Message)
- }
+
+ var msg protocol.ShowMessageParams
+ if err := json.Unmarshal(params, &msg); err != nil {
+ slog.Debug("Server message", "type", msg.Type, "message", msg.Message)
+ return
+ }
+
+ switch msg.Type {
+ case protocol.Error:
+ slog.Error("LSP Server", "message", msg.Message)
+ case protocol.Warning:
+ slog.Warn("LSP Server", "message", msg.Message)
+ case protocol.Info:
+ slog.Info("LSP Server", "message", msg.Message)
+ case protocol.Log:
+ slog.Debug("LSP Server", "message", msg.Message)
}
}
+// HandleDiagnostics handles diagnostic notifications from the LSP server
func HandleDiagnostics(client *Client, params json.RawMessage) {
var diagParams protocol.PublishDiagnosticsParams
if err := json.Unmarshal(params, &diagParams); err != nil {
@@ -102,15 +111,13 @@ func HandleDiagnostics(client *Client, params json.RawMessage) {
return
}
- client.diagnosticsMu.Lock()
- client.diagnostics[diagParams.URI] = diagParams.Diagnostics
+ client.diagnostics.Set(diagParams.URI, diagParams.Diagnostics)
// Calculate total diagnostic count
totalCount := 0
- for _, diagnostics := range client.diagnostics {
+ for _, diagnostics := range client.diagnostics.Seq2() {
totalCount += len(diagnostics)
}
- client.diagnosticsMu.Unlock()
// Trigger callback if set
if client.onDiagnosticsChanged != nil {
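Notification and request handlers now receive the context and method name, following powernap's transport package. An illustrative registration under the new signature ("$/progress" and the handler body are examples only, not part of this patch):

```go
package lspsketch

import (
	"context"
	"encoding/json"
	"log/slog"

	"github.com/charmbracelet/crush/internal/lsp"
)

// registerProgressLogging is a sketch showing the new three-argument
// handler shape; the method and body are illustrative.
func registerProgressLogging(client *lsp.Client) {
	client.RegisterNotificationHandler("$/progress", func(_ context.Context, method string, params json.RawMessage) {
		slog.Debug("lsp notification", "method", method, "bytes", len(params))
	})
}
```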
diff --git a/internal/lsp/language.go b/internal/lsp/language.go
index 87d209f1dbc51eafbde4d85b0ce6001dd17729b5..7d6a1517e849b6f09352447b2acb05539b3220af 100644
--- a/internal/lsp/language.go
+++ b/internal/lsp/language.go
@@ -4,7 +4,7 @@ import (
"path/filepath"
"strings"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
func DetectLanguageID(uri string) protocol.LanguageKind {
diff --git a/internal/lsp/methods.go b/internal/lsp/methods.go
deleted file mode 100644
index afd087c1b86d5242e845e419c47234de11ce467f..0000000000000000000000000000000000000000
--- a/internal/lsp/methods.go
+++ /dev/null
@@ -1,554 +0,0 @@
-// Generated code. Do not edit
-package lsp
-
-import (
- "context"
-
- "github.com/charmbracelet/crush/internal/lsp/protocol"
-)
-
-// Implementation sends a textDocument/implementation request to the LSP server.
-// A request to resolve the implementation locations of a symbol at a given text document position. The request's parameter is of type TextDocumentPositionParams the response is of type Definition or a Thenable that resolves to such.
-func (c *Client) Implementation(ctx context.Context, params protocol.ImplementationParams) (protocol.Or_Result_textDocument_implementation, error) {
- var result protocol.Or_Result_textDocument_implementation
- err := c.Call(ctx, "textDocument/implementation", params, &result)
- return result, err
-}
-
-// TypeDefinition sends a textDocument/typeDefinition request to the LSP server.
-// A request to resolve the type definition locations of a symbol at a given text document position. The request's parameter is of type TextDocumentPositionParams the response is of type Definition or a Thenable that resolves to such.
-func (c *Client) TypeDefinition(ctx context.Context, params protocol.TypeDefinitionParams) (protocol.Or_Result_textDocument_typeDefinition, error) {
- var result protocol.Or_Result_textDocument_typeDefinition
- err := c.Call(ctx, "textDocument/typeDefinition", params, &result)
- return result, err
-}
-
-// DocumentColor sends a textDocument/documentColor request to the LSP server.
-// A request to list all color symbols found in a given text document. The request's parameter is of type DocumentColorParams the response is of type ColorInformation ColorInformation[] or a Thenable that resolves to such.
-func (c *Client) DocumentColor(ctx context.Context, params protocol.DocumentColorParams) ([]protocol.ColorInformation, error) {
- var result []protocol.ColorInformation
- err := c.Call(ctx, "textDocument/documentColor", params, &result)
- return result, err
-}
-
-// ColorPresentation sends a textDocument/colorPresentation request to the LSP server.
-// A request to list all presentation for a color. The request's parameter is of type ColorPresentationParams the response is of type ColorInformation ColorInformation[] or a Thenable that resolves to such.
-func (c *Client) ColorPresentation(ctx context.Context, params protocol.ColorPresentationParams) ([]protocol.ColorPresentation, error) {
- var result []protocol.ColorPresentation
- err := c.Call(ctx, "textDocument/colorPresentation", params, &result)
- return result, err
-}
-
-// FoldingRange sends a textDocument/foldingRange request to the LSP server.
-// A request to provide folding ranges in a document. The request's parameter is of type FoldingRangeParams, the response is of type FoldingRangeList or a Thenable that resolves to such.
-func (c *Client) FoldingRange(ctx context.Context, params protocol.FoldingRangeParams) ([]protocol.FoldingRange, error) {
- var result []protocol.FoldingRange
- err := c.Call(ctx, "textDocument/foldingRange", params, &result)
- return result, err
-}
-
-// Declaration sends a textDocument/declaration request to the LSP server.
-// A request to resolve the type definition locations of a symbol at a given text document position. The request's parameter is of type TextDocumentPositionParams the response is of type Declaration or a typed array of DeclarationLink or a Thenable that resolves to such.
-func (c *Client) Declaration(ctx context.Context, params protocol.DeclarationParams) (protocol.Or_Result_textDocument_declaration, error) {
- var result protocol.Or_Result_textDocument_declaration
- err := c.Call(ctx, "textDocument/declaration", params, &result)
- return result, err
-}
-
-// SelectionRange sends a textDocument/selectionRange request to the LSP server.
-// A request to provide selection ranges in a document. The request's parameter is of type SelectionRangeParams, the response is of type SelectionRange SelectionRange[] or a Thenable that resolves to such.
-func (c *Client) SelectionRange(ctx context.Context, params protocol.SelectionRangeParams) ([]protocol.SelectionRange, error) {
- var result []protocol.SelectionRange
- err := c.Call(ctx, "textDocument/selectionRange", params, &result)
- return result, err
-}
-
-// PrepareCallHierarchy sends a textDocument/prepareCallHierarchy request to the LSP server.
-// A request to result a CallHierarchyItem in a document at a given position. Can be used as an input to an incoming or outgoing call hierarchy. Since 3.16.0
-func (c *Client) PrepareCallHierarchy(ctx context.Context, params protocol.CallHierarchyPrepareParams) ([]protocol.CallHierarchyItem, error) {
- var result []protocol.CallHierarchyItem
- err := c.Call(ctx, "textDocument/prepareCallHierarchy", params, &result)
- return result, err
-}
-
-// IncomingCalls sends a callHierarchy/incomingCalls request to the LSP server.
-// A request to resolve the incoming calls for a given CallHierarchyItem. Since 3.16.0
-func (c *Client) IncomingCalls(ctx context.Context, params protocol.CallHierarchyIncomingCallsParams) ([]protocol.CallHierarchyIncomingCall, error) {
- var result []protocol.CallHierarchyIncomingCall
- err := c.Call(ctx, "callHierarchy/incomingCalls", params, &result)
- return result, err
-}
-
-// OutgoingCalls sends a callHierarchy/outgoingCalls request to the LSP server.
-// A request to resolve the outgoing calls for a given CallHierarchyItem. Since 3.16.0
-func (c *Client) OutgoingCalls(ctx context.Context, params protocol.CallHierarchyOutgoingCallsParams) ([]protocol.CallHierarchyOutgoingCall, error) {
- var result []protocol.CallHierarchyOutgoingCall
- err := c.Call(ctx, "callHierarchy/outgoingCalls", params, &result)
- return result, err
-}
-
-// SemanticTokensFull sends a textDocument/semanticTokens/full request to the LSP server.
-// Since 3.16.0
-func (c *Client) SemanticTokensFull(ctx context.Context, params protocol.SemanticTokensParams) (protocol.SemanticTokens, error) {
- var result protocol.SemanticTokens
- err := c.Call(ctx, "textDocument/semanticTokens/full", params, &result)
- return result, err
-}
-
-// SemanticTokensFullDelta sends a textDocument/semanticTokens/full/delta request to the LSP server.
-// Since 3.16.0
-func (c *Client) SemanticTokensFullDelta(ctx context.Context, params protocol.SemanticTokensDeltaParams) (protocol.Or_Result_textDocument_semanticTokens_full_delta, error) {
- var result protocol.Or_Result_textDocument_semanticTokens_full_delta
- err := c.Call(ctx, "textDocument/semanticTokens/full/delta", params, &result)
- return result, err
-}
-
-// SemanticTokensRange sends a textDocument/semanticTokens/range request to the LSP server.
-// Since 3.16.0
-func (c *Client) SemanticTokensRange(ctx context.Context, params protocol.SemanticTokensRangeParams) (protocol.SemanticTokens, error) {
- var result protocol.SemanticTokens
- err := c.Call(ctx, "textDocument/semanticTokens/range", params, &result)
- return result, err
-}
-
-// LinkedEditingRange sends a textDocument/linkedEditingRange request to the LSP server.
-// A request to provide ranges that can be edited together. Since 3.16.0
-func (c *Client) LinkedEditingRange(ctx context.Context, params protocol.LinkedEditingRangeParams) (protocol.LinkedEditingRanges, error) {
- var result protocol.LinkedEditingRanges
- err := c.Call(ctx, "textDocument/linkedEditingRange", params, &result)
- return result, err
-}
-
-// WillCreateFiles sends a workspace/willCreateFiles request to the LSP server.
-// The will create files request is sent from the client to the server before files are actually created as long as the creation is triggered from within the client. The request can return a WorkspaceEdit which will be applied to workspace before the files are created. Hence the WorkspaceEdit can not manipulate the content of the file to be created. Since 3.16.0
-func (c *Client) WillCreateFiles(ctx context.Context, params protocol.CreateFilesParams) (protocol.WorkspaceEdit, error) {
- var result protocol.WorkspaceEdit
- err := c.Call(ctx, "workspace/willCreateFiles", params, &result)
- return result, err
-}
-
-// WillRenameFiles sends a workspace/willRenameFiles request to the LSP server.
-// The will rename files request is sent from the client to the server before files are actually renamed as long as the rename is triggered from within the client. Since 3.16.0
-func (c *Client) WillRenameFiles(ctx context.Context, params protocol.RenameFilesParams) (protocol.WorkspaceEdit, error) {
- var result protocol.WorkspaceEdit
- err := c.Call(ctx, "workspace/willRenameFiles", params, &result)
- return result, err
-}
-
-// WillDeleteFiles sends a workspace/willDeleteFiles request to the LSP server.
-// The did delete files notification is sent from the client to the server when files were deleted from within the client. Since 3.16.0
-func (c *Client) WillDeleteFiles(ctx context.Context, params protocol.DeleteFilesParams) (protocol.WorkspaceEdit, error) {
- var result protocol.WorkspaceEdit
- err := c.Call(ctx, "workspace/willDeleteFiles", params, &result)
- return result, err
-}
-
-// Moniker sends a textDocument/moniker request to the LSP server.
-// A request to get the moniker of a symbol at a given text document position. The request parameter is of type TextDocumentPositionParams. The response is of type Moniker Moniker[] or null.
-func (c *Client) Moniker(ctx context.Context, params protocol.MonikerParams) ([]protocol.Moniker, error) {
- var result []protocol.Moniker
- err := c.Call(ctx, "textDocument/moniker", params, &result)
- return result, err
-}
-
-// PrepareTypeHierarchy sends a textDocument/prepareTypeHierarchy request to the LSP server.
-// A request to result a TypeHierarchyItem in a document at a given position. Can be used as an input to a subtypes or supertypes type hierarchy. Since 3.17.0
-func (c *Client) PrepareTypeHierarchy(ctx context.Context, params protocol.TypeHierarchyPrepareParams) ([]protocol.TypeHierarchyItem, error) {
- var result []protocol.TypeHierarchyItem
- err := c.Call(ctx, "textDocument/prepareTypeHierarchy", params, &result)
- return result, err
-}
-
-// Supertypes sends a typeHierarchy/supertypes request to the LSP server.
-// A request to resolve the supertypes for a given TypeHierarchyItem. Since 3.17.0
-func (c *Client) Supertypes(ctx context.Context, params protocol.TypeHierarchySupertypesParams) ([]protocol.TypeHierarchyItem, error) {
- var result []protocol.TypeHierarchyItem
- err := c.Call(ctx, "typeHierarchy/supertypes", params, &result)
- return result, err
-}
-
-// Subtypes sends a typeHierarchy/subtypes request to the LSP server.
-// A request to resolve the subtypes for a given TypeHierarchyItem. Since 3.17.0
-func (c *Client) Subtypes(ctx context.Context, params protocol.TypeHierarchySubtypesParams) ([]protocol.TypeHierarchyItem, error) {
- var result []protocol.TypeHierarchyItem
- err := c.Call(ctx, "typeHierarchy/subtypes", params, &result)
- return result, err
-}
-
-// InlineValue sends a textDocument/inlineValue request to the LSP server.
-// A request to provide inline values in a document. The request's parameter is of type InlineValueParams, the response is of type InlineValue InlineValue[] or a Thenable that resolves to such. Since 3.17.0
-func (c *Client) InlineValue(ctx context.Context, params protocol.InlineValueParams) ([]protocol.InlineValue, error) {
- var result []protocol.InlineValue
- err := c.Call(ctx, "textDocument/inlineValue", params, &result)
- return result, err
-}
-
-// InlayHint sends a textDocument/inlayHint request to the LSP server.
-// A request to provide inlay hints in a document. The request's parameter is of type InlayHintsParams, the response is of type InlayHint InlayHint[] or a Thenable that resolves to such. Since 3.17.0
-func (c *Client) InlayHint(ctx context.Context, params protocol.InlayHintParams) ([]protocol.InlayHint, error) {
- var result []protocol.InlayHint
- err := c.Call(ctx, "textDocument/inlayHint", params, &result)
- return result, err
-}
-
-// Resolve sends a inlayHint/resolve request to the LSP server.
-// A request to resolve additional properties for an inlay hint. The request's parameter is of type InlayHint, the response is of type InlayHint or a Thenable that resolves to such. Since 3.17.0
-func (c *Client) Resolve(ctx context.Context, params protocol.InlayHint) (protocol.InlayHint, error) {
- var result protocol.InlayHint
- err := c.Call(ctx, "inlayHint/resolve", params, &result)
- return result, err
-}
-
-// Diagnostic sends a textDocument/diagnostic request to the LSP server.
-// The document diagnostic request definition. Since 3.17.0
-func (c *Client) Diagnostic(ctx context.Context, params protocol.DocumentDiagnosticParams) (protocol.DocumentDiagnosticReport, error) {
- var result protocol.DocumentDiagnosticReport
- err := c.Call(ctx, "textDocument/diagnostic", params, &result)
- return result, err
-}
-
-// DiagnosticWorkspace sends a workspace/diagnostic request to the LSP server.
-// The workspace diagnostic request definition. Since 3.17.0
-func (c *Client) DiagnosticWorkspace(ctx context.Context, params protocol.WorkspaceDiagnosticParams) (protocol.WorkspaceDiagnosticReport, error) {
- var result protocol.WorkspaceDiagnosticReport
- err := c.Call(ctx, "workspace/diagnostic", params, &result)
- return result, err
-}
-
-// InlineCompletion sends a textDocument/inlineCompletion request to the LSP server.
-// A request to provide inline completions in a document. The request's parameter is of type InlineCompletionParams, the response is of type InlineCompletion InlineCompletion[] or a Thenable that resolves to such. Since 3.18.0 PROPOSED
-func (c *Client) InlineCompletion(ctx context.Context, params protocol.InlineCompletionParams) (protocol.Or_Result_textDocument_inlineCompletion, error) {
- var result protocol.Or_Result_textDocument_inlineCompletion
- err := c.Call(ctx, "textDocument/inlineCompletion", params, &result)
- return result, err
-}
-
-// TextDocumentContent sends a workspace/textDocumentContent request to the LSP server.
-// The workspace/textDocumentContent request is sent from the client to the server to request the content of a text document. Since 3.18.0 PROPOSED
-func (c *Client) TextDocumentContent(ctx context.Context, params protocol.TextDocumentContentParams) (string, error) {
- var result string
- err := c.Call(ctx, "workspace/textDocumentContent", params, &result)
- return result, err
-}
-
-// Initialize sends a initialize request to the LSP server.
-// The initialize request is sent from the client to the server. It is sent once as the request after starting up the server. The requests parameter is of type InitializeParams the response if of type InitializeResult of a Thenable that resolves to such.
-func (c *Client) Initialize(ctx context.Context, params protocol.ParamInitialize) (protocol.InitializeResult, error) {
- var result protocol.InitializeResult
- err := c.Call(ctx, "initialize", params, &result)
- return result, err
-}
-
-// Shutdown sends a shutdown request to the LSP server.
-// A shutdown request is sent from the client to the server. It is sent once when the client decides to shutdown the server. The only notification that is sent after a shutdown request is the exit event.
-func (c *Client) Shutdown(ctx context.Context) error {
- return c.Call(ctx, "shutdown", nil, nil)
-}
-
-// WillSaveWaitUntil sends a textDocument/willSaveWaitUntil request to the LSP server.
-// A document will save request is sent from the client to the server before the document is actually saved. The request can return an array of TextEdits which will be applied to the text document before it is saved. Please note that clients might drop results if computing the text edits took too long or if a server constantly fails on this request. This is done to keep the save fast and reliable.
-func (c *Client) WillSaveWaitUntil(ctx context.Context, params protocol.WillSaveTextDocumentParams) ([]protocol.TextEdit, error) {
- var result []protocol.TextEdit
- err := c.Call(ctx, "textDocument/willSaveWaitUntil", params, &result)
- return result, err
-}
-
-// Completion sends a textDocument/completion request to the LSP server.
-// Request to request completion at a given text document position. The request's parameter is of type TextDocumentPosition the response is of type CompletionItem CompletionItem[] or CompletionList or a Thenable that resolves to such. The request can delay the computation of the CompletionItem.detail detail and CompletionItem.documentation documentation properties to the completionItem/resolve request. However, properties that are needed for the initial sorting and filtering, like sortText, filterText, insertText, and textEdit, must not be changed during resolve.
-func (c *Client) Completion(ctx context.Context, params protocol.CompletionParams) (protocol.Or_Result_textDocument_completion, error) {
- var result protocol.Or_Result_textDocument_completion
- err := c.Call(ctx, "textDocument/completion", params, &result)
- return result, err
-}
-
-// ResolveCompletionItem sends a completionItem/resolve request to the LSP server.
-// Request to resolve additional information for a given completion item.The request's parameter is of type CompletionItem the response is of type CompletionItem or a Thenable that resolves to such.
-func (c *Client) ResolveCompletionItem(ctx context.Context, params protocol.CompletionItem) (protocol.CompletionItem, error) {
- var result protocol.CompletionItem
- err := c.Call(ctx, "completionItem/resolve", params, &result)
- return result, err
-}
-
-// Hover sends a textDocument/hover request to the LSP server.
-// Request to request hover information at a given text document position. The request's parameter is of type TextDocumentPosition the response is of type Hover or a Thenable that resolves to such.
-func (c *Client) Hover(ctx context.Context, params protocol.HoverParams) (protocol.Hover, error) {
- var result protocol.Hover
- err := c.Call(ctx, "textDocument/hover", params, &result)
- return result, err
-}
-
-// SignatureHelp sends a textDocument/signatureHelp request to the LSP server.
-func (c *Client) SignatureHelp(ctx context.Context, params protocol.SignatureHelpParams) (protocol.SignatureHelp, error) {
- var result protocol.SignatureHelp
- err := c.Call(ctx, "textDocument/signatureHelp", params, &result)
- return result, err
-}
-
-// Definition sends a textDocument/definition request to the LSP server.
-// A request to resolve the definition location of a symbol at a given text document position. The request's parameter is of type TextDocumentPosition the response is of either type Definition or a typed array of DefinitionLink or a Thenable that resolves to such.
-func (c *Client) Definition(ctx context.Context, params protocol.DefinitionParams) (protocol.Or_Result_textDocument_definition, error) {
- var result protocol.Or_Result_textDocument_definition
- err := c.Call(ctx, "textDocument/definition", params, &result)
- return result, err
-}
-
-// References sends a textDocument/references request to the LSP server.
-// A request to resolve project-wide references for the symbol denoted by the given text document position. The request's parameter is of type ReferenceParams the response is of type Location Location[] or a Thenable that resolves to such.
-func (c *Client) References(ctx context.Context, params protocol.ReferenceParams) ([]protocol.Location, error) {
- var result []protocol.Location
- err := c.Call(ctx, "textDocument/references", params, &result)
- return result, err
-}
-
-// DocumentHighlight sends a textDocument/documentHighlight request to the LSP server.
-// Request to resolve a DocumentHighlight for a given text document position. The request's parameter is of type TextDocumentPosition the request response is an array of type DocumentHighlight or a Thenable that resolves to such.
-func (c *Client) DocumentHighlight(ctx context.Context, params protocol.DocumentHighlightParams) ([]protocol.DocumentHighlight, error) {
- var result []protocol.DocumentHighlight
- err := c.Call(ctx, "textDocument/documentHighlight", params, &result)
- return result, err
-}
-
-// DocumentSymbol sends a textDocument/documentSymbol request to the LSP server.
-// A request to list all symbols found in a given text document. The request's parameter is of type TextDocumentIdentifier the response is of type SymbolInformation SymbolInformation[] or a Thenable that resolves to such.
-func (c *Client) DocumentSymbol(ctx context.Context, params protocol.DocumentSymbolParams) (protocol.Or_Result_textDocument_documentSymbol, error) {
- var result protocol.Or_Result_textDocument_documentSymbol
- err := c.Call(ctx, "textDocument/documentSymbol", params, &result)
- return result, err
-}
-
-// CodeAction sends a textDocument/codeAction request to the LSP server.
-// A request to provide commands for the given text document and range.
-func (c *Client) CodeAction(ctx context.Context, params protocol.CodeActionParams) ([]protocol.Or_Result_textDocument_codeAction_Item0_Elem, error) {
- var result []protocol.Or_Result_textDocument_codeAction_Item0_Elem
- err := c.Call(ctx, "textDocument/codeAction", params, &result)
- return result, err
-}
-
-// ResolveCodeAction sends a codeAction/resolve request to the LSP server.
-// Request to resolve additional information for a given code action.The request's parameter is of type CodeAction the response is of type CodeAction or a Thenable that resolves to such.
-func (c *Client) ResolveCodeAction(ctx context.Context, params protocol.CodeAction) (protocol.CodeAction, error) {
- var result protocol.CodeAction
- err := c.Call(ctx, "codeAction/resolve", params, &result)
- return result, err
-}
-
-// Symbol sends a workspace/symbol request to the LSP server.
-// A request to list project-wide symbols matching the query string given by the WorkspaceSymbolParams. The response is of type SymbolInformation SymbolInformation[] or a Thenable that resolves to such. Since 3.17.0 - support for WorkspaceSymbol in the returned data. Clients need to advertise support for WorkspaceSymbols via the client capability workspace.symbol.resolveSupport.
-func (c *Client) Symbol(ctx context.Context, params protocol.WorkspaceSymbolParams) (protocol.Or_Result_workspace_symbol, error) {
- var result protocol.Or_Result_workspace_symbol
- err := c.Call(ctx, "workspace/symbol", params, &result)
- return result, err
-}
-
-// ResolveWorkspaceSymbol sends a workspaceSymbol/resolve request to the LSP server.
-// A request to resolve the range inside the workspace symbol's location. Since 3.17.0
-func (c *Client) ResolveWorkspaceSymbol(ctx context.Context, params protocol.WorkspaceSymbol) (protocol.WorkspaceSymbol, error) {
- var result protocol.WorkspaceSymbol
- err := c.Call(ctx, "workspaceSymbol/resolve", params, &result)
- return result, err
-}
-
-// CodeLens sends a textDocument/codeLens request to the LSP server.
-// A request to provide code lens for the given text document.
-func (c *Client) CodeLens(ctx context.Context, params protocol.CodeLensParams) ([]protocol.CodeLens, error) {
- var result []protocol.CodeLens
- err := c.Call(ctx, "textDocument/codeLens", params, &result)
- return result, err
-}
-
-// ResolveCodeLens sends a codeLens/resolve request to the LSP server.
-// A request to resolve a command for a given code lens.
-func (c *Client) ResolveCodeLens(ctx context.Context, params protocol.CodeLens) (protocol.CodeLens, error) {
- var result protocol.CodeLens
- err := c.Call(ctx, "codeLens/resolve", params, &result)
- return result, err
-}
-
-// DocumentLink sends a textDocument/documentLink request to the LSP server.
-// A request to provide document links
-func (c *Client) DocumentLink(ctx context.Context, params protocol.DocumentLinkParams) ([]protocol.DocumentLink, error) {
- var result []protocol.DocumentLink
- err := c.Call(ctx, "textDocument/documentLink", params, &result)
- return result, err
-}
-
-// ResolveDocumentLink sends a documentLink/resolve request to the LSP server.
-// Request to resolve additional information for a given document link. The request's parameter is of type DocumentLink the response is of type DocumentLink or a Thenable that resolves to such.
-func (c *Client) ResolveDocumentLink(ctx context.Context, params protocol.DocumentLink) (protocol.DocumentLink, error) {
- var result protocol.DocumentLink
- err := c.Call(ctx, "documentLink/resolve", params, &result)
- return result, err
-}
-
-// Formatting sends a textDocument/formatting request to the LSP server.
-// A request to format a whole document.
-func (c *Client) Formatting(ctx context.Context, params protocol.DocumentFormattingParams) ([]protocol.TextEdit, error) {
- var result []protocol.TextEdit
- err := c.Call(ctx, "textDocument/formatting", params, &result)
- return result, err
-}
-
-// RangeFormatting sends a textDocument/rangeFormatting request to the LSP server.
-// A request to format a range in a document.
-func (c *Client) RangeFormatting(ctx context.Context, params protocol.DocumentRangeFormattingParams) ([]protocol.TextEdit, error) {
- var result []protocol.TextEdit
- err := c.Call(ctx, "textDocument/rangeFormatting", params, &result)
- return result, err
-}
-
-// RangesFormatting sends a textDocument/rangesFormatting request to the LSP server.
-// A request to format ranges in a document. Since 3.18.0 PROPOSED
-func (c *Client) RangesFormatting(ctx context.Context, params protocol.DocumentRangesFormattingParams) ([]protocol.TextEdit, error) {
- var result []protocol.TextEdit
- err := c.Call(ctx, "textDocument/rangesFormatting", params, &result)
- return result, err
-}
-
-// OnTypeFormatting sends a textDocument/onTypeFormatting request to the LSP server.
-// A request to format a document on type.
-func (c *Client) OnTypeFormatting(ctx context.Context, params protocol.DocumentOnTypeFormattingParams) ([]protocol.TextEdit, error) {
- var result []protocol.TextEdit
- err := c.Call(ctx, "textDocument/onTypeFormatting", params, &result)
- return result, err
-}
-
-// Rename sends a textDocument/rename request to the LSP server.
-// A request to rename a symbol.
-func (c *Client) Rename(ctx context.Context, params protocol.RenameParams) (protocol.WorkspaceEdit, error) {
- var result protocol.WorkspaceEdit
- err := c.Call(ctx, "textDocument/rename", params, &result)
- return result, err
-}
-
-// PrepareRename sends a textDocument/prepareRename request to the LSP server.
-// A request to test and perform the setup necessary for a rename. Since 3.16 - support for default behavior
-func (c *Client) PrepareRename(ctx context.Context, params protocol.PrepareRenameParams) (protocol.PrepareRenameResult, error) {
- var result protocol.PrepareRenameResult
- err := c.Call(ctx, "textDocument/prepareRename", params, &result)
- return result, err
-}
-
-// ExecuteCommand sends a workspace/executeCommand request to the LSP server.
-// A request send from the client to the server to execute a command. The request might return a workspace edit which the client will apply to the workspace.
-func (c *Client) ExecuteCommand(ctx context.Context, params protocol.ExecuteCommandParams) (any, error) {
- var result any
- err := c.Call(ctx, "workspace/executeCommand", params, &result)
- return result, err
-}
-
-// DidChangeWorkspaceFolders sends a workspace/didChangeWorkspaceFolders notification to the LSP server.
-// The workspace/didChangeWorkspaceFolders notification is sent from the client to the server when the workspace folder configuration changes.
-func (c *Client) DidChangeWorkspaceFolders(ctx context.Context, params protocol.DidChangeWorkspaceFoldersParams) error {
- return c.Notify(ctx, "workspace/didChangeWorkspaceFolders", params)
-}
-
-// WorkDoneProgressCancel sends a window/workDoneProgress/cancel notification to the LSP server.
-// The window/workDoneProgress/cancel notification is sent from the client to the server to cancel a progress initiated on the server side.
-func (c *Client) WorkDoneProgressCancel(ctx context.Context, params protocol.WorkDoneProgressCancelParams) error {
- return c.Notify(ctx, "window/workDoneProgress/cancel", params)
-}
-
-// DidCreateFiles sends a workspace/didCreateFiles notification to the LSP server.
-// The did create files notification is sent from the client to the server when files were created from within the client. Since 3.16.0
-func (c *Client) DidCreateFiles(ctx context.Context, params protocol.CreateFilesParams) error {
- return c.Notify(ctx, "workspace/didCreateFiles", params)
-}
-
-// DidRenameFiles sends a workspace/didRenameFiles notification to the LSP server.
-// The did rename files notification is sent from the client to the server when files were renamed from within the client. Since 3.16.0
-func (c *Client) DidRenameFiles(ctx context.Context, params protocol.RenameFilesParams) error {
- return c.Notify(ctx, "workspace/didRenameFiles", params)
-}
-
-// DidDeleteFiles sends a workspace/didDeleteFiles notification to the LSP server.
-// The will delete files request is sent from the client to the server before files are actually deleted as long as the deletion is triggered from within the client. Since 3.16.0
-func (c *Client) DidDeleteFiles(ctx context.Context, params protocol.DeleteFilesParams) error {
- return c.Notify(ctx, "workspace/didDeleteFiles", params)
-}
-
-// DidOpenNotebookDocument sends a notebookDocument/didOpen notification to the LSP server.
-// A notification sent when a notebook opens. Since 3.17.0
-func (c *Client) DidOpenNotebookDocument(ctx context.Context, params protocol.DidOpenNotebookDocumentParams) error {
- return c.Notify(ctx, "notebookDocument/didOpen", params)
-}
-
-// DidChangeNotebookDocument sends a notebookDocument/didChange notification to the LSP server.
-func (c *Client) DidChangeNotebookDocument(ctx context.Context, params protocol.DidChangeNotebookDocumentParams) error {
- return c.Notify(ctx, "notebookDocument/didChange", params)
-}
-
-// DidSaveNotebookDocument sends a notebookDocument/didSave notification to the LSP server.
-// A notification sent when a notebook document is saved. Since 3.17.0
-func (c *Client) DidSaveNotebookDocument(ctx context.Context, params protocol.DidSaveNotebookDocumentParams) error {
- return c.Notify(ctx, "notebookDocument/didSave", params)
-}
-
-// DidCloseNotebookDocument sends a notebookDocument/didClose notification to the LSP server.
-// A notification sent when a notebook closes. Since 3.17.0
-func (c *Client) DidCloseNotebookDocument(ctx context.Context, params protocol.DidCloseNotebookDocumentParams) error {
- return c.Notify(ctx, "notebookDocument/didClose", params)
-}
-
-// Initialized sends a initialized notification to the LSP server.
-// The initialized notification is sent from the client to the server after the client is fully initialized and the server is allowed to send requests from the server to the client.
-func (c *Client) Initialized(ctx context.Context, params protocol.InitializedParams) error {
- return c.Notify(ctx, "initialized", params)
-}
-
-// Exit sends a exit notification to the LSP server.
-// The exit event is sent from the client to the server to ask the server to exit its process.
-func (c *Client) Exit(ctx context.Context) error {
- return c.Notify(ctx, "exit", nil)
-}
-
-// DidChangeConfiguration sends a workspace/didChangeConfiguration notification to the LSP server.
-// The configuration change notification is sent from the client to the server when the client's configuration has changed. The notification contains the changed configuration as defined by the language client.
-func (c *Client) DidChangeConfiguration(ctx context.Context, params protocol.DidChangeConfigurationParams) error {
- return c.Notify(ctx, "workspace/didChangeConfiguration", params)
-}
-
-// DidOpen sends a textDocument/didOpen notification to the LSP server.
-// The document open notification is sent from the client to the server to signal newly opened text documents. The document's truth is now managed by the client and the server must not try to read the document's truth using the document's uri. Open in this sense means it is managed by the client. It doesn't necessarily mean that its content is presented in an editor. An open notification must not be sent more than once without a corresponding close notification send before. This means open and close notification must be balanced and the max open count is one.
-func (c *Client) DidOpen(ctx context.Context, params protocol.DidOpenTextDocumentParams) error {
- return c.Notify(ctx, "textDocument/didOpen", params)
-}
-
-// DidChange sends a textDocument/didChange notification to the LSP server.
-// The document change notification is sent from the client to the server to signal changes to a text document.
-func (c *Client) DidChange(ctx context.Context, params protocol.DidChangeTextDocumentParams) error {
- return c.Notify(ctx, "textDocument/didChange", params)
-}
-
-// DidClose sends a textDocument/didClose notification to the LSP server.
-// The document close notification is sent from the client to the server when the document got closed in the client. The document's truth now exists where the document's uri points to (e.g. if the document's uri is a file uri the truth now exists on disk). As with the open notification the close notification is about managing the document's content. Receiving a close notification doesn't mean that the document was open in an editor before. A close notification requires a previous open notification to be sent.
-func (c *Client) DidClose(ctx context.Context, params protocol.DidCloseTextDocumentParams) error {
- return c.Notify(ctx, "textDocument/didClose", params)
-}
-
-// DidSave sends a textDocument/didSave notification to the LSP server.
-// The document save notification is sent from the client to the server when the document got saved in the client.
-func (c *Client) DidSave(ctx context.Context, params protocol.DidSaveTextDocumentParams) error {
- return c.Notify(ctx, "textDocument/didSave", params)
-}
-
-// WillSave sends a textDocument/willSave notification to the LSP server.
-// A document will save notification is sent from the client to the server before the document is actually saved.
-func (c *Client) WillSave(ctx context.Context, params protocol.WillSaveTextDocumentParams) error {
- return c.Notify(ctx, "textDocument/willSave", params)
-}
-
-// DidChangeWatchedFiles sends a workspace/didChangeWatchedFiles notification to the LSP server.
-// The watched files notification is sent from the client to the server when the client detects changes to file watched by the language client.
-func (c *Client) DidChangeWatchedFiles(ctx context.Context, params protocol.DidChangeWatchedFilesParams) error {
- return c.Notify(ctx, "workspace/didChangeWatchedFiles", params)
-}
-
-// SetTrace sends a $/setTrace notification to the LSP server.
-func (c *Client) SetTrace(ctx context.Context, params protocol.SetTraceParams) error {
- return c.Notify(ctx, "$/setTrace", params)
-}
-
-// Progress sends a $/progress notification to the LSP server.
-func (c *Client) Progress(ctx context.Context, params protocol.ProgressParams) error {
- return c.Notify(ctx, "$/progress", params)
-}
diff --git a/internal/lsp/protocol.go b/internal/lsp/protocol.go
deleted file mode 100644
index e70e2824b5fbdfdb2055b9bb827cce8c4d3ed850..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol.go
+++ /dev/null
@@ -1,48 +0,0 @@
-package lsp
-
-import (
- "encoding/json"
-)
-
-// Message represents a JSON-RPC 2.0 message
-type Message struct {
- JSONRPC string `json:"jsonrpc"`
- ID int32 `json:"id,omitempty"`
- Method string `json:"method,omitempty"`
- Params json.RawMessage `json:"params,omitempty"`
- Result json.RawMessage `json:"result,omitempty"`
- Error *ResponseError `json:"error,omitempty"`
-}
-
-// ResponseError represents a JSON-RPC 2.0 error
-type ResponseError struct {
- Code int `json:"code"`
- Message string `json:"message"`
-}
-
-func NewRequest(id int32, method string, params any) (*Message, error) {
- paramsJSON, err := json.Marshal(params)
- if err != nil {
- return nil, err
- }
-
- return &Message{
- JSONRPC: "2.0",
- ID: id,
- Method: method,
- Params: paramsJSON,
- }, nil
-}
-
-func NewNotification(method string, params any) (*Message, error) {
- paramsJSON, err := json.Marshal(params)
- if err != nil {
- return nil, err
- }
-
- return &Message{
- JSONRPC: "2.0",
- Method: method,
- Params: paramsJSON,
- }, nil
-}
diff --git a/internal/lsp/protocol/LICENSE b/internal/lsp/protocol/LICENSE
deleted file mode 100644
index 2a7cf70da6e498df9c11ab6a5eaa2ddd7af34da4..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-Copyright 2009 The Go Authors.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
- * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
- * Neither the name of Google LLC nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/internal/lsp/protocol/interface.go b/internal/lsp/protocol/interface.go
deleted file mode 100644
index 89311b31c8398db1d2da63fc15961294ff79badd..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol/interface.go
+++ /dev/null
@@ -1,117 +0,0 @@
-package protocol
-
-import "fmt"
-
-// WorkspaceSymbolResult is an interface for types that represent workspace symbols
-type WorkspaceSymbolResult interface {
- GetName() string
- GetLocation() Location
- isWorkspaceSymbol() // marker method
-}
-
-func (ws *WorkspaceSymbol) GetName() string { return ws.Name }
-func (ws *WorkspaceSymbol) GetLocation() Location {
- switch v := ws.Location.Value.(type) {
- case Location:
- return v
- case LocationUriOnly:
- return Location{URI: v.URI}
- }
- return Location{}
-}
-func (ws *WorkspaceSymbol) isWorkspaceSymbol() {}
-
-func (si *SymbolInformation) GetName() string { return si.Name }
-func (si *SymbolInformation) GetLocation() Location { return si.Location }
-func (si *SymbolInformation) isWorkspaceSymbol() {}
-
-// Results converts the Value to a slice of WorkspaceSymbolResult
-func (r Or_Result_workspace_symbol) Results() ([]WorkspaceSymbolResult, error) {
- if r.Value == nil {
- return make([]WorkspaceSymbolResult, 0), nil
- }
- switch v := r.Value.(type) {
- case []WorkspaceSymbol:
- results := make([]WorkspaceSymbolResult, len(v))
- for i := range v {
- results[i] = &v[i]
- }
- return results, nil
- case []SymbolInformation:
- results := make([]WorkspaceSymbolResult, len(v))
- for i := range v {
- results[i] = &v[i]
- }
- return results, nil
- default:
- return nil, fmt.Errorf("unknown symbol type: %T", r.Value)
- }
-}
-
-// DocumentSymbolResult is an interface for types that represent document symbols
-type DocumentSymbolResult interface {
- GetRange() Range
- GetName() string
- isDocumentSymbol() // marker method
-}
-
-func (ds *DocumentSymbol) GetRange() Range { return ds.Range }
-func (ds *DocumentSymbol) GetName() string { return ds.Name }
-func (ds *DocumentSymbol) isDocumentSymbol() {}
-
-func (si *SymbolInformation) GetRange() Range { return si.Location.Range }
-
-// Note: SymbolInformation already has GetName() implemented above
-func (si *SymbolInformation) isDocumentSymbol() {}
-
-// Results converts the Value to a slice of DocumentSymbolResult
-func (r Or_Result_textDocument_documentSymbol) Results() ([]DocumentSymbolResult, error) {
- if r.Value == nil {
- return make([]DocumentSymbolResult, 0), nil
- }
- switch v := r.Value.(type) {
- case []DocumentSymbol:
- results := make([]DocumentSymbolResult, len(v))
- for i := range v {
- results[i] = &v[i]
- }
- return results, nil
- case []SymbolInformation:
- results := make([]DocumentSymbolResult, len(v))
- for i := range v {
- results[i] = &v[i]
- }
- return results, nil
- default:
- return nil, fmt.Errorf("unknown document symbol type: %T", v)
- }
-}
-
-// TextEditResult is an interface for types that can be used as text edits
-type TextEditResult interface {
- GetRange() Range
- GetNewText() string
- isTextEdit() // marker method
-}
-
-func (te *TextEdit) GetRange() Range { return te.Range }
-func (te *TextEdit) GetNewText() string { return te.NewText }
-func (te *TextEdit) isTextEdit() {}
-
-// AsTextEdit converts Or_TextDocumentEdit_edits_Elem to TextEdit
-func (e Or_TextDocumentEdit_edits_Elem) AsTextEdit() (TextEdit, error) {
- if e.Value == nil {
- return TextEdit{}, fmt.Errorf("nil text edit")
- }
- switch v := e.Value.(type) {
- case TextEdit:
- return v, nil
- case AnnotatedTextEdit:
- return TextEdit{
- Range: v.Range,
- NewText: v.NewText,
- }, nil
- default:
- return TextEdit{}, fmt.Errorf("unknown text edit type: %T", e.Value)
- }
-}
diff --git a/internal/lsp/protocol/pattern_interfaces.go b/internal/lsp/protocol/pattern_interfaces.go
deleted file mode 100644
index 5cb5dbb84ea385d96ac33fa2075d6590872da3cd..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol/pattern_interfaces.go
+++ /dev/null
@@ -1,73 +0,0 @@
-package protocol
-
-import (
- "fmt"
- "log/slog"
-)
-
-// PatternInfo is an interface for types that represent glob patterns
-type PatternInfo interface {
- GetPattern() string
- GetBasePath() string
- isPattern() // marker method
-}
-
-// StringPattern implements PatternInfo for string patterns
-type StringPattern struct {
- Pattern string
-}
-
-func (p StringPattern) GetPattern() string { return p.Pattern }
-func (p StringPattern) GetBasePath() string { return "" }
-func (p StringPattern) isPattern() {}
-
-// RelativePatternInfo implements PatternInfo for RelativePattern
-type RelativePatternInfo struct {
- RP RelativePattern
- BasePath string
-}
-
-func (p RelativePatternInfo) GetPattern() string { return string(p.RP.Pattern) }
-func (p RelativePatternInfo) GetBasePath() string { return p.BasePath }
-func (p RelativePatternInfo) isPattern() {}
-
-// AsPattern converts GlobPattern to a PatternInfo object
-func (g *GlobPattern) AsPattern() (PatternInfo, error) {
- if g.Value == nil {
- return nil, fmt.Errorf("nil pattern")
- }
-
- var err error
-
- switch v := g.Value.(type) {
- case string:
- return StringPattern{Pattern: v}, nil
-
- case RelativePattern:
- // Handle BaseURI which could be string or DocumentUri
- basePath := ""
- switch baseURI := v.BaseURI.Value.(type) {
- case string:
- basePath, err = DocumentURI(baseURI).Path()
- if err != nil {
- slog.Error("Failed to convert URI to path", "uri", baseURI, "error", err)
- return nil, fmt.Errorf("invalid URI: %s", baseURI)
- }
-
- case DocumentURI:
- basePath, err = baseURI.Path()
- if err != nil {
- slog.Error("Failed to convert DocumentURI to path", "uri", baseURI, "error", err)
- return nil, fmt.Errorf("invalid DocumentURI: %s", baseURI)
- }
-
- default:
- return nil, fmt.Errorf("unknown BaseURI type: %T", v.BaseURI.Value)
- }
-
- return RelativePatternInfo{RP: v, BasePath: basePath}, nil
-
- default:
- return nil, fmt.Errorf("unknown pattern type: %T", g.Value)
- }
-}
diff --git a/internal/lsp/protocol/tables.go b/internal/lsp/protocol/tables.go
deleted file mode 100644
index 6a8fb99e0a27ce77906ead6f04b71539b741f181..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol/tables.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package protocol
-
-var TableKindMap = map[SymbolKind]string{
- File: "File",
- Module: "Module",
- Namespace: "Namespace",
- Package: "Package",
- Class: "Class",
- Method: "Method",
- Property: "Property",
- Field: "Field",
- Constructor: "Constructor",
- Enum: "Enum",
- Interface: "Interface",
- Function: "Function",
- Variable: "Variable",
- Constant: "Constant",
- String: "String",
- Number: "Number",
- Boolean: "Boolean",
- Array: "Array",
- Object: "Object",
- Key: "Key",
- Null: "Null",
- EnumMember: "EnumMember",
- Struct: "Struct",
- Event: "Event",
- Operator: "Operator",
- TypeParameter: "TypeParameter",
-}
diff --git a/internal/lsp/protocol/tsdocument-changes.go b/internal/lsp/protocol/tsdocument-changes.go
deleted file mode 100644
index f18825719efad72e04502094931280e78ccbad59..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol/tsdocument-changes.go
+++ /dev/null
@@ -1,81 +0,0 @@
-// Copyright 2022 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package protocol
-
-import (
- "encoding/json"
- "fmt"
-)
-
-// DocumentChange is a union of various file edit operations.
-//
-// Exactly one field of this struct is non-nil; see [DocumentChange.Valid].
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#resourceChanges
-type DocumentChange struct {
- TextDocumentEdit *TextDocumentEdit
- CreateFile *CreateFile
- RenameFile *RenameFile
- DeleteFile *DeleteFile
-}
-
-// Valid reports whether the DocumentChange sum-type value is valid,
-// that is, exactly one of create, delete, edit, or rename.
-func (d DocumentChange) Valid() bool {
- n := 0
- if d.TextDocumentEdit != nil {
- n++
- }
- if d.CreateFile != nil {
- n++
- }
- if d.RenameFile != nil {
- n++
- }
- if d.DeleteFile != nil {
- n++
- }
- return n == 1
-}
-
-func (d *DocumentChange) UnmarshalJSON(data []byte) error {
- var m map[string]any
- if err := json.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if _, ok := m["textDocument"]; ok {
- d.TextDocumentEdit = new(TextDocumentEdit)
- return json.Unmarshal(data, d.TextDocumentEdit)
- }
-
- // The {Create,Rename,Delete}File types all share a 'kind' field.
- kind := m["kind"]
- switch kind {
- case "create":
- d.CreateFile = new(CreateFile)
- return json.Unmarshal(data, d.CreateFile)
- case "rename":
- d.RenameFile = new(RenameFile)
- return json.Unmarshal(data, d.RenameFile)
- case "delete":
- d.DeleteFile = new(DeleteFile)
- return json.Unmarshal(data, d.DeleteFile)
- }
- return fmt.Errorf("DocumentChanges: unexpected kind: %q", kind)
-}
-
-func (d *DocumentChange) MarshalJSON() ([]byte, error) {
- if d.TextDocumentEdit != nil {
- return json.Marshal(d.TextDocumentEdit)
- } else if d.CreateFile != nil {
- return json.Marshal(d.CreateFile)
- } else if d.RenameFile != nil {
- return json.Marshal(d.RenameFile)
- } else if d.DeleteFile != nil {
- return json.Marshal(d.DeleteFile)
- }
- return nil, fmt.Errorf("empty DocumentChanges union value")
-}
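For reviewers skimming this deletion: the DocumentChange union above is told apart purely by wire shape, keying on a top-level "textDocument" property versus the shared "kind" discriminator. Below is a self-contained sketch of that dispatch with trimmed stand-in types rather than the real protocol structs; the sample payload fields mirror the LSP shapes but are not load-bearing here.

package main

import (
	"encoding/json"
	"fmt"
)

// change is a simplified stand-in for the deleted DocumentChange union;
// exactly one field is set after a successful decode.
type change struct {
	TextDocumentEdit json.RawMessage
	CreateFile       json.RawMessage
	RenameFile       json.RawMessage
	DeleteFile       json.RawMessage
}

func decodeChange(data []byte) (change, error) {
	var probe map[string]any
	if err := json.Unmarshal(data, &probe); err != nil {
		return change{}, err
	}
	var c change
	// A "textDocument" property marks a TextDocumentEdit; otherwise the
	// Create/Rename/DeleteFile arms share a "kind" discriminator.
	if _, ok := probe["textDocument"]; ok {
		c.TextDocumentEdit = data
		return c, nil
	}
	switch probe["kind"] {
	case "create":
		c.CreateFile = data
	case "rename":
		c.RenameFile = data
	case "delete":
		c.DeleteFile = data
	default:
		return change{}, fmt.Errorf("unexpected kind: %q", probe["kind"])
	}
	return c, nil
}

func main() {
	c, err := decodeChange([]byte(`{"kind":"rename","oldUri":"file:///a.go","newUri":"file:///b.go"}`))
	if err != nil {
		panic(err)
	}
	fmt.Println("rename arm set:", c.RenameFile != nil)
}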
diff --git a/internal/lsp/protocol/tsjson.go b/internal/lsp/protocol/tsjson.go
deleted file mode 100644
index 3cf7275245a5dc532c52e03024652fceda6e713a..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol/tsjson.go
+++ /dev/null
@@ -1,3073 +0,0 @@
-// Copyright 2023 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Code generated for LSP. DO NOT EDIT.
-
-package protocol
-
-// Code generated from protocol/metaModel.json at ref release/protocol/3.17.6-next.9 (hash c94395b5da53729e6dff931293b051009ccaaaa4).
-// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.6-next.9/protocol/metaModel.json
-// LSP metaData.version = 3.17.0.
-
-import (
- "bytes"
- "encoding/json"
- "fmt"
-)
-
-// UnmarshalError indicates that a JSON value did not conform to
-// one of the expected cases of an LSP union type.
-type UnmarshalError struct {
- msg string
-}
-
-func (e UnmarshalError) Error() string {
- return e.msg
-}
-func (t Or_CancelParams_id) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case int32:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [int32 string]", t)
-}
-
-func (t *Or_CancelParams_id) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder41 := json.NewDecoder(bytes.NewReader(x))
- decoder41.DisallowUnknownFields()
- var int32Val int32
- if err := decoder41.Decode(&int32Val); err == nil {
- t.Value = int32Val
- return nil
- }
- decoder42 := json.NewDecoder(bytes.NewReader(x))
- decoder42.DisallowUnknownFields()
- var stringVal string
- if err := decoder42.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [int32 string]"}
-}
-
-func (t Or_ClientSemanticTokensRequestOptions_full) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case ClientSemanticTokensRequestFullDelta:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [ClientSemanticTokensRequestFullDelta bool]", t)
-}
-
-func (t *Or_ClientSemanticTokensRequestOptions_full) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder220 := json.NewDecoder(bytes.NewReader(x))
- decoder220.DisallowUnknownFields()
- var boolVal bool
- if err := decoder220.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder221 := json.NewDecoder(bytes.NewReader(x))
- decoder221.DisallowUnknownFields()
- var h221 ClientSemanticTokensRequestFullDelta
- if err := decoder221.Decode(&h221); err == nil {
- t.Value = h221
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [ClientSemanticTokensRequestFullDelta bool]"}
-}
-
-func (t Or_ClientSemanticTokensRequestOptions_range) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Lit_ClientSemanticTokensRequestOptions_range_Item1:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Lit_ClientSemanticTokensRequestOptions_range_Item1 bool]", t)
-}
-
-func (t *Or_ClientSemanticTokensRequestOptions_range) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder217 := json.NewDecoder(bytes.NewReader(x))
- decoder217.DisallowUnknownFields()
- var boolVal bool
- if err := decoder217.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder218 := json.NewDecoder(bytes.NewReader(x))
- decoder218.DisallowUnknownFields()
- var h218 Lit_ClientSemanticTokensRequestOptions_range_Item1
- if err := decoder218.Decode(&h218); err == nil {
- t.Value = h218
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Lit_ClientSemanticTokensRequestOptions_range_Item1 bool]"}
-}
-
-func (t Or_CompletionItemDefaults_editRange) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case EditRangeWithInsertReplace:
- return json.Marshal(x)
- case Range:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [EditRangeWithInsertReplace Range]", t)
-}
-
-func (t *Or_CompletionItemDefaults_editRange) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder183 := json.NewDecoder(bytes.NewReader(x))
- decoder183.DisallowUnknownFields()
- var h183 EditRangeWithInsertReplace
- if err := decoder183.Decode(&h183); err == nil {
- t.Value = h183
- return nil
- }
- decoder184 := json.NewDecoder(bytes.NewReader(x))
- decoder184.DisallowUnknownFields()
- var h184 Range
- if err := decoder184.Decode(&h184); err == nil {
- t.Value = h184
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [EditRangeWithInsertReplace Range]"}
-}
-
-func (t Or_CompletionItem_documentation) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkupContent:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t)
-}
-
-func (t *Or_CompletionItem_documentation) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder25 := json.NewDecoder(bytes.NewReader(x))
- decoder25.DisallowUnknownFields()
- var stringVal string
- if err := decoder25.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder26 := json.NewDecoder(bytes.NewReader(x))
- decoder26.DisallowUnknownFields()
- var h26 MarkupContent
- if err := decoder26.Decode(&h26); err == nil {
- t.Value = h26
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"}
-}
-
-func (t Or_CompletionItem_textEdit) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InsertReplaceEdit:
- return json.Marshal(x)
- case TextEdit:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InsertReplaceEdit TextEdit]", t)
-}
-
-func (t *Or_CompletionItem_textEdit) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder29 := json.NewDecoder(bytes.NewReader(x))
- decoder29.DisallowUnknownFields()
- var h29 InsertReplaceEdit
- if err := decoder29.Decode(&h29); err == nil {
- t.Value = h29
- return nil
- }
- decoder30 := json.NewDecoder(bytes.NewReader(x))
- decoder30.DisallowUnknownFields()
- var h30 TextEdit
- if err := decoder30.Decode(&h30); err == nil {
- t.Value = h30
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InsertReplaceEdit TextEdit]"}
-}
-
-func (t Or_Declaration) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Location:
- return json.Marshal(x)
- case []Location:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Location []Location]", t)
-}
-
-func (t *Or_Declaration) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder237 := json.NewDecoder(bytes.NewReader(x))
- decoder237.DisallowUnknownFields()
- var h237 Location
- if err := decoder237.Decode(&h237); err == nil {
- t.Value = h237
- return nil
- }
- decoder238 := json.NewDecoder(bytes.NewReader(x))
- decoder238.DisallowUnknownFields()
- var h238 []Location
- if err := decoder238.Decode(&h238); err == nil {
- t.Value = h238
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Location []Location]"}
-}
-
-func (t Or_Definition) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Location:
- return json.Marshal(x)
- case []Location:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Location []Location]", t)
-}
-
-func (t *Or_Definition) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder224 := json.NewDecoder(bytes.NewReader(x))
- decoder224.DisallowUnknownFields()
- var h224 Location
- if err := decoder224.Decode(&h224); err == nil {
- t.Value = h224
- return nil
- }
- decoder225 := json.NewDecoder(bytes.NewReader(x))
- decoder225.DisallowUnknownFields()
- var h225 []Location
- if err := decoder225.Decode(&h225); err == nil {
- t.Value = h225
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Location []Location]"}
-}
-
-func (t Or_Diagnostic_code) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case int32:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [int32 string]", t)
-}
-
-func (t *Or_Diagnostic_code) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder179 := json.NewDecoder(bytes.NewReader(x))
- decoder179.DisallowUnknownFields()
- var int32Val int32
- if err := decoder179.Decode(&int32Val); err == nil {
- t.Value = int32Val
- return nil
- }
- decoder180 := json.NewDecoder(bytes.NewReader(x))
- decoder180.DisallowUnknownFields()
- var stringVal string
- if err := decoder180.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [int32 string]"}
-}
-
-func (t Or_DidChangeConfigurationRegistrationOptions_section) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case []string:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [[]string string]", t)
-}
-
-func (t *Or_DidChangeConfigurationRegistrationOptions_section) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder22 := json.NewDecoder(bytes.NewReader(x))
- decoder22.DisallowUnknownFields()
- var stringVal string
- if err := decoder22.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder23 := json.NewDecoder(bytes.NewReader(x))
- decoder23.DisallowUnknownFields()
- var h23 []string
- if err := decoder23.Decode(&h23); err == nil {
- t.Value = h23
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [[]string string]"}
-}
-
-func (t Or_DocumentDiagnosticReport) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case RelatedFullDocumentDiagnosticReport:
- return json.Marshal(x)
- case RelatedUnchangedDocumentDiagnosticReport:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [RelatedFullDocumentDiagnosticReport RelatedUnchangedDocumentDiagnosticReport]", t)
-}
-
-func (t *Or_DocumentDiagnosticReport) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder247 := json.NewDecoder(bytes.NewReader(x))
- decoder247.DisallowUnknownFields()
- var h247 RelatedFullDocumentDiagnosticReport
- if err := decoder247.Decode(&h247); err == nil {
- t.Value = h247
- return nil
- }
- decoder248 := json.NewDecoder(bytes.NewReader(x))
- decoder248.DisallowUnknownFields()
- var h248 RelatedUnchangedDocumentDiagnosticReport
- if err := decoder248.Decode(&h248); err == nil {
- t.Value = h248
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [RelatedFullDocumentDiagnosticReport RelatedUnchangedDocumentDiagnosticReport]"}
-}
-
-func (t Or_DocumentDiagnosticReportPartialResult_relatedDocuments_Value) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case FullDocumentDiagnosticReport:
- return json.Marshal(x)
- case UnchangedDocumentDiagnosticReport:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]", t)
-}
-
-func (t *Or_DocumentDiagnosticReportPartialResult_relatedDocuments_Value) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder16 := json.NewDecoder(bytes.NewReader(x))
- decoder16.DisallowUnknownFields()
- var h16 FullDocumentDiagnosticReport
- if err := decoder16.Decode(&h16); err == nil {
- t.Value = h16
- return nil
- }
- decoder17 := json.NewDecoder(bytes.NewReader(x))
- decoder17.DisallowUnknownFields()
- var h17 UnchangedDocumentDiagnosticReport
- if err := decoder17.Decode(&h17); err == nil {
- t.Value = h17
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]"}
-}
-
-func (t Or_DocumentFilter) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookCellTextDocumentFilter:
- return json.Marshal(x)
- case TextDocumentFilter:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookCellTextDocumentFilter TextDocumentFilter]", t)
-}
-
-func (t *Or_DocumentFilter) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder270 := json.NewDecoder(bytes.NewReader(x))
- decoder270.DisallowUnknownFields()
- var h270 NotebookCellTextDocumentFilter
- if err := decoder270.Decode(&h270); err == nil {
- t.Value = h270
- return nil
- }
- decoder271 := json.NewDecoder(bytes.NewReader(x))
- decoder271.DisallowUnknownFields()
- var h271 TextDocumentFilter
- if err := decoder271.Decode(&h271); err == nil {
- t.Value = h271
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookCellTextDocumentFilter TextDocumentFilter]"}
-}
-
-func (t Or_GlobPattern) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Pattern:
- return json.Marshal(x)
- case RelativePattern:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Pattern RelativePattern]", t)
-}
-
-func (t *Or_GlobPattern) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder274 := json.NewDecoder(bytes.NewReader(x))
- decoder274.DisallowUnknownFields()
- var h274 Pattern
- if err := decoder274.Decode(&h274); err == nil {
- t.Value = h274
- return nil
- }
- decoder275 := json.NewDecoder(bytes.NewReader(x))
- decoder275.DisallowUnknownFields()
- var h275 RelativePattern
- if err := decoder275.Decode(&h275); err == nil {
- t.Value = h275
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Pattern RelativePattern]"}
-}
-
-func (t Or_Hover_contents) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkedString:
- return json.Marshal(x)
- case MarkupContent:
- return json.Marshal(x)
- case []MarkedString:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkedString MarkupContent []MarkedString]", t)
-}
-
-func (t *Or_Hover_contents) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder34 := json.NewDecoder(bytes.NewReader(x))
- decoder34.DisallowUnknownFields()
- var h34 MarkedString
- if err := decoder34.Decode(&h34); err == nil {
- t.Value = h34
- return nil
- }
- decoder35 := json.NewDecoder(bytes.NewReader(x))
- decoder35.DisallowUnknownFields()
- var h35 MarkupContent
- if err := decoder35.Decode(&h35); err == nil {
- t.Value = h35
- return nil
- }
- decoder36 := json.NewDecoder(bytes.NewReader(x))
- decoder36.DisallowUnknownFields()
- var h36 []MarkedString
- if err := decoder36.Decode(&h36); err == nil {
- t.Value = h36
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkedString MarkupContent []MarkedString]"}
-}
-
-func (t Or_InlayHintLabelPart_tooltip) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkupContent:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t)
-}
-
-func (t *Or_InlayHintLabelPart_tooltip) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder56 := json.NewDecoder(bytes.NewReader(x))
- decoder56.DisallowUnknownFields()
- var stringVal string
- if err := decoder56.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder57 := json.NewDecoder(bytes.NewReader(x))
- decoder57.DisallowUnknownFields()
- var h57 MarkupContent
- if err := decoder57.Decode(&h57); err == nil {
- t.Value = h57
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"}
-}
-
-func (t Or_InlayHint_label) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case []InlayHintLabelPart:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [[]InlayHintLabelPart string]", t)
-}
-
-func (t *Or_InlayHint_label) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder9 := json.NewDecoder(bytes.NewReader(x))
- decoder9.DisallowUnknownFields()
- var stringVal string
- if err := decoder9.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder10 := json.NewDecoder(bytes.NewReader(x))
- decoder10.DisallowUnknownFields()
- var h10 []InlayHintLabelPart
- if err := decoder10.Decode(&h10); err == nil {
- t.Value = h10
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [[]InlayHintLabelPart string]"}
-}
-
-func (t Or_InlayHint_tooltip) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkupContent:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t)
-}
-
-func (t *Or_InlayHint_tooltip) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder12 := json.NewDecoder(bytes.NewReader(x))
- decoder12.DisallowUnknownFields()
- var stringVal string
- if err := decoder12.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder13 := json.NewDecoder(bytes.NewReader(x))
- decoder13.DisallowUnknownFields()
- var h13 MarkupContent
- if err := decoder13.Decode(&h13); err == nil {
- t.Value = h13
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"}
-}
-
-func (t Or_InlineCompletionItem_insertText) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case StringValue:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [StringValue string]", t)
-}
-
-func (t *Or_InlineCompletionItem_insertText) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder19 := json.NewDecoder(bytes.NewReader(x))
- decoder19.DisallowUnknownFields()
- var stringVal string
- if err := decoder19.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder20 := json.NewDecoder(bytes.NewReader(x))
- decoder20.DisallowUnknownFields()
- var h20 StringValue
- if err := decoder20.Decode(&h20); err == nil {
- t.Value = h20
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [StringValue string]"}
-}
-
-func (t Or_InlineValue) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InlineValueEvaluatableExpression:
- return json.Marshal(x)
- case InlineValueText:
- return json.Marshal(x)
- case InlineValueVariableLookup:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InlineValueEvaluatableExpression InlineValueText InlineValueVariableLookup]", t)
-}
-
-func (t *Or_InlineValue) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder242 := json.NewDecoder(bytes.NewReader(x))
- decoder242.DisallowUnknownFields()
- var h242 InlineValueEvaluatableExpression
- if err := decoder242.Decode(&h242); err == nil {
- t.Value = h242
- return nil
- }
- decoder243 := json.NewDecoder(bytes.NewReader(x))
- decoder243.DisallowUnknownFields()
- var h243 InlineValueText
- if err := decoder243.Decode(&h243); err == nil {
- t.Value = h243
- return nil
- }
- decoder244 := json.NewDecoder(bytes.NewReader(x))
- decoder244.DisallowUnknownFields()
- var h244 InlineValueVariableLookup
- if err := decoder244.Decode(&h244); err == nil {
- t.Value = h244
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InlineValueEvaluatableExpression InlineValueText InlineValueVariableLookup]"}
-}
-
-func (t Or_LSPAny) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case LSPArray:
- return json.Marshal(x)
- case LSPObject:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case float64:
- return json.Marshal(x)
- case int32:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case uint32:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [LSPArray LSPObject bool float64 int32 string uint32]", t)
-}
-
-func (t *Or_LSPAny) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder228 := json.NewDecoder(bytes.NewReader(x))
- decoder228.DisallowUnknownFields()
- var boolVal bool
- if err := decoder228.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder229 := json.NewDecoder(bytes.NewReader(x))
- decoder229.DisallowUnknownFields()
- var float64Val float64
- if err := decoder229.Decode(&float64Val); err == nil {
- t.Value = float64Val
- return nil
- }
- decoder230 := json.NewDecoder(bytes.NewReader(x))
- decoder230.DisallowUnknownFields()
- var int32Val int32
- if err := decoder230.Decode(&int32Val); err == nil {
- t.Value = int32Val
- return nil
- }
- decoder231 := json.NewDecoder(bytes.NewReader(x))
- decoder231.DisallowUnknownFields()
- var stringVal string
- if err := decoder231.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder232 := json.NewDecoder(bytes.NewReader(x))
- decoder232.DisallowUnknownFields()
- var uint32Val uint32
- if err := decoder232.Decode(&uint32Val); err == nil {
- t.Value = uint32Val
- return nil
- }
- decoder233 := json.NewDecoder(bytes.NewReader(x))
- decoder233.DisallowUnknownFields()
- var h233 LSPArray
- if err := decoder233.Decode(&h233); err == nil {
- t.Value = h233
- return nil
- }
- decoder234 := json.NewDecoder(bytes.NewReader(x))
- decoder234.DisallowUnknownFields()
- var h234 LSPObject
- if err := decoder234.Decode(&h234); err == nil {
- t.Value = h234
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [LSPArray LSPObject bool float64 int32 string uint32]"}
-}
-
-func (t Or_MarkedString) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkedStringWithLanguage:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkedStringWithLanguage string]", t)
-}
-
-func (t *Or_MarkedString) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder266 := json.NewDecoder(bytes.NewReader(x))
- decoder266.DisallowUnknownFields()
- var stringVal string
- if err := decoder266.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder267 := json.NewDecoder(bytes.NewReader(x))
- decoder267.DisallowUnknownFields()
- var h267 MarkedStringWithLanguage
- if err := decoder267.Decode(&h267); err == nil {
- t.Value = h267
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkedStringWithLanguage string]"}
-}
-
-func (t Or_NotebookCellTextDocumentFilter_notebook) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentFilter:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilter string]", t)
-}
-
-func (t *Or_NotebookCellTextDocumentFilter_notebook) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder208 := json.NewDecoder(bytes.NewReader(x))
- decoder208.DisallowUnknownFields()
- var stringVal string
- if err := decoder208.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder209 := json.NewDecoder(bytes.NewReader(x))
- decoder209.DisallowUnknownFields()
- var h209 NotebookDocumentFilter
- if err := decoder209.Decode(&h209); err == nil {
- t.Value = h209
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilter string]"}
-}
-
-func (t Or_NotebookDocumentFilter) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentFilterNotebookType:
- return json.Marshal(x)
- case NotebookDocumentFilterPattern:
- return json.Marshal(x)
- case NotebookDocumentFilterScheme:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilterNotebookType NotebookDocumentFilterPattern NotebookDocumentFilterScheme]", t)
-}
-
-func (t *Or_NotebookDocumentFilter) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder285 := json.NewDecoder(bytes.NewReader(x))
- decoder285.DisallowUnknownFields()
- var h285 NotebookDocumentFilterNotebookType
- if err := decoder285.Decode(&h285); err == nil {
- t.Value = h285
- return nil
- }
- decoder286 := json.NewDecoder(bytes.NewReader(x))
- decoder286.DisallowUnknownFields()
- var h286 NotebookDocumentFilterPattern
- if err := decoder286.Decode(&h286); err == nil {
- t.Value = h286
- return nil
- }
- decoder287 := json.NewDecoder(bytes.NewReader(x))
- decoder287.DisallowUnknownFields()
- var h287 NotebookDocumentFilterScheme
- if err := decoder287.Decode(&h287); err == nil {
- t.Value = h287
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilterNotebookType NotebookDocumentFilterPattern NotebookDocumentFilterScheme]"}
-}
-
-func (t Or_NotebookDocumentFilterWithCells_notebook) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentFilter:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilter string]", t)
-}
-
-func (t *Or_NotebookDocumentFilterWithCells_notebook) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder192 := json.NewDecoder(bytes.NewReader(x))
- decoder192.DisallowUnknownFields()
- var stringVal string
- if err := decoder192.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder193 := json.NewDecoder(bytes.NewReader(x))
- decoder193.DisallowUnknownFields()
- var h193 NotebookDocumentFilter
- if err := decoder193.Decode(&h193); err == nil {
- t.Value = h193
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilter string]"}
-}
-
-func (t Or_NotebookDocumentFilterWithNotebook_notebook) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentFilter:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilter string]", t)
-}
-
-func (t *Or_NotebookDocumentFilterWithNotebook_notebook) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder189 := json.NewDecoder(bytes.NewReader(x))
- decoder189.DisallowUnknownFields()
- var stringVal string
- if err := decoder189.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder190 := json.NewDecoder(bytes.NewReader(x))
- decoder190.DisallowUnknownFields()
- var h190 NotebookDocumentFilter
- if err := decoder190.Decode(&h190); err == nil {
- t.Value = h190
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilter string]"}
-}
-
-func (t Or_NotebookDocumentSyncOptions_notebookSelector_Elem) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentFilterWithCells:
- return json.Marshal(x)
- case NotebookDocumentFilterWithNotebook:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentFilterWithCells NotebookDocumentFilterWithNotebook]", t)
-}
-
-func (t *Or_NotebookDocumentSyncOptions_notebookSelector_Elem) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder68 := json.NewDecoder(bytes.NewReader(x))
- decoder68.DisallowUnknownFields()
- var h68 NotebookDocumentFilterWithCells
- if err := decoder68.Decode(&h68); err == nil {
- t.Value = h68
- return nil
- }
- decoder69 := json.NewDecoder(bytes.NewReader(x))
- decoder69.DisallowUnknownFields()
- var h69 NotebookDocumentFilterWithNotebook
- if err := decoder69.Decode(&h69); err == nil {
- t.Value = h69
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentFilterWithCells NotebookDocumentFilterWithNotebook]"}
-}
-
-func (t Or_ParameterInformation_documentation) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkupContent:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t)
-}
-
-func (t *Or_ParameterInformation_documentation) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder205 := json.NewDecoder(bytes.NewReader(x))
- decoder205.DisallowUnknownFields()
- var stringVal string
- if err := decoder205.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder206 := json.NewDecoder(bytes.NewReader(x))
- decoder206.DisallowUnknownFields()
- var h206 MarkupContent
- if err := decoder206.Decode(&h206); err == nil {
- t.Value = h206
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"}
-}
-
-func (t Or_ParameterInformation_label) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Tuple_ParameterInformation_label_Item1:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Tuple_ParameterInformation_label_Item1 string]", t)
-}
-
-func (t *Or_ParameterInformation_label) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder202 := json.NewDecoder(bytes.NewReader(x))
- decoder202.DisallowUnknownFields()
- var stringVal string
- if err := decoder202.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder203 := json.NewDecoder(bytes.NewReader(x))
- decoder203.DisallowUnknownFields()
- var h203 Tuple_ParameterInformation_label_Item1
- if err := decoder203.Decode(&h203); err == nil {
- t.Value = h203
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Tuple_ParameterInformation_label_Item1 string]"}
-}
-
-func (t Or_PrepareRenameResult) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case PrepareRenameDefaultBehavior:
- return json.Marshal(x)
- case PrepareRenamePlaceholder:
- return json.Marshal(x)
- case Range:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [PrepareRenameDefaultBehavior PrepareRenamePlaceholder Range]", t)
-}
-
-func (t *Or_PrepareRenameResult) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder252 := json.NewDecoder(bytes.NewReader(x))
- decoder252.DisallowUnknownFields()
- var h252 PrepareRenameDefaultBehavior
- if err := decoder252.Decode(&h252); err == nil {
- t.Value = h252
- return nil
- }
- decoder253 := json.NewDecoder(bytes.NewReader(x))
- decoder253.DisallowUnknownFields()
- var h253 PrepareRenamePlaceholder
- if err := decoder253.Decode(&h253); err == nil {
- t.Value = h253
- return nil
- }
- decoder254 := json.NewDecoder(bytes.NewReader(x))
- decoder254.DisallowUnknownFields()
- var h254 Range
- if err := decoder254.Decode(&h254); err == nil {
- t.Value = h254
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [PrepareRenameDefaultBehavior PrepareRenamePlaceholder Range]"}
-}
-
-func (t Or_ProgressToken) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case int32:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [int32 string]", t)
-}
-
-func (t *Or_ProgressToken) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder255 := json.NewDecoder(bytes.NewReader(x))
- decoder255.DisallowUnknownFields()
- var int32Val int32
- if err := decoder255.Decode(&int32Val); err == nil {
- t.Value = int32Val
- return nil
- }
- decoder256 := json.NewDecoder(bytes.NewReader(x))
- decoder256.DisallowUnknownFields()
- var stringVal string
- if err := decoder256.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [int32 string]"}
-}
-
-func (t Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case FullDocumentDiagnosticReport:
- return json.Marshal(x)
- case UnchangedDocumentDiagnosticReport:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]", t)
-}
-
-func (t *Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder60 := json.NewDecoder(bytes.NewReader(x))
- decoder60.DisallowUnknownFields()
- var h60 FullDocumentDiagnosticReport
- if err := decoder60.Decode(&h60); err == nil {
- t.Value = h60
- return nil
- }
- decoder61 := json.NewDecoder(bytes.NewReader(x))
- decoder61.DisallowUnknownFields()
- var h61 UnchangedDocumentDiagnosticReport
- if err := decoder61.Decode(&h61); err == nil {
- t.Value = h61
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]"}
-}
-
-func (t Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case FullDocumentDiagnosticReport:
- return json.Marshal(x)
- case UnchangedDocumentDiagnosticReport:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]", t)
-}
-
-func (t *Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder64 := json.NewDecoder(bytes.NewReader(x))
- decoder64.DisallowUnknownFields()
- var h64 FullDocumentDiagnosticReport
- if err := decoder64.Decode(&h64); err == nil {
- t.Value = h64
- return nil
- }
- decoder65 := json.NewDecoder(bytes.NewReader(x))
- decoder65.DisallowUnknownFields()
- var h65 UnchangedDocumentDiagnosticReport
- if err := decoder65.Decode(&h65); err == nil {
- t.Value = h65
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]"}
-}
-
-func (t Or_RelativePattern_baseUri) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case URI:
- return json.Marshal(x)
- case WorkspaceFolder:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [URI WorkspaceFolder]", t)
-}
-
-func (t *Or_RelativePattern_baseUri) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder214 := json.NewDecoder(bytes.NewReader(x))
- decoder214.DisallowUnknownFields()
- var h214 URI
- if err := decoder214.Decode(&h214); err == nil {
- t.Value = h214
- return nil
- }
- decoder215 := json.NewDecoder(bytes.NewReader(x))
- decoder215.DisallowUnknownFields()
- var h215 WorkspaceFolder
- if err := decoder215.Decode(&h215); err == nil {
- t.Value = h215
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [URI WorkspaceFolder]"}
-}
-
-func (t Or_Result_textDocument_codeAction_Item0_Elem) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case CodeAction:
- return json.Marshal(x)
- case Command:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [CodeAction Command]", t)
-}
-
-func (t *Or_Result_textDocument_codeAction_Item0_Elem) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder322 := json.NewDecoder(bytes.NewReader(x))
- decoder322.DisallowUnknownFields()
- var h322 CodeAction
- if err := decoder322.Decode(&h322); err == nil {
- t.Value = h322
- return nil
- }
- decoder323 := json.NewDecoder(bytes.NewReader(x))
- decoder323.DisallowUnknownFields()
- var h323 Command
- if err := decoder323.Decode(&h323); err == nil {
- t.Value = h323
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [CodeAction Command]"}
-}
-
-func (t Or_Result_textDocument_completion) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case CompletionList:
- return json.Marshal(x)
- case []CompletionItem:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [CompletionList []CompletionItem]", t)
-}
-
-func (t *Or_Result_textDocument_completion) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder310 := json.NewDecoder(bytes.NewReader(x))
- decoder310.DisallowUnknownFields()
- var h310 CompletionList
- if err := decoder310.Decode(&h310); err == nil {
- t.Value = h310
- return nil
- }
- decoder311 := json.NewDecoder(bytes.NewReader(x))
- decoder311.DisallowUnknownFields()
- var h311 []CompletionItem
- if err := decoder311.Decode(&h311); err == nil {
- t.Value = h311
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [CompletionList []CompletionItem]"}
-}
-
-func (t Or_Result_textDocument_declaration) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Declaration:
- return json.Marshal(x)
- case []DeclarationLink:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Declaration []DeclarationLink]", t)
-}
-
-func (t *Or_Result_textDocument_declaration) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder298 := json.NewDecoder(bytes.NewReader(x))
- decoder298.DisallowUnknownFields()
- var h298 Declaration
- if err := decoder298.Decode(&h298); err == nil {
- t.Value = h298
- return nil
- }
- decoder299 := json.NewDecoder(bytes.NewReader(x))
- decoder299.DisallowUnknownFields()
- var h299 []DeclarationLink
- if err := decoder299.Decode(&h299); err == nil {
- t.Value = h299
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Declaration []DeclarationLink]"}
-}
-
-func (t Or_Result_textDocument_definition) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Definition:
- return json.Marshal(x)
- case []DefinitionLink:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Definition []DefinitionLink]", t)
-}
-
-func (t *Or_Result_textDocument_definition) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder314 := json.NewDecoder(bytes.NewReader(x))
- decoder314.DisallowUnknownFields()
- var h314 Definition
- if err := decoder314.Decode(&h314); err == nil {
- t.Value = h314
- return nil
- }
- decoder315 := json.NewDecoder(bytes.NewReader(x))
- decoder315.DisallowUnknownFields()
- var h315 []DefinitionLink
- if err := decoder315.Decode(&h315); err == nil {
- t.Value = h315
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Definition []DefinitionLink]"}
-}
-
-func (t Or_Result_textDocument_documentSymbol) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case []DocumentSymbol:
- return json.Marshal(x)
- case []SymbolInformation:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [[]DocumentSymbol []SymbolInformation]", t)
-}
-
-func (t *Or_Result_textDocument_documentSymbol) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder318 := json.NewDecoder(bytes.NewReader(x))
- decoder318.DisallowUnknownFields()
- var h318 []DocumentSymbol
- if err := decoder318.Decode(&h318); err == nil {
- t.Value = h318
- return nil
- }
- decoder319 := json.NewDecoder(bytes.NewReader(x))
- decoder319.DisallowUnknownFields()
- var h319 []SymbolInformation
- if err := decoder319.Decode(&h319); err == nil {
- t.Value = h319
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [[]DocumentSymbol []SymbolInformation]"}
-}
-
-func (t Or_Result_textDocument_implementation) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Definition:
- return json.Marshal(x)
- case []DefinitionLink:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Definition []DefinitionLink]", t)
-}
-
-func (t *Or_Result_textDocument_implementation) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder290 := json.NewDecoder(bytes.NewReader(x))
- decoder290.DisallowUnknownFields()
- var h290 Definition
- if err := decoder290.Decode(&h290); err == nil {
- t.Value = h290
- return nil
- }
- decoder291 := json.NewDecoder(bytes.NewReader(x))
- decoder291.DisallowUnknownFields()
- var h291 []DefinitionLink
- if err := decoder291.Decode(&h291); err == nil {
- t.Value = h291
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Definition []DefinitionLink]"}
-}
-
-func (t Or_Result_textDocument_inlineCompletion) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InlineCompletionList:
- return json.Marshal(x)
- case []InlineCompletionItem:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InlineCompletionList []InlineCompletionItem]", t)
-}
-
-func (t *Or_Result_textDocument_inlineCompletion) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder306 := json.NewDecoder(bytes.NewReader(x))
- decoder306.DisallowUnknownFields()
- var h306 InlineCompletionList
- if err := decoder306.Decode(&h306); err == nil {
- t.Value = h306
- return nil
- }
- decoder307 := json.NewDecoder(bytes.NewReader(x))
- decoder307.DisallowUnknownFields()
- var h307 []InlineCompletionItem
- if err := decoder307.Decode(&h307); err == nil {
- t.Value = h307
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InlineCompletionList []InlineCompletionItem]"}
-}
-
-func (t Or_Result_textDocument_semanticTokens_full_delta) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case SemanticTokens:
- return json.Marshal(x)
- case SemanticTokensDelta:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [SemanticTokens SemanticTokensDelta]", t)
-}
-
-func (t *Or_Result_textDocument_semanticTokens_full_delta) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder302 := json.NewDecoder(bytes.NewReader(x))
- decoder302.DisallowUnknownFields()
- var h302 SemanticTokens
- if err := decoder302.Decode(&h302); err == nil {
- t.Value = h302
- return nil
- }
- decoder303 := json.NewDecoder(bytes.NewReader(x))
- decoder303.DisallowUnknownFields()
- var h303 SemanticTokensDelta
- if err := decoder303.Decode(&h303); err == nil {
- t.Value = h303
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [SemanticTokens SemanticTokensDelta]"}
-}
-
-func (t Or_Result_textDocument_typeDefinition) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Definition:
- return json.Marshal(x)
- case []DefinitionLink:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Definition []DefinitionLink]", t)
-}
-
-func (t *Or_Result_textDocument_typeDefinition) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder294 := json.NewDecoder(bytes.NewReader(x))
- decoder294.DisallowUnknownFields()
- var h294 Definition
- if err := decoder294.Decode(&h294); err == nil {
- t.Value = h294
- return nil
- }
- decoder295 := json.NewDecoder(bytes.NewReader(x))
- decoder295.DisallowUnknownFields()
- var h295 []DefinitionLink
- if err := decoder295.Decode(&h295); err == nil {
- t.Value = h295
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Definition []DefinitionLink]"}
-}
-
-func (t Or_Result_workspace_symbol) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case []SymbolInformation:
- return json.Marshal(x)
- case []WorkspaceSymbol:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [[]SymbolInformation []WorkspaceSymbol]", t)
-}
-
-func (t *Or_Result_workspace_symbol) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder326 := json.NewDecoder(bytes.NewReader(x))
- decoder326.DisallowUnknownFields()
- var h326 []SymbolInformation
- if err := decoder326.Decode(&h326); err == nil {
- t.Value = h326
- return nil
- }
- decoder327 := json.NewDecoder(bytes.NewReader(x))
- decoder327.DisallowUnknownFields()
- var h327 []WorkspaceSymbol
- if err := decoder327.Decode(&h327); err == nil {
- t.Value = h327
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [[]SymbolInformation []WorkspaceSymbol]"}
-}
-
-func (t Or_SemanticTokensOptions_full) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case SemanticTokensFullDelta:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [SemanticTokensFullDelta bool]", t)
-}
-
-func (t *Or_SemanticTokensOptions_full) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder47 := json.NewDecoder(bytes.NewReader(x))
- decoder47.DisallowUnknownFields()
- var boolVal bool
- if err := decoder47.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder48 := json.NewDecoder(bytes.NewReader(x))
- decoder48.DisallowUnknownFields()
- var h48 SemanticTokensFullDelta
- if err := decoder48.Decode(&h48); err == nil {
- t.Value = h48
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [SemanticTokensFullDelta bool]"}
-}
-
-func (t Or_SemanticTokensOptions_range) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Lit_SemanticTokensOptions_range_Item1:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Lit_SemanticTokensOptions_range_Item1 bool]", t)
-}
-
-func (t *Or_SemanticTokensOptions_range) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder44 := json.NewDecoder(bytes.NewReader(x))
- decoder44.DisallowUnknownFields()
- var boolVal bool
- if err := decoder44.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder45 := json.NewDecoder(bytes.NewReader(x))
- decoder45.DisallowUnknownFields()
- var h45 Lit_SemanticTokensOptions_range_Item1
- if err := decoder45.Decode(&h45); err == nil {
- t.Value = h45
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Lit_SemanticTokensOptions_range_Item1 bool]"}
-}
-
-func (t Or_ServerCapabilities_callHierarchyProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case CallHierarchyOptions:
- return json.Marshal(x)
- case CallHierarchyRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [CallHierarchyOptions CallHierarchyRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_callHierarchyProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder140 := json.NewDecoder(bytes.NewReader(x))
- decoder140.DisallowUnknownFields()
- var boolVal bool
- if err := decoder140.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder141 := json.NewDecoder(bytes.NewReader(x))
- decoder141.DisallowUnknownFields()
- var h141 CallHierarchyOptions
- if err := decoder141.Decode(&h141); err == nil {
- t.Value = h141
- return nil
- }
- decoder142 := json.NewDecoder(bytes.NewReader(x))
- decoder142.DisallowUnknownFields()
- var h142 CallHierarchyRegistrationOptions
- if err := decoder142.Decode(&h142); err == nil {
- t.Value = h142
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [CallHierarchyOptions CallHierarchyRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_codeActionProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case CodeActionOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [CodeActionOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_codeActionProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder109 := json.NewDecoder(bytes.NewReader(x))
- decoder109.DisallowUnknownFields()
- var boolVal bool
- if err := decoder109.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder110 := json.NewDecoder(bytes.NewReader(x))
- decoder110.DisallowUnknownFields()
- var h110 CodeActionOptions
- if err := decoder110.Decode(&h110); err == nil {
- t.Value = h110
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [CodeActionOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_colorProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DocumentColorOptions:
- return json.Marshal(x)
- case DocumentColorRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DocumentColorOptions DocumentColorRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_colorProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder113 := json.NewDecoder(bytes.NewReader(x))
- decoder113.DisallowUnknownFields()
- var boolVal bool
- if err := decoder113.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder114 := json.NewDecoder(bytes.NewReader(x))
- decoder114.DisallowUnknownFields()
- var h114 DocumentColorOptions
- if err := decoder114.Decode(&h114); err == nil {
- t.Value = h114
- return nil
- }
- decoder115 := json.NewDecoder(bytes.NewReader(x))
- decoder115.DisallowUnknownFields()
- var h115 DocumentColorRegistrationOptions
- if err := decoder115.Decode(&h115); err == nil {
- t.Value = h115
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DocumentColorOptions DocumentColorRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_declarationProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DeclarationOptions:
- return json.Marshal(x)
- case DeclarationRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DeclarationOptions DeclarationRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_declarationProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder83 := json.NewDecoder(bytes.NewReader(x))
- decoder83.DisallowUnknownFields()
- var boolVal bool
- if err := decoder83.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder84 := json.NewDecoder(bytes.NewReader(x))
- decoder84.DisallowUnknownFields()
- var h84 DeclarationOptions
- if err := decoder84.Decode(&h84); err == nil {
- t.Value = h84
- return nil
- }
- decoder85 := json.NewDecoder(bytes.NewReader(x))
- decoder85.DisallowUnknownFields()
- var h85 DeclarationRegistrationOptions
- if err := decoder85.Decode(&h85); err == nil {
- t.Value = h85
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DeclarationOptions DeclarationRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_definitionProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DefinitionOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DefinitionOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_definitionProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder87 := json.NewDecoder(bytes.NewReader(x))
- decoder87.DisallowUnknownFields()
- var boolVal bool
- if err := decoder87.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder88 := json.NewDecoder(bytes.NewReader(x))
- decoder88.DisallowUnknownFields()
- var h88 DefinitionOptions
- if err := decoder88.Decode(&h88); err == nil {
- t.Value = h88
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DefinitionOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_diagnosticProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DiagnosticOptions:
- return json.Marshal(x)
- case DiagnosticRegistrationOptions:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DiagnosticOptions DiagnosticRegistrationOptions]", t)
-}
-
-func (t *Or_ServerCapabilities_diagnosticProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder174 := json.NewDecoder(bytes.NewReader(x))
- decoder174.DisallowUnknownFields()
- var h174 DiagnosticOptions
- if err := decoder174.Decode(&h174); err == nil {
- t.Value = h174
- return nil
- }
- decoder175 := json.NewDecoder(bytes.NewReader(x))
- decoder175.DisallowUnknownFields()
- var h175 DiagnosticRegistrationOptions
- if err := decoder175.Decode(&h175); err == nil {
- t.Value = h175
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DiagnosticOptions DiagnosticRegistrationOptions]"}
-}
-
-func (t Or_ServerCapabilities_documentFormattingProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DocumentFormattingOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DocumentFormattingOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_documentFormattingProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder120 := json.NewDecoder(bytes.NewReader(x))
- decoder120.DisallowUnknownFields()
- var boolVal bool
- if err := decoder120.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder121 := json.NewDecoder(bytes.NewReader(x))
- decoder121.DisallowUnknownFields()
- var h121 DocumentFormattingOptions
- if err := decoder121.Decode(&h121); err == nil {
- t.Value = h121
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DocumentFormattingOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_documentHighlightProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DocumentHighlightOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DocumentHighlightOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_documentHighlightProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder103 := json.NewDecoder(bytes.NewReader(x))
- decoder103.DisallowUnknownFields()
- var boolVal bool
- if err := decoder103.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder104 := json.NewDecoder(bytes.NewReader(x))
- decoder104.DisallowUnknownFields()
- var h104 DocumentHighlightOptions
- if err := decoder104.Decode(&h104); err == nil {
- t.Value = h104
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DocumentHighlightOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_documentRangeFormattingProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DocumentRangeFormattingOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DocumentRangeFormattingOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_documentRangeFormattingProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder123 := json.NewDecoder(bytes.NewReader(x))
- decoder123.DisallowUnknownFields()
- var boolVal bool
- if err := decoder123.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder124 := json.NewDecoder(bytes.NewReader(x))
- decoder124.DisallowUnknownFields()
- var h124 DocumentRangeFormattingOptions
- if err := decoder124.Decode(&h124); err == nil {
- t.Value = h124
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DocumentRangeFormattingOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_documentSymbolProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case DocumentSymbolOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [DocumentSymbolOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_documentSymbolProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder106 := json.NewDecoder(bytes.NewReader(x))
- decoder106.DisallowUnknownFields()
- var boolVal bool
- if err := decoder106.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder107 := json.NewDecoder(bytes.NewReader(x))
- decoder107.DisallowUnknownFields()
- var h107 DocumentSymbolOptions
- if err := decoder107.Decode(&h107); err == nil {
- t.Value = h107
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [DocumentSymbolOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_foldingRangeProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case FoldingRangeOptions:
- return json.Marshal(x)
- case FoldingRangeRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [FoldingRangeOptions FoldingRangeRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_foldingRangeProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder130 := json.NewDecoder(bytes.NewReader(x))
- decoder130.DisallowUnknownFields()
- var boolVal bool
- if err := decoder130.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder131 := json.NewDecoder(bytes.NewReader(x))
- decoder131.DisallowUnknownFields()
- var h131 FoldingRangeOptions
- if err := decoder131.Decode(&h131); err == nil {
- t.Value = h131
- return nil
- }
- decoder132 := json.NewDecoder(bytes.NewReader(x))
- decoder132.DisallowUnknownFields()
- var h132 FoldingRangeRegistrationOptions
- if err := decoder132.Decode(&h132); err == nil {
- t.Value = h132
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [FoldingRangeOptions FoldingRangeRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_hoverProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case HoverOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [HoverOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_hoverProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder79 := json.NewDecoder(bytes.NewReader(x))
- decoder79.DisallowUnknownFields()
- var boolVal bool
- if err := decoder79.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder80 := json.NewDecoder(bytes.NewReader(x))
- decoder80.DisallowUnknownFields()
- var h80 HoverOptions
- if err := decoder80.Decode(&h80); err == nil {
- t.Value = h80
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [HoverOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_implementationProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case ImplementationOptions:
- return json.Marshal(x)
- case ImplementationRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [ImplementationOptions ImplementationRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_implementationProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder96 := json.NewDecoder(bytes.NewReader(x))
- decoder96.DisallowUnknownFields()
- var boolVal bool
- if err := decoder96.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder97 := json.NewDecoder(bytes.NewReader(x))
- decoder97.DisallowUnknownFields()
- var h97 ImplementationOptions
- if err := decoder97.Decode(&h97); err == nil {
- t.Value = h97
- return nil
- }
- decoder98 := json.NewDecoder(bytes.NewReader(x))
- decoder98.DisallowUnknownFields()
- var h98 ImplementationRegistrationOptions
- if err := decoder98.Decode(&h98); err == nil {
- t.Value = h98
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [ImplementationOptions ImplementationRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_inlayHintProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InlayHintOptions:
- return json.Marshal(x)
- case InlayHintRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InlayHintOptions InlayHintRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_inlayHintProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder169 := json.NewDecoder(bytes.NewReader(x))
- decoder169.DisallowUnknownFields()
- var boolVal bool
- if err := decoder169.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder170 := json.NewDecoder(bytes.NewReader(x))
- decoder170.DisallowUnknownFields()
- var h170 InlayHintOptions
- if err := decoder170.Decode(&h170); err == nil {
- t.Value = h170
- return nil
- }
- decoder171 := json.NewDecoder(bytes.NewReader(x))
- decoder171.DisallowUnknownFields()
- var h171 InlayHintRegistrationOptions
- if err := decoder171.Decode(&h171); err == nil {
- t.Value = h171
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InlayHintOptions InlayHintRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_inlineCompletionProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InlineCompletionOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InlineCompletionOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_inlineCompletionProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder177 := json.NewDecoder(bytes.NewReader(x))
- decoder177.DisallowUnknownFields()
- var boolVal bool
- if err := decoder177.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder178 := json.NewDecoder(bytes.NewReader(x))
- decoder178.DisallowUnknownFields()
- var h178 InlineCompletionOptions
- if err := decoder178.Decode(&h178); err == nil {
- t.Value = h178
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InlineCompletionOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_inlineValueProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case InlineValueOptions:
- return json.Marshal(x)
- case InlineValueRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [InlineValueOptions InlineValueRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_inlineValueProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder164 := json.NewDecoder(bytes.NewReader(x))
- decoder164.DisallowUnknownFields()
- var boolVal bool
- if err := decoder164.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder165 := json.NewDecoder(bytes.NewReader(x))
- decoder165.DisallowUnknownFields()
- var h165 InlineValueOptions
- if err := decoder165.Decode(&h165); err == nil {
- t.Value = h165
- return nil
- }
- decoder166 := json.NewDecoder(bytes.NewReader(x))
- decoder166.DisallowUnknownFields()
- var h166 InlineValueRegistrationOptions
- if err := decoder166.Decode(&h166); err == nil {
- t.Value = h166
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [InlineValueOptions InlineValueRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_linkedEditingRangeProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case LinkedEditingRangeOptions:
- return json.Marshal(x)
- case LinkedEditingRangeRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [LinkedEditingRangeOptions LinkedEditingRangeRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_linkedEditingRangeProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder145 := json.NewDecoder(bytes.NewReader(x))
- decoder145.DisallowUnknownFields()
- var boolVal bool
- if err := decoder145.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder146 := json.NewDecoder(bytes.NewReader(x))
- decoder146.DisallowUnknownFields()
- var h146 LinkedEditingRangeOptions
- if err := decoder146.Decode(&h146); err == nil {
- t.Value = h146
- return nil
- }
- decoder147 := json.NewDecoder(bytes.NewReader(x))
- decoder147.DisallowUnknownFields()
- var h147 LinkedEditingRangeRegistrationOptions
- if err := decoder147.Decode(&h147); err == nil {
- t.Value = h147
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [LinkedEditingRangeOptions LinkedEditingRangeRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_monikerProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MonikerOptions:
- return json.Marshal(x)
- case MonikerRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MonikerOptions MonikerRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_monikerProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder154 := json.NewDecoder(bytes.NewReader(x))
- decoder154.DisallowUnknownFields()
- var boolVal bool
- if err := decoder154.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder155 := json.NewDecoder(bytes.NewReader(x))
- decoder155.DisallowUnknownFields()
- var h155 MonikerOptions
- if err := decoder155.Decode(&h155); err == nil {
- t.Value = h155
- return nil
- }
- decoder156 := json.NewDecoder(bytes.NewReader(x))
- decoder156.DisallowUnknownFields()
- var h156 MonikerRegistrationOptions
- if err := decoder156.Decode(&h156); err == nil {
- t.Value = h156
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MonikerOptions MonikerRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_notebookDocumentSync) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case NotebookDocumentSyncOptions:
- return json.Marshal(x)
- case NotebookDocumentSyncRegistrationOptions:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [NotebookDocumentSyncOptions NotebookDocumentSyncRegistrationOptions]", t)
-}
-
-func (t *Or_ServerCapabilities_notebookDocumentSync) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder76 := json.NewDecoder(bytes.NewReader(x))
- decoder76.DisallowUnknownFields()
- var h76 NotebookDocumentSyncOptions
- if err := decoder76.Decode(&h76); err == nil {
- t.Value = h76
- return nil
- }
- decoder77 := json.NewDecoder(bytes.NewReader(x))
- decoder77.DisallowUnknownFields()
- var h77 NotebookDocumentSyncRegistrationOptions
- if err := decoder77.Decode(&h77); err == nil {
- t.Value = h77
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [NotebookDocumentSyncOptions NotebookDocumentSyncRegistrationOptions]"}
-}
-
-func (t Or_ServerCapabilities_referencesProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case ReferenceOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [ReferenceOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_referencesProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder100 := json.NewDecoder(bytes.NewReader(x))
- decoder100.DisallowUnknownFields()
- var boolVal bool
- if err := decoder100.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder101 := json.NewDecoder(bytes.NewReader(x))
- decoder101.DisallowUnknownFields()
- var h101 ReferenceOptions
- if err := decoder101.Decode(&h101); err == nil {
- t.Value = h101
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [ReferenceOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_renameProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case RenameOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [RenameOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_renameProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder126 := json.NewDecoder(bytes.NewReader(x))
- decoder126.DisallowUnknownFields()
- var boolVal bool
- if err := decoder126.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder127 := json.NewDecoder(bytes.NewReader(x))
- decoder127.DisallowUnknownFields()
- var h127 RenameOptions
- if err := decoder127.Decode(&h127); err == nil {
- t.Value = h127
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [RenameOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_selectionRangeProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case SelectionRangeOptions:
- return json.Marshal(x)
- case SelectionRangeRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [SelectionRangeOptions SelectionRangeRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_selectionRangeProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder135 := json.NewDecoder(bytes.NewReader(x))
- decoder135.DisallowUnknownFields()
- var boolVal bool
- if err := decoder135.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder136 := json.NewDecoder(bytes.NewReader(x))
- decoder136.DisallowUnknownFields()
- var h136 SelectionRangeOptions
- if err := decoder136.Decode(&h136); err == nil {
- t.Value = h136
- return nil
- }
- decoder137 := json.NewDecoder(bytes.NewReader(x))
- decoder137.DisallowUnknownFields()
- var h137 SelectionRangeRegistrationOptions
- if err := decoder137.Decode(&h137); err == nil {
- t.Value = h137
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [SelectionRangeOptions SelectionRangeRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_semanticTokensProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case SemanticTokensOptions:
- return json.Marshal(x)
- case SemanticTokensRegistrationOptions:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [SemanticTokensOptions SemanticTokensRegistrationOptions]", t)
-}
-
-func (t *Or_ServerCapabilities_semanticTokensProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder150 := json.NewDecoder(bytes.NewReader(x))
- decoder150.DisallowUnknownFields()
- var h150 SemanticTokensOptions
- if err := decoder150.Decode(&h150); err == nil {
- t.Value = h150
- return nil
- }
- decoder151 := json.NewDecoder(bytes.NewReader(x))
- decoder151.DisallowUnknownFields()
- var h151 SemanticTokensRegistrationOptions
- if err := decoder151.Decode(&h151); err == nil {
- t.Value = h151
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [SemanticTokensOptions SemanticTokensRegistrationOptions]"}
-}
-
-func (t Or_ServerCapabilities_textDocumentSync) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TextDocumentSyncKind:
- return json.Marshal(x)
- case TextDocumentSyncOptions:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TextDocumentSyncKind TextDocumentSyncOptions]", t)
-}
-
-func (t *Or_ServerCapabilities_textDocumentSync) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder72 := json.NewDecoder(bytes.NewReader(x))
- decoder72.DisallowUnknownFields()
- var h72 TextDocumentSyncKind
- if err := decoder72.Decode(&h72); err == nil {
- t.Value = h72
- return nil
- }
- decoder73 := json.NewDecoder(bytes.NewReader(x))
- decoder73.DisallowUnknownFields()
- var h73 TextDocumentSyncOptions
- if err := decoder73.Decode(&h73); err == nil {
- t.Value = h73
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TextDocumentSyncKind TextDocumentSyncOptions]"}
-}
-
-func (t Or_ServerCapabilities_typeDefinitionProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TypeDefinitionOptions:
- return json.Marshal(x)
- case TypeDefinitionRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TypeDefinitionOptions TypeDefinitionRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_typeDefinitionProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder91 := json.NewDecoder(bytes.NewReader(x))
- decoder91.DisallowUnknownFields()
- var boolVal bool
- if err := decoder91.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder92 := json.NewDecoder(bytes.NewReader(x))
- decoder92.DisallowUnknownFields()
- var h92 TypeDefinitionOptions
- if err := decoder92.Decode(&h92); err == nil {
- t.Value = h92
- return nil
- }
- decoder93 := json.NewDecoder(bytes.NewReader(x))
- decoder93.DisallowUnknownFields()
- var h93 TypeDefinitionRegistrationOptions
- if err := decoder93.Decode(&h93); err == nil {
- t.Value = h93
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TypeDefinitionOptions TypeDefinitionRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_typeHierarchyProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TypeHierarchyOptions:
- return json.Marshal(x)
- case TypeHierarchyRegistrationOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TypeHierarchyOptions TypeHierarchyRegistrationOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_typeHierarchyProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder159 := json.NewDecoder(bytes.NewReader(x))
- decoder159.DisallowUnknownFields()
- var boolVal bool
- if err := decoder159.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder160 := json.NewDecoder(bytes.NewReader(x))
- decoder160.DisallowUnknownFields()
- var h160 TypeHierarchyOptions
- if err := decoder160.Decode(&h160); err == nil {
- t.Value = h160
- return nil
- }
- decoder161 := json.NewDecoder(bytes.NewReader(x))
- decoder161.DisallowUnknownFields()
- var h161 TypeHierarchyRegistrationOptions
- if err := decoder161.Decode(&h161); err == nil {
- t.Value = h161
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TypeHierarchyOptions TypeHierarchyRegistrationOptions bool]"}
-}
-
-func (t Or_ServerCapabilities_workspaceSymbolProvider) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case WorkspaceSymbolOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [WorkspaceSymbolOptions bool]", t)
-}
-
-func (t *Or_ServerCapabilities_workspaceSymbolProvider) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder117 := json.NewDecoder(bytes.NewReader(x))
- decoder117.DisallowUnknownFields()
- var boolVal bool
- if err := decoder117.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder118 := json.NewDecoder(bytes.NewReader(x))
- decoder118.DisallowUnknownFields()
- var h118 WorkspaceSymbolOptions
- if err := decoder118.Decode(&h118); err == nil {
- t.Value = h118
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [WorkspaceSymbolOptions bool]"}
-}
-
-func (t Or_SignatureInformation_documentation) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case MarkupContent:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [MarkupContent string]", t)
-}
-
-func (t *Or_SignatureInformation_documentation) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder186 := json.NewDecoder(bytes.NewReader(x))
- decoder186.DisallowUnknownFields()
- var stringVal string
- if err := decoder186.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- decoder187 := json.NewDecoder(bytes.NewReader(x))
- decoder187.DisallowUnknownFields()
- var h187 MarkupContent
- if err := decoder187.Decode(&h187); err == nil {
- t.Value = h187
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [MarkupContent string]"}
-}
-
-func (t Or_TextDocumentContentChangeEvent) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TextDocumentContentChangePartial:
- return json.Marshal(x)
- case TextDocumentContentChangeWholeDocument:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TextDocumentContentChangePartial TextDocumentContentChangeWholeDocument]", t)
-}
-
-func (t *Or_TextDocumentContentChangeEvent) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder263 := json.NewDecoder(bytes.NewReader(x))
- decoder263.DisallowUnknownFields()
- var h263 TextDocumentContentChangePartial
- if err := decoder263.Decode(&h263); err == nil {
- t.Value = h263
- return nil
- }
- decoder264 := json.NewDecoder(bytes.NewReader(x))
- decoder264.DisallowUnknownFields()
- var h264 TextDocumentContentChangeWholeDocument
- if err := decoder264.Decode(&h264); err == nil {
- t.Value = h264
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TextDocumentContentChangePartial TextDocumentContentChangeWholeDocument]"}
-}
-
-func (t Or_TextDocumentEdit_edits_Elem) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case AnnotatedTextEdit:
- return json.Marshal(x)
- case SnippetTextEdit:
- return json.Marshal(x)
- case TextEdit:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [AnnotatedTextEdit SnippetTextEdit TextEdit]", t)
-}
-
-func (t *Or_TextDocumentEdit_edits_Elem) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder52 := json.NewDecoder(bytes.NewReader(x))
- decoder52.DisallowUnknownFields()
- var h52 AnnotatedTextEdit
- if err := decoder52.Decode(&h52); err == nil {
- t.Value = h52
- return nil
- }
- decoder53 := json.NewDecoder(bytes.NewReader(x))
- decoder53.DisallowUnknownFields()
- var h53 SnippetTextEdit
- if err := decoder53.Decode(&h53); err == nil {
- t.Value = h53
- return nil
- }
- decoder54 := json.NewDecoder(bytes.NewReader(x))
- decoder54.DisallowUnknownFields()
- var h54 TextEdit
- if err := decoder54.Decode(&h54); err == nil {
- t.Value = h54
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [AnnotatedTextEdit SnippetTextEdit TextEdit]"}
-}
-
-func (t Or_TextDocumentFilter) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TextDocumentFilterLanguage:
- return json.Marshal(x)
- case TextDocumentFilterPattern:
- return json.Marshal(x)
- case TextDocumentFilterScheme:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TextDocumentFilterLanguage TextDocumentFilterPattern TextDocumentFilterScheme]", t)
-}
-
-func (t *Or_TextDocumentFilter) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder279 := json.NewDecoder(bytes.NewReader(x))
- decoder279.DisallowUnknownFields()
- var h279 TextDocumentFilterLanguage
- if err := decoder279.Decode(&h279); err == nil {
- t.Value = h279
- return nil
- }
- decoder280 := json.NewDecoder(bytes.NewReader(x))
- decoder280.DisallowUnknownFields()
- var h280 TextDocumentFilterPattern
- if err := decoder280.Decode(&h280); err == nil {
- t.Value = h280
- return nil
- }
- decoder281 := json.NewDecoder(bytes.NewReader(x))
- decoder281.DisallowUnknownFields()
- var h281 TextDocumentFilterScheme
- if err := decoder281.Decode(&h281); err == nil {
- t.Value = h281
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TextDocumentFilterLanguage TextDocumentFilterPattern TextDocumentFilterScheme]"}
-}
-
-func (t Or_TextDocumentSyncOptions_save) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case SaveOptions:
- return json.Marshal(x)
- case bool:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [SaveOptions bool]", t)
-}
-
-func (t *Or_TextDocumentSyncOptions_save) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder195 := json.NewDecoder(bytes.NewReader(x))
- decoder195.DisallowUnknownFields()
- var boolVal bool
- if err := decoder195.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder196 := json.NewDecoder(bytes.NewReader(x))
- decoder196.DisallowUnknownFields()
- var h196 SaveOptions
- if err := decoder196.Decode(&h196); err == nil {
- t.Value = h196
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [SaveOptions bool]"}
-}
-
-func (t Or_WorkspaceDocumentDiagnosticReport) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case WorkspaceFullDocumentDiagnosticReport:
- return json.Marshal(x)
- case WorkspaceUnchangedDocumentDiagnosticReport:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [WorkspaceFullDocumentDiagnosticReport WorkspaceUnchangedDocumentDiagnosticReport]", t)
-}
-
-func (t *Or_WorkspaceDocumentDiagnosticReport) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder259 := json.NewDecoder(bytes.NewReader(x))
- decoder259.DisallowUnknownFields()
- var h259 WorkspaceFullDocumentDiagnosticReport
- if err := decoder259.Decode(&h259); err == nil {
- t.Value = h259
- return nil
- }
- decoder260 := json.NewDecoder(bytes.NewReader(x))
- decoder260.DisallowUnknownFields()
- var h260 WorkspaceUnchangedDocumentDiagnosticReport
- if err := decoder260.Decode(&h260); err == nil {
- t.Value = h260
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [WorkspaceFullDocumentDiagnosticReport WorkspaceUnchangedDocumentDiagnosticReport]"}
-}
-
-func (t Or_WorkspaceEdit_documentChanges_Elem) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case CreateFile:
- return json.Marshal(x)
- case DeleteFile:
- return json.Marshal(x)
- case RenameFile:
- return json.Marshal(x)
- case TextDocumentEdit:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [CreateFile DeleteFile RenameFile TextDocumentEdit]", t)
-}
-
-func (t *Or_WorkspaceEdit_documentChanges_Elem) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder4 := json.NewDecoder(bytes.NewReader(x))
- decoder4.DisallowUnknownFields()
- var h4 CreateFile
- if err := decoder4.Decode(&h4); err == nil {
- t.Value = h4
- return nil
- }
- decoder5 := json.NewDecoder(bytes.NewReader(x))
- decoder5.DisallowUnknownFields()
- var h5 DeleteFile
- if err := decoder5.Decode(&h5); err == nil {
- t.Value = h5
- return nil
- }
- decoder6 := json.NewDecoder(bytes.NewReader(x))
- decoder6.DisallowUnknownFields()
- var h6 RenameFile
- if err := decoder6.Decode(&h6); err == nil {
- t.Value = h6
- return nil
- }
- decoder7 := json.NewDecoder(bytes.NewReader(x))
- decoder7.DisallowUnknownFields()
- var h7 TextDocumentEdit
- if err := decoder7.Decode(&h7); err == nil {
- t.Value = h7
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [CreateFile DeleteFile RenameFile TextDocumentEdit]"}
-}
-
-func (t Or_WorkspaceFoldersServerCapabilities_changeNotifications) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case bool:
- return json.Marshal(x)
- case string:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [bool string]", t)
-}
-
-func (t *Or_WorkspaceFoldersServerCapabilities_changeNotifications) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder210 := json.NewDecoder(bytes.NewReader(x))
- decoder210.DisallowUnknownFields()
- var boolVal bool
- if err := decoder210.Decode(&boolVal); err == nil {
- t.Value = boolVal
- return nil
- }
- decoder211 := json.NewDecoder(bytes.NewReader(x))
- decoder211.DisallowUnknownFields()
- var stringVal string
- if err := decoder211.Decode(&stringVal); err == nil {
- t.Value = stringVal
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [bool string]"}
-}
-
-func (t Or_WorkspaceOptions_textDocumentContent) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case TextDocumentContentOptions:
- return json.Marshal(x)
- case TextDocumentContentRegistrationOptions:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [TextDocumentContentOptions TextDocumentContentRegistrationOptions]", t)
-}
-
-func (t *Or_WorkspaceOptions_textDocumentContent) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder199 := json.NewDecoder(bytes.NewReader(x))
- decoder199.DisallowUnknownFields()
- var h199 TextDocumentContentOptions
- if err := decoder199.Decode(&h199); err == nil {
- t.Value = h199
- return nil
- }
- decoder200 := json.NewDecoder(bytes.NewReader(x))
- decoder200.DisallowUnknownFields()
- var h200 TextDocumentContentRegistrationOptions
- if err := decoder200.Decode(&h200); err == nil {
- t.Value = h200
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [TextDocumentContentOptions TextDocumentContentRegistrationOptions]"}
-}
-
-func (t Or_WorkspaceSymbol_location) MarshalJSON() ([]byte, error) {
- switch x := t.Value.(type) {
- case Location:
- return json.Marshal(x)
- case LocationUriOnly:
- return json.Marshal(x)
- case nil:
- return []byte("null"), nil
- }
- return nil, fmt.Errorf("type %T not one of [Location LocationUriOnly]", t)
-}
-
-func (t *Or_WorkspaceSymbol_location) UnmarshalJSON(x []byte) error {
- if string(x) == "null" {
- t.Value = nil
- return nil
- }
- decoder39 := json.NewDecoder(bytes.NewReader(x))
- decoder39.DisallowUnknownFields()
- var h39 Location
- if err := decoder39.Decode(&h39); err == nil {
- t.Value = h39
- return nil
- }
- decoder40 := json.NewDecoder(bytes.NewReader(x))
- decoder40.DisallowUnknownFields()
- var h40 LocationUriOnly
- if err := decoder40.Decode(&h40); err == nil {
- t.Value = h40
- return nil
- }
- return &UnmarshalError{"unmarshal failed to match one of [Location LocationUriOnly]"}
-}
diff --git a/internal/lsp/protocol/tsprotocol.go b/internal/lsp/protocol/tsprotocol.go
deleted file mode 100644
index 0e6356cdfe6203882c208fe9447fd5d9ee56923f..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol/tsprotocol.go
+++ /dev/null
@@ -1,6952 +0,0 @@
-// Copyright 2023 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Code generated for LSP. DO NOT EDIT.
-
-package protocol
-
-// Code generated from protocol/metaModel.json at ref release/protocol/3.17.6-next.9 (hash c94395b5da53729e6dff931293b051009ccaaaa4).
-// https://github.com/microsoft/vscode-languageserver-node/blob/release/protocol/3.17.6-next.9/protocol/metaModel.json
-// LSP metaData.version = 3.17.0.
-
-import "encoding/json"
-
-// created for And
-type And_RegOpt_textDocument_colorPresentation struct {
- WorkDoneProgressOptions
- TextDocumentRegistrationOptions
-}
-
-// A special text edit with an additional change annotation.
-//
-// @since 3.16.0.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#annotatedTextEdit
-type AnnotatedTextEdit struct {
- // The actual identifier of the change annotation
- AnnotationID *ChangeAnnotationIdentifier `json:"annotationId,omitempty"`
- TextEdit
-}
-
-// The parameters passed via an apply workspace edit request.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#applyWorkspaceEditParams
-type ApplyWorkspaceEditParams struct {
- // An optional label of the workspace edit. This label is
- // presented in the user interface for example on an undo
- // stack to undo the workspace edit.
- Label string `json:"label,omitempty"`
- // The edits to apply.
- Edit WorkspaceEdit `json:"edit"`
- // Additional data about the edit.
- //
- // @since 3.18.0
- // @proposed
- Metadata *WorkspaceEditMetadata `json:"metadata,omitempty"`
-}
-
-// The result returned from the apply workspace edit request.
-//
-// @since 3.17 renamed from ApplyWorkspaceEditResponse
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#applyWorkspaceEditResult
-type ApplyWorkspaceEditResult struct {
- // Indicates whether the edit was applied or not.
- Applied bool `json:"applied"`
- // An optional textual description for why the edit was not applied.
- // This may be used by the server for diagnostic slog.or to provide
- // a suitable error for a request that triggered the edit.
- FailureReason string `json:"failureReason,omitempty"`
- // Depending on the client's failure handling strategy `failedChange` might
- // contain the index of the change that failed. This property is only available
- // if the client signals a `failureHandlingStrategy` in its client capabilities.
- FailedChange uint32 `json:"failedChange,omitempty"`
-}
-
-// A base for all symbol information.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#baseSymbolInformation
-type BaseSymbolInformation struct {
- // The name of this symbol.
- Name string `json:"name"`
- // The kind of this symbol.
- Kind SymbolKind `json:"kind"`
- // Tags for this symbol.
- //
- // @since 3.16.0
- Tags []SymbolTag `json:"tags,omitempty"`
- // The name of the symbol containing this symbol. This information is for
- // user interface purposes (e.g. to render a qualifier in the user interface
- // if necessary). It can't be used to re-infer a hierarchy for the document
- // symbols.
- ContainerName string `json:"containerName,omitempty"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyClientCapabilities
-type CallHierarchyClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// Represents an incoming call, e.g. a caller of a method or constructor.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyIncomingCall
-type CallHierarchyIncomingCall struct {
- // The item that makes the call.
- From CallHierarchyItem `json:"from"`
- // The ranges at which the calls appear. This is relative to the caller
- // denoted by {@link CallHierarchyIncomingCall.from `this.from`}.
- FromRanges []Range `json:"fromRanges"`
-}
-
-// The parameter of a `callHierarchy/incomingCalls` request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyIncomingCallsParams
-type CallHierarchyIncomingCallsParams struct {
- Item CallHierarchyItem `json:"item"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Represents programming constructs like functions or constructors in the context
-// of call hierarchy.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyItem
-type CallHierarchyItem struct {
- // The name of this item.
- Name string `json:"name"`
- // The kind of this item.
- Kind SymbolKind `json:"kind"`
- // Tags for this item.
- Tags []SymbolTag `json:"tags,omitempty"`
- // More detail for this item, e.g. the signature of a function.
- Detail string `json:"detail,omitempty"`
- // The resource identifier of this item.
- URI DocumentURI `json:"uri"`
- // The range enclosing this symbol not including leading/trailing whitespace but everything else, e.g. comments and code.
- Range Range `json:"range"`
- // The range that should be selected and revealed when this symbol is being picked, e.g. the name of a function.
- // Must be contained by the {@link CallHierarchyItem.range `range`}.
- SelectionRange Range `json:"selectionRange"`
- // A data entry field that is preserved between a call hierarchy prepare and
- // incoming calls or outgoing calls requests.
- Data interface{} `json:"data,omitempty"`
-}
-
-// Call hierarchy options used during static registration.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyOptions
-type CallHierarchyOptions struct {
- WorkDoneProgressOptions
-}
-
-// Represents an outgoing call, e.g. calling a getter from a method or a method from a constructor etc.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyOutgoingCall
-type CallHierarchyOutgoingCall struct {
- // The item that is called.
- To CallHierarchyItem `json:"to"`
- // The range at which this item is called. This is the range relative to the caller, e.g the item
- // passed to {@link CallHierarchyItemProvider.provideCallHierarchyOutgoingCalls `provideCallHierarchyOutgoingCalls`}
- // and not {@link CallHierarchyOutgoingCall.to `this.to`}.
- FromRanges []Range `json:"fromRanges"`
-}
-
-// The parameter of a `callHierarchy/outgoingCalls` request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyOutgoingCallsParams
-type CallHierarchyOutgoingCallsParams struct {
- Item CallHierarchyItem `json:"item"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// The parameter of a `textDocument/prepareCallHierarchy` request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyPrepareParams
-type CallHierarchyPrepareParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// Call hierarchy options used during static or dynamic registration.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#callHierarchyRegistrationOptions
-type CallHierarchyRegistrationOptions struct {
- TextDocumentRegistrationOptions
- CallHierarchyOptions
- StaticRegistrationOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#cancelParams
-type CancelParams struct {
- // The request id to cancel.
- ID interface{} `json:"id"`
-}
-
-// Additional information that describes document changes.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#changeAnnotation
-type ChangeAnnotation struct {
- // A human-readable string describing the actual change. The string
- // is rendered prominent in the user interface.
- Label string `json:"label"`
- // A flag which indicates that user confirmation is needed
- // before applying the change.
- NeedsConfirmation bool `json:"needsConfirmation,omitempty"`
- // A human-readable string which is rendered less prominent in
- // the user interface.
- Description string `json:"description,omitempty"`
-}
-
-// An identifier to refer to a change annotation stored with a workspace edit.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#changeAnnotationIdentifier
-type (
- ChangeAnnotationIdentifier = string // (alias)
- // @since 3.18.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#changeAnnotationsSupportOptions
- ChangeAnnotationsSupportOptions struct {
- // Whether the client groups edits with equal labels into tree nodes,
- // for instance all edits labelled with "Changes in Strings" would
- // be a tree node.
- GroupsOnLabel bool `json:"groupsOnLabel,omitempty"`
- }
-)
-
-// Defines the capabilities provided by the client.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCapabilities
-type ClientCapabilities struct {
- // Workspace specific client capabilities.
- Workspace WorkspaceClientCapabilities `json:"workspace,omitempty"`
- // Text document specific client capabilities.
- TextDocument TextDocumentClientCapabilities `json:"textDocument,omitempty"`
- // Capabilities specific to the notebook document support.
- //
- // @since 3.17.0
- NotebookDocument *NotebookDocumentClientCapabilities `json:"notebookDocument,omitempty"`
- // Window specific client capabilities.
- Window WindowClientCapabilities `json:"window,omitempty"`
- // General client capabilities.
- //
- // @since 3.16.0
- General *GeneralClientCapabilities `json:"general,omitempty"`
- // Experimental client capabilities.
- Experimental interface{} `json:"experimental,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCodeActionKindOptions
-type ClientCodeActionKindOptions struct {
- // The code action kind values the client supports. When this
- // property exists the client also guarantees that it will
- // handle values outside its set gracefully and falls back
- // to a default value when unknown.
- ValueSet []CodeActionKind `json:"valueSet"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCodeActionLiteralOptions
-type ClientCodeActionLiteralOptions struct {
- // The code action kind is support with the following value
- // set.
- CodeActionKind ClientCodeActionKindOptions `json:"codeActionKind"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCodeActionResolveOptions
-type ClientCodeActionResolveOptions struct {
- // The properties that a client can resolve lazily.
- Properties []string `json:"properties"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCodeLensResolveOptions
-type ClientCodeLensResolveOptions struct {
- // The properties that a client can resolve lazily.
- Properties []string `json:"properties"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCompletionItemInsertTextModeOptions
-type ClientCompletionItemInsertTextModeOptions struct {
- ValueSet []InsertTextMode `json:"valueSet"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCompletionItemOptions
-type ClientCompletionItemOptions struct {
- // Client supports snippets as insert text.
- //
- // A snippet can define tab stops and placeholders with `$1`, `$2`
- // and `${3:foo}`. `$0` defines the final tab stop, it defaults to
- // the end of the snippet. Placeholders with equal identifiers are linked,
- // that is typing in one will update others too.
- SnippetSupport bool `json:"snippetSupport,omitempty"`
- // Client supports commit characters on a completion item.
- CommitCharactersSupport bool `json:"commitCharactersSupport,omitempty"`
- // Client supports the following content formats for the documentation
- // property. The order describes the preferred format of the client.
- DocumentationFormat []MarkupKind `json:"documentationFormat,omitempty"`
- // Client supports the deprecated property on a completion item.
- DeprecatedSupport bool `json:"deprecatedSupport,omitempty"`
- // Client supports the preselect property on a completion item.
- PreselectSupport bool `json:"preselectSupport,omitempty"`
- // Client supports the tag property on a completion item. Clients supporting
- // tags have to handle unknown tags gracefully. Clients especially need to
- // preserve unknown tags when sending a completion item back to the server in
- // a resolve call.
- //
- // @since 3.15.0
- TagSupport *CompletionItemTagOptions `json:"tagSupport,omitempty"`
- // Client support insert replace edit to control different behavior if a
- // completion item is inserted in the text or should replace text.
- //
- // @since 3.16.0
- InsertReplaceSupport bool `json:"insertReplaceSupport,omitempty"`
- // Indicates which properties a client can resolve lazily on a completion
- // item. Before version 3.16.0 only the predefined properties `documentation`
- // and `details` could be resolved lazily.
- //
- // @since 3.16.0
- ResolveSupport *ClientCompletionItemResolveOptions `json:"resolveSupport,omitempty"`
- // The client supports the `insertTextMode` property on
- // a completion item to override the whitespace handling mode
- // as defined by the client (see `insertTextMode`).
- //
- // @since 3.16.0
- InsertTextModeSupport *ClientCompletionItemInsertTextModeOptions `json:"insertTextModeSupport,omitempty"`
- // The client has support for completion item label
- // details (see also `CompletionItemLabelDetails`).
- //
- // @since 3.17.0
- LabelDetailsSupport bool `json:"labelDetailsSupport,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCompletionItemOptionsKind
-type ClientCompletionItemOptionsKind struct {
- // The completion item kind values the client supports. When this
- // property exists the client also guarantees that it will
- // handle values outside its set gracefully and falls back
- // to a default value when unknown.
- //
- // If this property is not present the client only supports
- // the completion items kinds from `Text` to `Reference` as defined in
- // the initial version of the protocol.
- ValueSet []CompletionItemKind `json:"valueSet,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientCompletionItemResolveOptions
-type ClientCompletionItemResolveOptions struct {
- // The properties that a client can resolve lazily.
- Properties []string `json:"properties"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientDiagnosticsTagOptions
-type ClientDiagnosticsTagOptions struct {
- // The tags supported by the client.
- ValueSet []DiagnosticTag `json:"valueSet"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientFoldingRangeKindOptions
-type ClientFoldingRangeKindOptions struct {
- // The folding range kind values the client supports. When this
- // property exists the client also guarantees that it will
- // handle values outside its set gracefully and falls back
- // to a default value when unknown.
- ValueSet []FoldingRangeKind `json:"valueSet,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientFoldingRangeOptions
-type ClientFoldingRangeOptions struct {
- // If set, the client signals that it supports setting collapsedText on
- // folding ranges to display custom labels instead of the default text.
- //
- // @since 3.17.0
- CollapsedText bool `json:"collapsedText,omitempty"`
-}
-
-// Information about the client
-//
-// @since 3.15.0
-// @since 3.18.0 ClientInfo type name added.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientInfo
-type ClientInfo struct {
- // The name of the client as defined by the client.
- Name string `json:"name"`
- // The client's version as defined by the client.
- Version string `json:"version,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientInlayHintResolveOptions
-type ClientInlayHintResolveOptions struct {
- // The properties that a client can resolve lazily.
- Properties []string `json:"properties"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSemanticTokensRequestFullDelta
-type ClientSemanticTokensRequestFullDelta struct {
- // The client will send the `textDocument/semanticTokens/full/delta` request if
- // the server provides a corresponding handler.
- Delta bool `json:"delta,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSemanticTokensRequestOptions
-type ClientSemanticTokensRequestOptions struct {
- // The client will send the `textDocument/semanticTokens/range` request if
- // the server provides a corresponding handler.
- Range *Or_ClientSemanticTokensRequestOptions_range `json:"range,omitempty"`
- // The client will send the `textDocument/semanticTokens/full` request if
- // the server provides a corresponding handler.
- Full *Or_ClientSemanticTokensRequestOptions_full `json:"full,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientShowMessageActionItemOptions
-type ClientShowMessageActionItemOptions struct {
- // Whether the client supports additional attributes which
- // are preserved and send back to the server in the
- // request's response.
- AdditionalPropertiesSupport bool `json:"additionalPropertiesSupport,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSignatureInformationOptions
-type ClientSignatureInformationOptions struct {
- // Client supports the following content formats for the documentation
- // property. The order describes the preferred format of the client.
- DocumentationFormat []MarkupKind `json:"documentationFormat,omitempty"`
- // Client capabilities specific to parameter information.
- ParameterInformation *ClientSignatureParameterInformationOptions `json:"parameterInformation,omitempty"`
- // The client supports the `activeParameter` property on `SignatureInformation`
- // literal.
- //
- // @since 3.16.0
- ActiveParameterSupport bool `json:"activeParameterSupport,omitempty"`
- // The client supports the `activeParameter` property on
- // `SignatureHelp`/`SignatureInformation` being set to `null` to
- // indicate that no parameter should be active.
- //
- // @since 3.18.0
- // @proposed
- NoActiveParameterSupport bool `json:"noActiveParameterSupport,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSignatureParameterInformationOptions
-type ClientSignatureParameterInformationOptions struct {
- // The client supports processing label offsets instead of a
- // simple label string.
- //
- // @since 3.14.0
- LabelOffsetSupport bool `json:"labelOffsetSupport,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSymbolKindOptions
-type ClientSymbolKindOptions struct {
- // The symbol kind values the client supports. When this
- // property exists the client also guarantees that it will
- // handle values outside its set gracefully and falls back
- // to a default value when unknown.
- //
- // If this property is not present the client only supports
- // the symbol kinds from `File` to `Array` as defined in
- // the initial version of the protocol.
- ValueSet []SymbolKind `json:"valueSet,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSymbolResolveOptions
-type ClientSymbolResolveOptions struct {
- // The properties that a client can resolve lazily. Usually
- // `location.range`
- Properties []string `json:"properties"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#clientSymbolTagOptions
-type ClientSymbolTagOptions struct {
- // The tags supported by the client.
- ValueSet []SymbolTag `json:"valueSet"`
-}
-
-// A code action represents a change that can be performed in code, e.g. to fix a problem or
-// to refactor code.
-//
-// A CodeAction must set either `edit` and/or a `command`. If both are supplied, the `edit` is applied first, then the `command` is executed.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeAction
-type CodeAction struct {
- // A short, human-readable, title for this code action.
- Title string `json:"title"`
- // The kind of the code action.
- //
- // Used to filter code actions.
- Kind CodeActionKind `json:"kind,omitempty"`
- // The diagnostics that this code action resolves.
- Diagnostics []Diagnostic `json:"diagnostics,omitempty"`
- // Marks this as a preferred action. Preferred actions are used by the `auto fix` command and can be targeted
- // by keybindings.
- //
- // A quick fix should be marked preferred if it properly addresses the underlying error.
- // A refactoring should be marked preferred if it is the most reasonable choice of actions to take.
- //
- // @since 3.15.0
- IsPreferred bool `json:"isPreferred,omitempty"`
- // Marks that the code action cannot currently be applied.
- //
- // Clients should follow the following guidelines regarding disabled code actions:
- //
- // - Disabled code actions are not shown in automatic [lightbulbs](https://code.visualstudio.com/docs/editor/editingevolved#_code-action)
- // code action menus.
- //
- // - Disabled actions are shown as faded out in the code action menu when the user requests a more specific type
- // of code action, such as refactorings.
- //
- // - If the user has a [keybinding](https://code.visualstudio.com/docs/editor/refactoring#_keybindings-for-code-actions)
- // that auto applies a code action and only disabled code actions are returned, the client should show the user an
- // error message with `reason` in the editor.
- //
- // @since 3.16.0
- Disabled *CodeActionDisabled `json:"disabled,omitempty"`
- // The workspace edit this code action performs.
- Edit *WorkspaceEdit `json:"edit,omitempty"`
- // A command this code action executes. If a code action
- // provides an edit and a command, first the edit is
- // executed and then the command.
- Command *Command `json:"command,omitempty"`
- // A data entry field that is preserved on a code action between
- // a `textDocument/codeAction` and a `codeAction/resolve` request.
- //
- // @since 3.16.0
- Data *json.RawMessage `json:"data,omitempty"`
-}
-
-// The Client Capabilities of a {@link CodeActionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionClientCapabilities
-type CodeActionClientCapabilities struct {
- // Whether code action supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client support code action literals of type `CodeAction` as a valid
- // response of the `textDocument/codeAction` request. If the property is not
- // set the request can only return `Command` literals.
- //
- // @since 3.8.0
- CodeActionLiteralSupport ClientCodeActionLiteralOptions `json:"codeActionLiteralSupport,omitempty"`
- // Whether code action supports the `isPreferred` property.
- //
- // @since 3.15.0
- IsPreferredSupport bool `json:"isPreferredSupport,omitempty"`
- // Whether code action supports the `disabled` property.
- //
- // @since 3.16.0
- DisabledSupport bool `json:"disabledSupport,omitempty"`
- // Whether code action supports the `data` property which is
- // preserved between a `textDocument/codeAction` and a
- // `codeAction/resolve` request.
- //
- // @since 3.16.0
- DataSupport bool `json:"dataSupport,omitempty"`
- // Whether the client supports resolving additional code action
- // properties via a separate `codeAction/resolve` request.
- //
- // @since 3.16.0
- ResolveSupport *ClientCodeActionResolveOptions `json:"resolveSupport,omitempty"`
- // Whether the client honors the change annotations in
- // text edits and resource operations returned via the
- // `CodeAction#edit` property by for example presenting
- // the workspace edit in the user interface and asking
- // for confirmation.
- //
- // @since 3.16.0
- HonorsChangeAnnotations bool `json:"honorsChangeAnnotations,omitempty"`
- // Whether the client supports documentation for a class of
- // code actions.
- //
- // @since 3.18.0
- // @proposed
- DocumentationSupport bool `json:"documentationSupport,omitempty"`
-}
-
-// Contains additional diagnostic information about the context in which
-// a {@link CodeActionProvider.provideCodeActions code action} is run.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionContext
-type CodeActionContext struct {
- // An array of diagnostics known on the client side overlapping the range provided to the
- // `textDocument/codeAction` request. They are provided so that the server knows which
- // errors are currently presented to the user for the given range. There is no guarantee
- // that these accurately reflect the error state of the resource. The primary parameter
- // to compute code actions is the provided range.
- Diagnostics []Diagnostic `json:"diagnostics"`
- // Requested kind of actions to return.
- //
- // Actions not of this kind are filtered out by the client before being shown. So servers
- // can omit computing them.
- Only []CodeActionKind `json:"only,omitempty"`
- // The reason why code actions were requested.
- //
- // @since 3.17.0
- TriggerKind *CodeActionTriggerKind `json:"triggerKind,omitempty"`
-}
-
-// Captures why the code action is currently disabled.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionDisabled
-type CodeActionDisabled struct {
- // Human readable description of why the code action is currently disabled.
- //
- // This is displayed in the code actions UI.
- Reason string `json:"reason"`
-}
-
-// A set of predefined code action kinds
-type CodeActionKind string
-
-// Documentation for a class of code actions.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionKindDocumentation
-type CodeActionKindDocumentation struct {
- // The kind of the code action being documented.
- //
- // If the kind is generic, such as `CodeActionKind.Refactor`, the documentation will be shown whenever any
- // refactorings are returned. If the kind if more specific, such as `CodeActionKind.RefactorExtract`, the
- // documentation will only be shown when extract refactoring code actions are returned.
- Kind CodeActionKind `json:"kind"`
- // Command that is ued to display the documentation to the user.
- //
- // The title of this documentation code action is taken from {@linkcode Command.title}
- Command Command `json:"command"`
-}
-
-// Provider options for a {@link CodeActionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionOptions
-type CodeActionOptions struct {
- // CodeActionKinds that this server may return.
- //
- // The list of kinds may be generic, such as `CodeActionKind.Refactor`, or the server
- // may list out every specific kind they provide.
- CodeActionKinds []CodeActionKind `json:"codeActionKinds,omitempty"`
- // Static documentation for a class of code actions.
- //
- // Documentation from the provider should be shown in the code actions menu if either:
- //
- //
- // - Code actions of `kind` are requested by the editor. In this case, the editor will show the documentation that
- // most closely matches the requested code action kind. For example, if a provider has documentation for
- // both `Refactor` and `RefactorExtract`, when the user requests code actions for `RefactorExtract`,
- // the editor will use the documentation for `RefactorExtract` instead of the documentation for `Refactor`.
- //
- //
- // - Any code actions of `kind` are returned by the provider.
- //
- // At most one documentation entry should be shown per provider.
- //
- // @since 3.18.0
- // @proposed
- Documentation []CodeActionKindDocumentation `json:"documentation,omitempty"`
- // The server provides support to resolve additional
- // information for a code action.
- //
- // @since 3.16.0
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link CodeActionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionParams
-type CodeActionParams struct {
- // The document in which the command was invoked.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The range for which the command was invoked.
- Range Range `json:"range"`
- // Context carrying additional information.
- Context CodeActionContext `json:"context"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link CodeActionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeActionRegistrationOptions
-type CodeActionRegistrationOptions struct {
- TextDocumentRegistrationOptions
- CodeActionOptions
-}
-
-// The reason why code actions were requested.
-//
-// @since 3.17.0
-type CodeActionTriggerKind uint32
-
-// Structure to capture a description for an error code.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeDescription
-type CodeDescription struct {
- // An URI to open with more information about the diagnostic error.
- Href URI `json:"href"`
-}
-
-// A code lens represents a {@link Command command} that should be shown along with
-// source text, like the number of references, a way to run tests, etc.
-//
-// A code lens is _unresolved_ when no command is associated to it. For performance
-// reasons the creation of a code lens and resolving should be done in two stages.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLens
-type CodeLens struct {
- // The range in which this code lens is valid. Should only span a single line.
- Range Range `json:"range"`
- // The command this code lens represents.
- Command *Command `json:"command,omitempty"`
- // A data entry field that is preserved on a code lens item between
- // a {@link CodeLensRequest} and a {@link CodeLensResolveRequest}
- Data interface{} `json:"data,omitempty"`
-}
-
-// The client capabilities of a {@link CodeLensRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensClientCapabilities
-type CodeLensClientCapabilities struct {
- // Whether code lens supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Whether the client supports resolving additional code lens
- // properties via a separate `codeLens/resolve` request.
- //
- // @since 3.18.0
- ResolveSupport *ClientCodeLensResolveOptions `json:"resolveSupport,omitempty"`
-}
-
-// Code Lens provider options of a {@link CodeLensRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensOptions
-type CodeLensOptions struct {
- // Code lens has a resolve provider as well.
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link CodeLensRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensParams
-type CodeLensParams struct {
- // The document to request code lens for.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link CodeLensRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensRegistrationOptions
-type CodeLensRegistrationOptions struct {
- TextDocumentRegistrationOptions
- CodeLensOptions
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#codeLensWorkspaceClientCapabilities
-type CodeLensWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from the
- // server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // code lenses currently shown. It should be used with absolute care and is
- // useful for situations where a server, for example, detects a project-wide
- // change that requires such a calculation.
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// Represents a color in RGBA space.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#color
-type Color struct {
- // The red component of this color in the range [0-1].
- Red float64 `json:"red"`
- // The green component of this color in the range [0-1].
- Green float64 `json:"green"`
- // The blue component of this color in the range [0-1].
- Blue float64 `json:"blue"`
- // The alpha component of this color in the range [0-1].
- Alpha float64 `json:"alpha"`
-}
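-
-// colorFromRGBA8 is an illustrative sketch, not part of the generated
-// protocol types: it shows how 8-bit RGBA channels map onto the [0-1]
-// component range documented on Color above. The helper name is hypothetical.
-func colorFromRGBA8(r, g, b, a uint8) Color {
- return Color{
- Red: float64(r) / 255,
- Green: float64(g) / 255,
- Blue: float64(b) / 255,
- Alpha: float64(a) / 255,
- }
-}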
-
-// Represents a color range from a document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#colorInformation
-type ColorInformation struct {
- // The range in the document where this color appears.
- Range Range `json:"range"`
- // The actual color value for this color range.
- Color Color `json:"color"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#colorPresentation
-type ColorPresentation struct {
- // The label of this color presentation. It will be shown on the color
- // picker header. By default this is also the text that is inserted when selecting
- // this color presentation.
- Label string `json:"label"`
- // An {@link TextEdit edit} which is applied to a document when selecting
- // this presentation for the color. When `falsy` the {@link ColorPresentation.label label}
- // is used.
- TextEdit *TextEdit `json:"textEdit,omitempty"`
- // An optional array of additional {@link TextEdit text edits} that are applied when
- // selecting this color presentation. Edits must not overlap with the main {@link ColorPresentation.textEdit edit} nor with themselves.
- AdditionalTextEdits []TextEdit `json:"additionalTextEdits,omitempty"`
-}
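-
-// Illustrative sketch, not part of the generated protocol types: the fallback
-// described above, using the edit's text when a TextEdit is present and the
-// label otherwise. The helper name is hypothetical, and it assumes TextEdit's
-// usual NewText field.
-func colorPresentationNewText(p ColorPresentation) string {
- if p.TextEdit != nil {
- return p.TextEdit.NewText
- }
- return p.Label
-}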
-
-// Parameters for a {@link ColorPresentationRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#colorPresentationParams
-type ColorPresentationParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The color to request presentations for.
- Color Color `json:"color"`
- // The range where the color would be inserted. Serves as a context.
- Range Range `json:"range"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Represents a reference to a command. Provides a title which
-// will be used to represent a command in the UI and, optionally,
-// an array of arguments which will be passed to the command handler
-// function when invoked.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#command
-type Command struct {
- // Title of the command, like `save`.
- Title string `json:"title"`
- // An optional tooltip.
- //
- // @since 3.18.0
- // @proposed
- Tooltip string `json:"tooltip,omitempty"`
- // The identifier of the actual command handler.
- Command string `json:"command"`
- // Arguments that the command handler should be
- // invoked with.
- Arguments []json.RawMessage `json:"arguments,omitempty"`
-}
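-
-// Illustrative sketch, not part of the generated protocol types: Arguments is
-// kept as raw JSON, so a handler decodes each entry into whatever shape it
-// expects; here the first argument is decoded as a plain string purely as an
-// example. The helper name is hypothetical.
-func firstCommandArgumentAsString(cmd Command) (string, error) {
- if len(cmd.Arguments) == 0 {
- return "", nil
- }
- var s string
- err := json.Unmarshal(cmd.Arguments[0], &s)
- return s, err
-}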
-
-// Completion client capabilities
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionClientCapabilities
-type CompletionClientCapabilities struct {
- // Whether completion supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports the following `CompletionItem` specific
- // capabilities.
- CompletionItem ClientCompletionItemOptions `json:"completionItem,omitempty"`
- CompletionItemKind *ClientCompletionItemOptionsKind `json:"completionItemKind,omitempty"`
- // Defines how the client handles whitespace and indentation
- // when accepting a completion item that uses multi line
- // text in either `insertText` or `textEdit`.
- //
- // @since 3.17.0
- InsertTextMode InsertTextMode `json:"insertTextMode,omitempty"`
- // The client supports sending additional context information for a
- // `textDocument/completion` request.
- ContextSupport bool `json:"contextSupport,omitempty"`
- // The client supports the following `CompletionList` specific
- // capabilities.
- //
- // @since 3.17.0
- CompletionList *CompletionListCapabilities `json:"completionList,omitempty"`
-}
-
-// Contains additional information about the context in which a completion request is triggered.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionContext
-type CompletionContext struct {
- // How the completion was triggered.
- TriggerKind CompletionTriggerKind `json:"triggerKind"`
- // The trigger character (a single character) that triggered code completion.
- // Is undefined if `triggerKind !== CompletionTriggerKind.TriggerCharacter`
- TriggerCharacter string `json:"triggerCharacter,omitempty"`
-}
-
-// A completion item represents a text snippet that is
-// proposed to complete text that is being typed.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItem
-type CompletionItem struct {
- // The label of this completion item.
- //
- // The label property is also by default the text that
- // is inserted when selecting this completion.
- //
- // If label details are provided the label itself should
- // be an unqualified name of the completion item.
- Label string `json:"label"`
- // Additional details for the label
- //
- // @since 3.17.0
- LabelDetails *CompletionItemLabelDetails `json:"labelDetails,omitempty"`
- // The kind of this completion item. Based on the kind,
- // an icon is chosen by the editor.
- Kind CompletionItemKind `json:"kind,omitempty"`
- // Tags for this completion item.
- //
- // @since 3.15.0
- Tags []CompletionItemTag `json:"tags,omitempty"`
- // A human-readable string with additional information
- // about this item, like type or symbol information.
- Detail string `json:"detail,omitempty"`
- // A human-readable string that represents a doc-comment.
- Documentation *Or_CompletionItem_documentation `json:"documentation,omitempty"`
- // Indicates if this item is deprecated.
- // @deprecated Use `tags` instead.
- Deprecated bool `json:"deprecated,omitempty"`
- // Select this item when showing.
- //
- // *Note* that only one completion item can be selected and that the
- // tool / client decides which item that is. The rule is that the *first*
- // item of those that match best is selected.
- Preselect bool `json:"preselect,omitempty"`
- // A string that should be used when comparing this item
- // with other items. When `falsy` the {@link CompletionItem.label label}
- // is used.
- SortText string `json:"sortText,omitempty"`
- // A string that should be used when filtering a set of
- // completion items. When `falsy` the {@link CompletionItem.label label}
- // is used.
- FilterText string `json:"filterText,omitempty"`
- // A string that should be inserted into a document when selecting
- // this completion. When `falsy` the {@link CompletionItem.label label}
- // is used.
- //
- // The `insertText` is subject to interpretation by the client side.
- // Some tools might not take the string literally. For example, when
- // code completion is requested in VS Code with `con` already typed
- // and a completion item with an `insertText` of `console` is
- // provided, VS Code will only insert `sole`. Therefore it is
- // recommended to use `textEdit` instead since it avoids additional client
- // side interpretation.
- InsertText string `json:"insertText,omitempty"`
- // The format of the insert text. The format applies to both the
- // `insertText` property and the `newText` property of a provided
- // `textEdit`. If omitted defaults to `InsertTextFormat.PlainText`.
- //
- // Please note that the insertTextFormat doesn't apply to
- // `additionalTextEdits`.
- InsertTextFormat *InsertTextFormat `json:"insertTextFormat,omitempty"`
- // How whitespace and indentation is handled during completion
- // item insertion. If not provided, the client's default value depends on
- // the `textDocument.completion.insertTextMode` client capability.
- //
- // @since 3.16.0
- InsertTextMode *InsertTextMode `json:"insertTextMode,omitempty"`
- // An {@link TextEdit edit} which is applied to a document when selecting
- // this completion. When an edit is provided the value of
- // {@link CompletionItem.insertText insertText} is ignored.
- //
- // Most editors support two different operations when accepting a completion
- // item. One is to insert a completion text and the other is to replace an
- // existing text with a completion text. Since this can usually not be
- // predetermined by a server it can report both ranges. Clients need to
- // signal support for `InsertReplaceEdits` via the
- // `textDocument.completion.insertReplaceSupport` client capability
- // property.
- //
- // *Note 1:* The text edit's range as well as both ranges from an insert
- // replace edit must be a [single line] and they must contain the position
- // at which completion has been requested.
- // *Note 2:* If an `InsertReplaceEdit` is returned the edit's insert range
- // must be a prefix of the edit's replace range, that means it must be
- // contained and starting at the same position.
- //
- // @since 3.16.0 additional type `InsertReplaceEdit`
- TextEdit *Or_CompletionItem_textEdit `json:"textEdit,omitempty"`
- // The edit text used if the completion item is part of a CompletionList and
- // CompletionList defines an item default for the text edit range.
- //
- // Clients will only honor this property if they opt into completion list
- // item defaults using the capability `completionList.itemDefaults`.
- //
- // If not provided and a list's default range is provided, the label
- // property is used as the text.
- //
- // @since 3.17.0
- TextEditText string `json:"textEditText,omitempty"`
- // An optional array of additional {@link TextEdit text edits} that are applied when
- // selecting this completion. Edits must not overlap (including the same insert position)
- // with the main {@link CompletionItem.textEdit edit} nor with themselves.
- //
- // Additional text edits should be used to change text unrelated to the current cursor position
- // (for example adding an import statement at the top of the file if the completion item will
- // insert an unqualified type).
- AdditionalTextEdits []TextEdit `json:"additionalTextEdits,omitempty"`
- // An optional set of characters that when pressed while this completion is active will accept it first and
- // then type that character. *Note* that all commit characters should have `length=1` and that superfluous
- // characters will be ignored.
- CommitCharacters []string `json:"commitCharacters,omitempty"`
- // An optional {@link Command command} that is executed *after* inserting this completion. *Note* that
- // additional modifications to the current document should be described with the
- // {@link CompletionItem.additionalTextEdits additionalTextEdits}-property.
- Command *Command `json:"command,omitempty"`
- // A data entry field that is preserved on a completion item between a
- // {@link CompletionRequest} and a {@link CompletionResolveRequest}.
- Data interface{} `json:"data,omitempty"`
-}
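-
-// Illustrative sketch, not part of the generated protocol types: *Note 2*
-// above requires an InsertReplaceEdit's insert range to be a prefix of its
-// replace range, i.e. contained in it and starting at the same position. A
-// minimal check of that invariant for single-line ranges, assuming the usual
-// Range{Start, End Position} shape defined elsewhere in this file
-// (hypothetical helper):
-func insertIsPrefixOfReplace(insert, replace Range) bool {
- sameStart := insert.Start.Line == replace.Start.Line &&
- insert.Start.Character == replace.Start.Character
- endsWithin := insert.End.Line == replace.End.Line &&
- insert.End.Character <= replace.End.Character
- return sameStart && endsWithin
-}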
-
-// In many cases the items of an actual completion result share the same
-// value for properties like `commitCharacters` or the range of a text
-// edit. A completion list can therefore define item defaults which will
-// be used if a completion item itself doesn't specify the value.
-//
-// If a completion list specifies a default value and a completion item
-// also specifies a corresponding value the one from the item is used.
-//
-// Servers are only allowed to return default values if the client
-// signals support for this via the `completionList.itemDefaults`
-// capability.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItemDefaults
-type CompletionItemDefaults struct {
- // A default commit character set.
- //
- // @since 3.17.0
- CommitCharacters []string `json:"commitCharacters,omitempty"`
- // A default edit range.
- //
- // @since 3.17.0
- EditRange *Or_CompletionItemDefaults_editRange `json:"editRange,omitempty"`
- // A default insert text format.
- //
- // @since 3.17.0
- InsertTextFormat *InsertTextFormat `json:"insertTextFormat,omitempty"`
- // A default insert text mode.
- //
- // @since 3.17.0
- InsertTextMode *InsertTextMode `json:"insertTextMode,omitempty"`
- // A default data value.
- //
- // @since 3.17.0
- Data interface{} `json:"data,omitempty"`
-}
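-
-// Illustrative sketch, not part of the generated protocol types: the merge
-// rule described above for `commitCharacters`: an item's own value wins,
-// otherwise the list default applies. The helper name is hypothetical.
-func effectiveCommitCharacters(item CompletionItem, defaults *CompletionItemDefaults) []string {
- if len(item.CommitCharacters) > 0 {
- return item.CommitCharacters
- }
- if defaults != nil {
- return defaults.CommitCharacters
- }
- return nil
-}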
-
-// The kind of a completion entry.
-type CompletionItemKind uint32
-
-// Additional details for a completion item label.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItemLabelDetails
-type CompletionItemLabelDetails struct {
- // An optional string which is rendered less prominently directly after {@link CompletionItem.label label},
- // without any spacing. Should be used for function signatures and type annotations.
- Detail string `json:"detail,omitempty"`
- // An optional string which is rendered less prominently after {@link CompletionItem.detail}. Should be used
- // for fully qualified names and file paths.
- Description string `json:"description,omitempty"`
-}
-
-// Completion item tags are extra annotations that tweak the rendering of a completion
-// item.
-//
-// @since 3.15.0
-type CompletionItemTag uint32
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionItemTagOptions
-type CompletionItemTagOptions struct {
- // The tags supported by the client.
- ValueSet []CompletionItemTag `json:"valueSet"`
-}
-
-// Represents a collection of {@link CompletionItem completion items} to be presented
-// in the editor.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionList
-type CompletionList struct {
- // This list is not complete. Further typing results in recomputing this list.
- //
- // Recomputed lists have all their items replaced (not appended) in the
- // incomplete completion sessions.
- IsIncomplete bool `json:"isIncomplete"`
- // In many cases the items of an actual completion result share the same
- // value for properties like `commitCharacters` or the range of a text
- // edit. A completion list can therefore define item defaults which will
- // be used if a completion item itself doesn't specify the value.
- //
- // If a completion list specifies a default value and a completion item
- // also specifies a corresponding value the one from the item is used.
- //
- // Servers are only allowed to return default values if the client
- // signals support for this via the `completionList.itemDefaults`
- // capability.
- //
- // @since 3.17.0
- ItemDefaults *CompletionItemDefaults `json:"itemDefaults,omitempty"`
- // The completion items.
- Items []CompletionItem `json:"items"`
-}
-
-// The client supports the following `CompletionList` specific
-// capabilities.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionListCapabilities
-type CompletionListCapabilities struct {
- // The client supports the following itemDefaults on
- // a completion list.
- //
- // The value lists the supported property names of the
- // `CompletionList.itemDefaults` object. If omitted
- // no properties are supported.
- //
- // @since 3.17.0
- ItemDefaults []string `json:"itemDefaults,omitempty"`
-}
-
-// Completion options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionOptions
-type CompletionOptions struct {
- // Most tools trigger completion request automatically without explicitly requesting
- // it using a keyboard shortcut (e.g. Ctrl+Space). Typically they do so when the user
- // starts to type an identifier. For example, if the user types `c` in a JavaScript file,
- // code completion will automatically pop up and present `console`, among others, as a
- // completion item. Characters that make up identifiers don't need to be listed here.
- //
- // If code completion should automatically be triggered on characters that are not valid inside
- // an identifier (for example `.` in JavaScript), list them in `triggerCharacters`.
- TriggerCharacters []string `json:"triggerCharacters,omitempty"`
- // The list of all possible characters that commit a completion. This field can be used
- // if clients don't support individual commit characters per completion item. See
- // `ClientCapabilities.textDocument.completion.completionItem.commitCharactersSupport`
- //
- // If a server provides both `allCommitCharacters` and commit characters on an individual
- // completion item the ones on the completion item win.
- //
- // @since 3.2.0
- AllCommitCharacters []string `json:"allCommitCharacters,omitempty"`
- // The server provides support to resolve additional
- // information for a completion item.
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- // The server supports the following `CompletionItem` specific
- // capabilities.
- //
- // @since 3.17.0
- CompletionItem *ServerCompletionItemOptions `json:"completionItem,omitempty"`
- WorkDoneProgressOptions
-}
-
-// Completion parameters
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionParams
-type CompletionParams struct {
- // The completion context. This is only available if the client specifies
- // to send this using the client capability `textDocument.completion.contextSupport === true`
- Context CompletionContext `json:"context,omitempty"`
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link CompletionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#completionRegistrationOptions
-type CompletionRegistrationOptions struct {
- TextDocumentRegistrationOptions
- CompletionOptions
-}
-
-// How a completion was triggered
-type CompletionTriggerKind uint32
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#configurationItem
-type ConfigurationItem struct {
- // The scope to get the configuration section for.
- ScopeURI *URI `json:"scopeUri,omitempty"`
- // The configuration section asked for.
- Section string `json:"section,omitempty"`
-}
-
-// The parameters of a configuration request.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#configurationParams
-type ConfigurationParams struct {
- Items []ConfigurationItem `json:"items"`
-}
-
-// Create file operation.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#createFile
-type CreateFile struct {
- // A create
- Kind string `json:"kind"`
- // The resource to create.
- URI DocumentURI `json:"uri"`
- // Additional options
- Options *CreateFileOptions `json:"options,omitempty"`
- ResourceOperation
-}
-
-// Options to create a file.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#createFileOptions
-type CreateFileOptions struct {
- // Overwrite existing file. Overwrite wins over `ignoreIfExists`
- Overwrite bool `json:"overwrite,omitempty"`
- // Ignore if exists.
- IgnoreIfExists bool `json:"ignoreIfExists,omitempty"`
-}
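-
-// Illustrative sketch, not part of the generated protocol types: the
-// precedence stated above when the target of a CreateFile already exists;
-// `overwrite` wins over `ignoreIfExists`. The helper name and the string
-// results are hypothetical.
-func createFileConflictBehavior(opts *CreateFileOptions) string {
- switch {
- case opts != nil && opts.Overwrite:
- return "overwrite" // overwrite wins even if ignoreIfExists is also set
- case opts != nil && opts.IgnoreIfExists:
- return "keep existing"
- default:
- return "report conflict" // assumption: neither flag set is left to the client
- }
-}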
-
-// The parameters sent in notifications/requests for user-initiated creation of
-// files.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#createFilesParams
-type CreateFilesParams struct {
- // An array of all files/folders created in this operation.
- Files []FileCreate `json:"files"`
-}
-
-// The declaration of a symbol representation as one or many {@link Location locations}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declaration
-type (
- Declaration = Or_Declaration // (alias)
- // @since 3.14.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationClientCapabilities
- DeclarationClientCapabilities struct {
- // Whether declaration supports dynamic registration. If this is set to `true`
- // the client supports the new `DeclarationRegistrationOptions` return value
- // for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports additional metadata in the form of declaration links.
- LinkSupport bool `json:"linkSupport,omitempty"`
- }
-)
-
-// Information about where a symbol is declared.
-//
-// Provides additional metadata over normal {@link Location location} declarations, including the range of
-// the declaring symbol.
-//
-// Servers should prefer returning `DeclarationLink` over `Declaration` if supported
-// by the client.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationLink
-type (
- DeclarationLink = LocationLink // (alias)
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationOptions
- DeclarationOptions struct {
- WorkDoneProgressOptions
- }
-)
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationParams
-type DeclarationParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#declarationRegistrationOptions
-type DeclarationRegistrationOptions struct {
- DeclarationOptions
- TextDocumentRegistrationOptions
- StaticRegistrationOptions
-}
-
-// The definition of a symbol represented as one or many {@link Location locations}.
-// For most programming languages there is only one location at which a symbol is
-// defined.
-//
-// Servers should prefer returning `DefinitionLink` over `Definition` if supported
-// by the client.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definition
-type (
- Definition = Or_Definition // (alias)
- // Client Capabilities for a {@link DefinitionRequest}.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionClientCapabilities
- DefinitionClientCapabilities struct {
- // Whether definition supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports additional metadata in the form of definition links.
- //
- // @since 3.14.0
- LinkSupport bool `json:"linkSupport,omitempty"`
- }
-)
-
-// Information about where a symbol is defined.
-//
-// Provides additional metadata over normal {@link Location location} definitions, including the range of
-// the defining symbol
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionLink
-type (
- DefinitionLink = LocationLink // (alias)
- // Server Capabilities for a {@link DefinitionRequest}.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionOptions
- DefinitionOptions struct {
- WorkDoneProgressOptions
- }
-)
-
-// Parameters for a {@link DefinitionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionParams
-type DefinitionParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link DefinitionRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#definitionRegistrationOptions
-type DefinitionRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DefinitionOptions
-}
-
-// Delete file operation
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#deleteFile
-type DeleteFile struct {
- // A delete
- Kind string `json:"kind"`
- // The file to delete.
- URI DocumentURI `json:"uri"`
- // Delete options.
- Options *DeleteFileOptions `json:"options,omitempty"`
- ResourceOperation
-}
-
-// Delete file options
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#deleteFileOptions
-type DeleteFileOptions struct {
- // Delete the content recursively if a folder is denoted.
- Recursive bool `json:"recursive,omitempty"`
- // Ignore the operation if the file doesn't exist.
- IgnoreIfNotExists bool `json:"ignoreIfNotExists,omitempty"`
-}
-
-// The parameters sent in notifications/requests for user-initiated deletes of
-// files.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#deleteFilesParams
-type DeleteFilesParams struct {
- // An array of all files/folders deleted in this operation.
- Files []FileDelete `json:"files"`
-}
-
-// Represents a diagnostic, such as a compiler error or warning. Diagnostic objects
-// are only valid in the scope of a resource.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnostic
-type Diagnostic struct {
- // The range at which the message applies
- Range Range `json:"range"`
- // The diagnostic's severity. To avoid interpretation mismatches when a
- // server is used with different clients it is highly recommended that servers
- // always provide a severity value.
- Severity DiagnosticSeverity `json:"severity,omitempty"`
- // The diagnostic's code, which usually appears in the user interface.
- Code interface{} `json:"code,omitempty"`
- // An optional property to describe the error code.
- // Requires the code field (above) to be present/not null.
- //
- // @since 3.16.0
- CodeDescription *CodeDescription `json:"codeDescription,omitempty"`
- // A human-readable string describing the source of this
- // diagnostic, e.g. 'typescript' or 'super lint'. It usually
- // appears in the user interface.
- Source string `json:"source,omitempty"`
- // The diagnostic's message. It usually appears in the user interface
- Message string `json:"message"`
- // Additional metadata about the diagnostic.
- //
- // @since 3.15.0
- Tags []DiagnosticTag `json:"tags,omitempty"`
- // An array of related diagnostic information, e.g. when symbol-names within
- // a scope collide, all definitions can be marked via this property.
- RelatedInformation []DiagnosticRelatedInformation `json:"relatedInformation,omitempty"`
- // A data entry field that is preserved between a `textDocument/publishDiagnostics`
- // notification and `textDocument/codeAction` request.
- //
- // @since 3.16.0
- Data *json.RawMessage `json:"data,omitempty"`
-}
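-
-// Illustrative sketch, not part of the generated protocol types: the
-// dependency noted above, a CodeDescription is only meaningful when Code is
-// set, expressed as a small validity check (hypothetical helper).
-func diagnosticCodeDescriptionValid(d Diagnostic) bool {
- if d.CodeDescription == nil {
- return true // nothing to validate
- }
- return d.Code != nil // codeDescription requires the code field to be present
-}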
-
-// Client capabilities specific to diagnostic pull requests.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticClientCapabilities
-type DiagnosticClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Whether the client supports related documents for document diagnostic pulls.
- RelatedDocumentSupport bool `json:"relatedDocumentSupport,omitempty"`
- DiagnosticsCapabilities
-}
-
-// Diagnostic options.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticOptions
-type DiagnosticOptions struct {
- // An optional identifier under which the diagnostics are
- // managed by the client.
- Identifier string `json:"identifier,omitempty"`
- // Whether the language has inter file dependencies meaning that
- // editing code in one file can result in a different diagnostic
- // set in another file. Inter file dependencies are common for
- // most programming languages and typically uncommon for linters.
- InterFileDependencies bool `json:"interFileDependencies"`
- // The server provides support for workspace diagnostics as well.
- WorkspaceDiagnostics bool `json:"workspaceDiagnostics"`
- WorkDoneProgressOptions
-}
-
-// Diagnostic registration options.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticRegistrationOptions
-type DiagnosticRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DiagnosticOptions
- StaticRegistrationOptions
-}
-
-// Represents a related message and source code location for a diagnostic. This should be
- // used to point to code locations that cause or are related to a diagnostic, e.g. when duplicating
-// a symbol in a scope.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticRelatedInformation
-type DiagnosticRelatedInformation struct {
- // The location of this related diagnostic information.
- Location Location `json:"location"`
- // The message of this related diagnostic information.
- Message string `json:"message"`
-}
-
-// Cancellation data returned from a diagnostic request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticServerCancellationData
-type DiagnosticServerCancellationData struct {
- RetriggerRequest bool `json:"retriggerRequest"`
-}
-
-// The diagnostic's severity.
-type DiagnosticSeverity uint32
-
-// The diagnostic tags.
-//
-// @since 3.15.0
-type DiagnosticTag uint32
-
-// Workspace client capabilities specific to diagnostic pull requests.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticWorkspaceClientCapabilities
-type DiagnosticWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from
- // the server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // pulled diagnostics currently shown. It should be used with absolute care and
- // is useful for situation where a server for example detects a project wide
- // change that requires such a calculation.
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// General diagnostics capabilities for pull and push model.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#diagnosticsCapabilities
-type DiagnosticsCapabilities struct {
- // Whether the client accepts diagnostics with related information.
- RelatedInformation bool `json:"relatedInformation,omitempty"`
- // Client supports the tag property to provide metadata about a diagnostic.
- // Clients supporting tags have to handle unknown tags gracefully.
- //
- // @since 3.15.0
- TagSupport *ClientDiagnosticsTagOptions `json:"tagSupport,omitempty"`
- // Client supports a codeDescription property
- //
- // @since 3.16.0
- CodeDescriptionSupport bool `json:"codeDescriptionSupport,omitempty"`
- // Whether code action supports the `data` property which is
- // preserved between a `textDocument/publishDiagnostics` and
- // `textDocument/codeAction` request.
- //
- // @since 3.16.0
- DataSupport bool `json:"dataSupport,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeConfigurationClientCapabilities
-type DidChangeConfigurationClientCapabilities struct {
- // Did change configuration notification supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// The parameters of a change configuration notification.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeConfigurationParams
-type DidChangeConfigurationParams struct {
- // The actual changed settings
- Settings interface{} `json:"settings"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeConfigurationRegistrationOptions
-type DidChangeConfigurationRegistrationOptions struct {
- Section *Or_DidChangeConfigurationRegistrationOptions_section `json:"section,omitempty"`
-}
-
-// The params sent in a change notebook document notification.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeNotebookDocumentParams
-type DidChangeNotebookDocumentParams struct {
- // The notebook document that did change. The version number points
- // to the version after all provided changes have been applied. If
- // only the text document content of a cell changes, the notebook version
- // doesn't necessarily have to change.
- NotebookDocument VersionedNotebookDocumentIdentifier `json:"notebookDocument"`
- // The actual changes to the notebook document.
- //
- // The changes describe single state changes to the notebook document.
- // So if there are two changes c1 (at array index 0) and c2 (at array
- // index 1) for a notebook in state S then c1 moves the notebook from
- // S to S' and c2 from S' to S''. So c1 is computed on the state S and
- // c2 is computed on the state S'.
- //
- // To mirror the content of a notebook using change events use the following approach:
- //
- // - start with the same initial content
- // - apply the 'notebookDocument/didChange' notifications in the order you receive them.
- // - apply the `NotebookChangeEvent`s in a single notification in the order
- // you receive them.
- Change NotebookDocumentChangeEvent `json:"change"`
-}
-
-// The change text document notification's parameters.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeTextDocumentParams
-type DidChangeTextDocumentParams struct {
- // The document that did change. The version number points
- // to the version after all provided content changes have
- // been applied.
- TextDocument VersionedTextDocumentIdentifier `json:"textDocument"`
- // The actual content changes. The content changes describe single state changes
- // to the document. So if there are two content changes c1 (at array index 0) and
- // c2 (at array index 1) for a document in state S then c1 moves the document from
- // S to S' and c2 from S' to S''. So c1 is computed on the state S and c2 is computed
- // on the state S'.
- //
- // To mirror the content of a document using change events use the following approach:
- //
- // - start with the same initial content
- // - apply the 'textDocument/didChange' notifications in the order you receive them.
- // - apply the `TextDocumentContentChangeEvent`s in a single notification in the order
- // you receive them.
- ContentChanges []TextDocumentContentChangeEvent `json:"contentChanges"`
-}
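-
-// Illustrative sketch, not part of the generated protocol types: the ordering
-// contract described above. Each content change is computed against the state
-// produced by the previous one, so mirroring a document is a strict
-// left-to-right fold over ContentChanges. `applyOne` is a hypothetical
-// function that applies a single change event to the document text.
-func mirrorDocument(initial string, changes []TextDocumentContentChangeEvent,
- applyOne func(doc string, change TextDocumentContentChangeEvent) string) string {
- doc := initial // start with the same initial content
- for _, change := range changes {
- doc = applyOne(doc, change) // c1 on S, then c2 on S', and so on
- }
- return doc
-}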
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeWatchedFilesClientCapabilities
-type DidChangeWatchedFilesClientCapabilities struct {
- // Did change watched files notification supports dynamic registration. Please note
- // that the current protocol doesn't support static configuration for file changes
- // from the server side.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Whether the client has support for {@link RelativePattern relative pattern}
- // or not.
- //
- // @since 3.17.0
- RelativePatternSupport bool `json:"relativePatternSupport,omitempty"`
-}
-
-// The watched files change notification's parameters.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeWatchedFilesParams
-type DidChangeWatchedFilesParams struct {
- // The actual file events.
- Changes []FileEvent `json:"changes"`
-}
-
-// Describe options to be used when registered for text document change events.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeWatchedFilesRegistrationOptions
-type DidChangeWatchedFilesRegistrationOptions struct {
- // The watchers to register.
- Watchers []FileSystemWatcher `json:"watchers"`
-}
-
-// The parameters of a `workspace/didChangeWorkspaceFolders` notification.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didChangeWorkspaceFoldersParams
-type DidChangeWorkspaceFoldersParams struct {
- // The actual workspace folder change event.
- Event WorkspaceFoldersChangeEvent `json:"event"`
-}
-
-// The params sent in a close notebook document notification.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didCloseNotebookDocumentParams
-type DidCloseNotebookDocumentParams struct {
- // The notebook document that got closed.
- NotebookDocument NotebookDocumentIdentifier `json:"notebookDocument"`
- // The text documents that represent the content
- // of a notebook cell that got closed.
- CellTextDocuments []TextDocumentIdentifier `json:"cellTextDocuments"`
-}
-
-// The parameters sent in a close text document notification
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didCloseTextDocumentParams
-type DidCloseTextDocumentParams struct {
- // The document that was closed.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
-}
-
-// The params sent in an open notebook document notification.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didOpenNotebookDocumentParams
-type DidOpenNotebookDocumentParams struct {
- // The notebook document that got opened.
- NotebookDocument NotebookDocument `json:"notebookDocument"`
- // The text documents that represent the content
- // of a notebook cell.
- CellTextDocuments []TextDocumentItem `json:"cellTextDocuments"`
-}
-
-// The parameters sent in an open text document notification
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didOpenTextDocumentParams
-type DidOpenTextDocumentParams struct {
- // The document that was opened.
- TextDocument TextDocumentItem `json:"textDocument"`
-}
-
-// The params sent in a save notebook document notification.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didSaveNotebookDocumentParams
-type DidSaveNotebookDocumentParams struct {
- // The notebook document that got saved.
- NotebookDocument NotebookDocumentIdentifier `json:"notebookDocument"`
-}
-
-// The parameters sent in a save text document notification
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#didSaveTextDocumentParams
-type DidSaveTextDocumentParams struct {
- // The document that was saved.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // Optionally the content when saved. Depends on the includeText value
- // when the save notification was requested.
- Text *string `json:"text,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentColorClientCapabilities
-type DocumentColorClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `DocumentColorRegistrationOptions` return value
- // for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentColorOptions
-type DocumentColorOptions struct {
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link DocumentColorRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentColorParams
-type DocumentColorParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentColorRegistrationOptions
-type DocumentColorRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentColorOptions
- StaticRegistrationOptions
-}
-
-// Parameters of the document diagnostic request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentDiagnosticParams
-type DocumentDiagnosticParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The additional identifier provided during registration.
- Identifier string `json:"identifier,omitempty"`
- // The result id of a previous response if provided.
- PreviousResultID string `json:"previousResultId,omitempty"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// The result of a document diagnostic pull request. A report can
-// either be a full report containing all diagnostics for the
-// requested document or an unchanged report indicating that nothing
-// has changed in terms of diagnostics in comparison to the last
-// pull request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentDiagnosticReport
-type (
- DocumentDiagnosticReport = Or_DocumentDiagnosticReport // (alias)
- // The document diagnostic report kinds.
- //
- // @since 3.17.0
- DocumentDiagnosticReportKind string
-)
-
-// A partial result for a document diagnostic report.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentDiagnosticReportPartialResult
-type DocumentDiagnosticReportPartialResult struct {
- RelatedDocuments map[DocumentURI]interface{} `json:"relatedDocuments"`
-}
-
-// A document filter describes a top level text document or
-// a notebook cell document.
-//
-// @since 3.17.0 - proposed support for NotebookCellTextDocumentFilter.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFilter
-type (
- DocumentFilter = Or_DocumentFilter // (alias)
- // Client capabilities of a {@link DocumentFormattingRequest}.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFormattingClientCapabilities
- DocumentFormattingClientCapabilities struct {
- // Whether formatting supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- }
-)
-
-// Provider options for a {@link DocumentFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFormattingOptions
-type DocumentFormattingOptions struct {
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link DocumentFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFormattingParams
-type DocumentFormattingParams struct {
- // The document to format.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The format options.
- Options FormattingOptions `json:"options"`
- WorkDoneProgressParams
-}
-
-// Registration options for a {@link DocumentFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentFormattingRegistrationOptions
-type DocumentFormattingRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentFormattingOptions
-}
-
-// A document highlight is a range inside a text document which deserves
-// special attention. Usually a document highlight is visualized by changing
-// the background color of its range.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlight
-type DocumentHighlight struct {
- // The range this highlight applies to.
- Range Range `json:"range"`
- // The highlight kind, default is {@link DocumentHighlightKind.Text text}.
- Kind DocumentHighlightKind `json:"kind,omitempty"`
-}
-
-// Client Capabilities for a {@link DocumentHighlightRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlightClientCapabilities
-type DocumentHighlightClientCapabilities struct {
- // Whether document highlight supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// A document highlight kind.
-type DocumentHighlightKind uint32
-
-// Provider options for a {@link DocumentHighlightRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlightOptions
-type DocumentHighlightOptions struct {
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link DocumentHighlightRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlightParams
-type DocumentHighlightParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link DocumentHighlightRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentHighlightRegistrationOptions
-type DocumentHighlightRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentHighlightOptions
-}
-
-// A document link is a range in a text document that links to an internal or external resource, like another
-// text document or a web site.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLink
-type DocumentLink struct {
- // The range this link applies to.
- Range Range `json:"range"`
- // The uri this link points to. If missing a resolve request is sent later.
- Target *URI `json:"target,omitempty"`
- // The tooltip text when you hover over this link.
- //
- // If a tooltip is provided, it will be displayed in a string that includes instructions on how to
- // trigger the link, such as `{0} (ctrl + click)`. The specific instructions vary depending on OS,
- // user settings, and localization.
- //
- // @since 3.15.0
- Tooltip string `json:"tooltip,omitempty"`
- // A data entry field that is preserved on a document link between a
- // DocumentLinkRequest and a DocumentLinkResolveRequest.
- Data interface{} `json:"data,omitempty"`
-}
-
-// The client capabilities of a {@link DocumentLinkRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLinkClientCapabilities
-type DocumentLinkClientCapabilities struct {
- // Whether document link supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Whether the client supports the `tooltip` property on `DocumentLink`.
- //
- // @since 3.15.0
- TooltipSupport bool `json:"tooltipSupport,omitempty"`
-}
-
-// Provider options for a {@link DocumentLinkRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLinkOptions
-type DocumentLinkOptions struct {
- // Document links have a resolve provider as well.
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link DocumentLinkRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLinkParams
-type DocumentLinkParams struct {
- // The document to provide document links for.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link DocumentLinkRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentLinkRegistrationOptions
-type DocumentLinkRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentLinkOptions
-}
-
-// Client capabilities of a {@link DocumentOnTypeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentOnTypeFormattingClientCapabilities
-type DocumentOnTypeFormattingClientCapabilities struct {
- // Whether on type formatting supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// Provider options for a {@link DocumentOnTypeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentOnTypeFormattingOptions
-type DocumentOnTypeFormattingOptions struct {
- // A character on which formatting should be triggered, like `{`.
- FirstTriggerCharacter string `json:"firstTriggerCharacter"`
- // More trigger characters.
- MoreTriggerCharacter []string `json:"moreTriggerCharacter,omitempty"`
-}
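-
-// Illustrative sketch, not part of the generated protocol types: whether a
-// typed character should trigger on-type formatting, combining
-// `firstTriggerCharacter` with the optional `moreTriggerCharacter` list.
-// The helper name is hypothetical.
-func triggersOnTypeFormatting(opts DocumentOnTypeFormattingOptions, ch string) bool {
- if ch == opts.FirstTriggerCharacter {
- return true
- }
- for _, more := range opts.MoreTriggerCharacter {
- if ch == more {
- return true
- }
- }
- return false
-}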
-
-// The parameters of a {@link DocumentOnTypeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentOnTypeFormattingParams
-type DocumentOnTypeFormattingParams struct {
- // The document to format.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The position around which the on type formatting should happen.
- // This is not necessarily the exact position where the character denoted
- // by the property `ch` got typed.
- Position Position `json:"position"`
- // The character that has been typed that triggered the formatting
- // on type request. That is not necessarily the last character that
- // got inserted into the document since the client could auto insert
- // characters as well (e.g. like automatic brace completion).
- Ch string `json:"ch"`
- // The formatting options.
- Options FormattingOptions `json:"options"`
-}
-
-// Registration options for a {@link DocumentOnTypeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentOnTypeFormattingRegistrationOptions
-type DocumentOnTypeFormattingRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentOnTypeFormattingOptions
-}
-
-// Client capabilities of a {@link DocumentRangeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangeFormattingClientCapabilities
-type DocumentRangeFormattingClientCapabilities struct {
- // Whether range formatting supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Whether the client supports formatting multiple ranges at once.
- //
- // @since 3.18.0
- // @proposed
- RangesSupport bool `json:"rangesSupport,omitempty"`
-}
-
-// Provider options for a {@link DocumentRangeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangeFormattingOptions
-type DocumentRangeFormattingOptions struct {
- // Whether the server supports formatting multiple ranges at once.
- //
- // @since 3.18.0
- // @proposed
- RangesSupport bool `json:"rangesSupport,omitempty"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link DocumentRangeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangeFormattingParams
-type DocumentRangeFormattingParams struct {
- // The document to format.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The range to format
- Range Range `json:"range"`
- // The format options
- Options FormattingOptions `json:"options"`
- WorkDoneProgressParams
-}
-
-// Registration options for a {@link DocumentRangeFormattingRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangeFormattingRegistrationOptions
-type DocumentRangeFormattingRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentRangeFormattingOptions
-}
-
-// The parameters of a {@link DocumentRangesFormattingRequest}.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentRangesFormattingParams
-type DocumentRangesFormattingParams struct {
- // The document to format.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The ranges to format
- Ranges []Range `json:"ranges"`
- // The format options
- Options FormattingOptions `json:"options"`
- WorkDoneProgressParams
-}
-
-// A document selector is the combination of one or many document filters.
-//
-// @sample `let sel:DocumentSelector = [{ language: 'typescript' }, { language: 'json', pattern: '**∕tsconfig.json' }]`;
-//
-// The use of a string as a document filter is deprecated @since 3.16.0.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSelector
-type (
- DocumentSelector = []DocumentFilter // (alias)
- // Represents programming constructs like variables, classes, interfaces etc.
- // that appear in a document. Document symbols can be hierarchical and they
- // have two ranges: one that encloses its definition and one that points to
- // its most interesting range, e.g. the range of an identifier.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbol
- DocumentSymbol struct {
- // The name of this symbol. Will be displayed in the user interface and therefore must not be
- // an empty string or a string only consisting of white spaces.
- Name string `json:"name"`
- // More detail for this symbol, e.g. the signature of a function.
- Detail string `json:"detail,omitempty"`
- // The kind of this symbol.
- Kind SymbolKind `json:"kind"`
- // Tags for this document symbol.
- //
- // @since 3.16.0
- Tags []SymbolTag `json:"tags,omitempty"`
- // Indicates if this symbol is deprecated.
- //
- // @deprecated Use tags instead
- Deprecated bool `json:"deprecated,omitempty"`
- // The range enclosing this symbol not including leading/trailing whitespace but everything else
- // like comments. This information is typically used to determine if the client's cursor is
- // inside the symbol, to reveal the symbol in the UI.
- Range Range `json:"range"`
- // The range that should be selected and revealed when this symbol is being picked, e.g. the name of a function.
- // Must be contained by the `range`.
- SelectionRange Range `json:"selectionRange"`
- // Children of this symbol, e.g. properties of a class.
- Children []DocumentSymbol `json:"children,omitempty"`
- }
-)
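-
-// Illustrative sketch, not part of the generated protocol types: DocumentSymbol
-// is hierarchical, so consumers typically walk it recursively; here a tree is
-// flattened into a slice (hypothetical helper).
-func flattenDocumentSymbols(symbols []DocumentSymbol) []DocumentSymbol {
- var out []DocumentSymbol
- for _, s := range symbols {
- out = append(out, s)
- out = append(out, flattenDocumentSymbols(s.Children)...)
- }
- return out
-}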
-
-// Client Capabilities for a {@link DocumentSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbolClientCapabilities
-type DocumentSymbolClientCapabilities struct {
- // Whether document symbol supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Specific capabilities for the `SymbolKind` in the
- // `textDocument/documentSymbol` request.
- SymbolKind *ClientSymbolKindOptions `json:"symbolKind,omitempty"`
- // The client supports hierarchical document symbols.
- HierarchicalDocumentSymbolSupport bool `json:"hierarchicalDocumentSymbolSupport,omitempty"`
- // The client supports tags on `SymbolInformation`. Tags are supported on
- // `DocumentSymbol` if `hierarchicalDocumentSymbolSupport` is set to true.
- // Clients supporting tags have to handle unknown tags gracefully.
- //
- // @since 3.16.0
- TagSupport *ClientSymbolTagOptions `json:"tagSupport,omitempty"`
- // The client supports an additional label presented in the UI when
- // registering a document symbol provider.
- //
- // @since 3.16.0
- LabelSupport bool `json:"labelSupport,omitempty"`
-}
-
-// Provider options for a {@link DocumentSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbolOptions
-type DocumentSymbolOptions struct {
- // A human-readable string that is shown when multiple outline trees
- // are shown for the same document.
- //
- // @since 3.16.0
- Label string `json:"label,omitempty"`
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link DocumentSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbolParams
-type DocumentSymbolParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link DocumentSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentSymbolRegistrationOptions
-type DocumentSymbolRegistrationOptions struct {
- TextDocumentRegistrationOptions
- DocumentSymbolOptions
-}
-
-// Edit range variant that includes ranges for insert and replace operations.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#editRangeWithInsertReplace
-type EditRangeWithInsertReplace struct {
- Insert Range `json:"insert"`
- Replace Range `json:"replace"`
-}
-
-// Predefined error codes.
-type ErrorCodes int32
-
-// The client capabilities of a {@link ExecuteCommandRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executeCommandClientCapabilities
-type ExecuteCommandClientCapabilities struct {
- // Execute command supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// The server capabilities of a {@link ExecuteCommandRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executeCommandOptions
-type ExecuteCommandOptions struct {
- // The commands to be executed on the server
- Commands []string `json:"commands"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link ExecuteCommandRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executeCommandParams
-type ExecuteCommandParams struct {
- // The identifier of the actual command handler.
- Command string `json:"command"`
- // Arguments that the command should be invoked with.
- Arguments []json.RawMessage `json:"arguments,omitempty"`
- WorkDoneProgressParams
-}
-
-// Registration options for a {@link ExecuteCommandRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executeCommandRegistrationOptions
-type ExecuteCommandRegistrationOptions struct {
- ExecuteCommandOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#executionSummary
-type ExecutionSummary struct {
- // A strictly monotonically increasing value
- // indicating the execution order of a cell
- // inside a notebook.
- ExecutionOrder uint32 `json:"executionOrder"`
- // Whether the execution was successful or
- // not, if known by the client.
- Success bool `json:"success,omitempty"`
-}
-type FailureHandlingKind string
-
-// The file event type
-type FileChangeType uint32
-
-// Represents information on a file/folder create.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileCreate
-type FileCreate struct {
- // A file:// URI for the location of the file/folder being created.
- URI string `json:"uri"`
-}
-
-// Represents information on a file/folder delete.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileDelete
-type FileDelete struct {
- // A file:// URI for the location of the file/folder being deleted.
- URI string `json:"uri"`
-}
-
-// An event describing a file change.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileEvent
-type FileEvent struct {
- // The file's uri.
- URI DocumentURI `json:"uri"`
- // The change type.
- Type FileChangeType `json:"type"`
-}
-
-// Capabilities relating to events from file operations by the user in the client.
-//
-// These events do not come from the file system; they come from user operations
-// like renaming a file in the UI.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationClientCapabilities
-type FileOperationClientCapabilities struct {
- // Whether the client supports dynamic registration for file requests/notifications.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client has support for sending didCreateFiles notifications.
- DidCreate bool `json:"didCreate,omitempty"`
- // The client has support for sending willCreateFiles requests.
- WillCreate bool `json:"willCreate,omitempty"`
- // The client has support for sending didRenameFiles notifications.
- DidRename bool `json:"didRename,omitempty"`
- // The client has support for sending willRenameFiles requests.
- WillRename bool `json:"willRename,omitempty"`
- // The client has support for sending didDeleteFiles notifications.
- DidDelete bool `json:"didDelete,omitempty"`
- // The client has support for sending willDeleteFiles requests.
- WillDelete bool `json:"willDelete,omitempty"`
-}
-
-// A filter to describe in which file operation requests or notifications
-// the server is interested in receiving.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationFilter
-type FileOperationFilter struct {
- // A Uri scheme like `file` or `untitled`.
- Scheme string `json:"scheme,omitempty"`
- // The actual file operation pattern.
- Pattern FileOperationPattern `json:"pattern"`
-}
-
-// Options for notifications/requests for user operations on files.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationOptions
-type FileOperationOptions struct {
- // The server is interested in receiving didCreateFiles notifications.
- DidCreate *FileOperationRegistrationOptions `json:"didCreate,omitempty"`
- // The server is interested in receiving willCreateFiles requests.
- WillCreate *FileOperationRegistrationOptions `json:"willCreate,omitempty"`
- // The server is interested in receiving didRenameFiles notifications.
- DidRename *FileOperationRegistrationOptions `json:"didRename,omitempty"`
- // The server is interested in receiving willRenameFiles requests.
- WillRename *FileOperationRegistrationOptions `json:"willRename,omitempty"`
- // The server is interested in receiving didDeleteFiles file notifications.
- DidDelete *FileOperationRegistrationOptions `json:"didDelete,omitempty"`
- // The server is interested in receiving willDeleteFiles file requests.
- WillDelete *FileOperationRegistrationOptions `json:"willDelete,omitempty"`
-}
-
-// A pattern to describe in which file operation requests or notifications
-// the server is interested in receiving.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationPattern
-type FileOperationPattern struct {
- // The glob pattern to match. Glob patterns can have the following syntax:
- //
- // - `*` to match one or more characters in a path segment
- // - `?` to match on one character in a path segment
- // - `**` to match any number of path segments, including none
- // - `{}` to group sub patterns into an OR expression. (e.g. `**/*.{ts,js}` matches all TypeScript and JavaScript files)
- // - `[]` to declare a range of characters to match in a path segment (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …)
- // - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`)
- Glob string `json:"glob"`
- // Whether to match files or folders with this pattern.
- //
- // Matches both if undefined.
- Matches *FileOperationPatternKind `json:"matches,omitempty"`
- // Additional options used during matching.
- Options *FileOperationPatternOptions `json:"options,omitempty"`
-}
-
-// A pattern kind describing if a glob pattern matches a file, a folder, or
-// both.
-//
-// @since 3.16.0
-type FileOperationPatternKind string
-
-// Matching options for the file operation pattern.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationPatternOptions
-type FileOperationPatternOptions struct {
- // The pattern should be matched ignoring casing.
- IgnoreCase bool `json:"ignoreCase,omitempty"`
-}
-
-// The options to register for file operations.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileOperationRegistrationOptions
-type FileOperationRegistrationOptions struct {
- // The actual filters.
- Filters []FileOperationFilter `json:"filters"`
-}
-
-// Represents information on a file/folder rename.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileRename
-type FileRename struct {
- // A file:// URI for the original location of the file/folder being renamed.
- OldURI string `json:"oldUri"`
- // A file:// URI for the new location of the file/folder being renamed.
- NewURI string `json:"newUri"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fileSystemWatcher
-type FileSystemWatcher struct {
- // The glob pattern to watch. See {@link GlobPattern glob pattern} for more detail.
- //
- // @since 3.17.0 support for relative patterns.
- GlobPattern GlobPattern `json:"globPattern"`
- // The kind of events of interest. If omitted, it defaults
- // to WatchKind.Create | WatchKind.Change | WatchKind.Delete,
- // which is 7.
- Kind *WatchKind `json:"kind,omitempty"`
-}
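-
-// Illustrative note, not part of the generated protocol types: the default
-// Kind of 7 is just the bitwise OR of the three WatchKind flags defined by
-// the LSP spec (Create = 1, Change = 2, Delete = 4), e.g.
-//
-//   kind := WatchKind(1 /*Create*/ | 2 /*Change*/ | 4 /*Delete*/) // == 7
-//
-// Names for the individual flag constants are omitted here because they are
-// assumptions about this package.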
-
-// Represents a folding range. To be valid, start and end line must be zero or larger and smaller
-// than the number of lines in the document. Clients are free to ignore invalid ranges.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRange
-type FoldingRange struct {
- // The zero-based start line of the range to fold. The folded area starts after the line's last character.
- // To be valid, the start must be zero or larger and smaller than the number of lines in the document.
- StartLine uint32 `json:"startLine"`
- // The zero-based character offset from where the folded range starts. If not defined, defaults to the length of the start line.
- StartCharacter uint32 `json:"startCharacter,omitempty"`
- // The zero-based end line of the range to fold. The folded area ends with the line's last character.
- // To be valid, the end must be zero or larger and smaller than the number of lines in the document.
- EndLine uint32 `json:"endLine"`
- // The zero-based character offset before the folded range ends. If not defined, defaults to the length of the end line.
- EndCharacter uint32 `json:"endCharacter,omitempty"`
- // Describes the kind of the folding range such as 'comment' or 'region'. The kind
- // is used to categorize folding ranges and used by commands like 'Fold all comments'.
- // See {@link FoldingRangeKind} for an enumeration of standardized kinds.
- Kind string `json:"kind,omitempty"`
- // The text that the client should show when the specified range is
- // collapsed. If not defined or not supported by the client, a default
- // will be chosen by the client.
- //
- // @since 3.17.0
- CollapsedText string `json:"collapsedText,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeClientCapabilities
-type FoldingRangeClientCapabilities struct {
- // Whether implementation supports dynamic registration for folding range
- // providers. If this is set to `true` the client supports the new
- // `FoldingRangeRegistrationOptions` return value for the corresponding
- // server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The maximum number of folding ranges that the client prefers to receive
- // per document. The value serves as a hint; servers are
- // limit.
- RangeLimit uint32 `json:"rangeLimit,omitempty"`
- // If set, the client signals that it only supports folding complete lines.
- // If set, the client will ignore the specified `startCharacter` and `endCharacter`
- // properties in a FoldingRange.
- LineFoldingOnly bool `json:"lineFoldingOnly,omitempty"`
- // Specific options for the folding range kind.
- //
- // @since 3.17.0
- FoldingRangeKind *ClientFoldingRangeKindOptions `json:"foldingRangeKind,omitempty"`
- // Specific options for the folding range.
- //
- // @since 3.17.0
- FoldingRange *ClientFoldingRangeOptions `json:"foldingRange,omitempty"`
-}
-
-// A set of predefined range kinds.
-type FoldingRangeKind string
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeOptions
-type FoldingRangeOptions struct {
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link FoldingRangeRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeParams
-type FoldingRangeParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeRegistrationOptions
-type FoldingRangeRegistrationOptions struct {
- TextDocumentRegistrationOptions
- FoldingRangeOptions
- StaticRegistrationOptions
-}
-
-// Client workspace capabilities specific to folding ranges
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#foldingRangeWorkspaceClientCapabilities
-type FoldingRangeWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from the
- // server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // folding ranges currently shown. It should be used with absolute care and is
- // useful for situations where a server, for example, detects a project-wide
- // change that requires such a calculation.
- //
- // @since 3.18.0
- // @proposed
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// Value-object describing what options formatting should use.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#formattingOptions
-type FormattingOptions struct {
- // Size of a tab in spaces.
- TabSize uint32 `json:"tabSize"`
- // Prefer spaces over tabs.
- InsertSpaces bool `json:"insertSpaces"`
- // Trim trailing whitespace on a line.
- //
- // @since 3.15.0
- TrimTrailingWhitespace bool `json:"trimTrailingWhitespace,omitempty"`
- // Insert a newline character at the end of the file if one does not exist.
- //
- // @since 3.15.0
- InsertFinalNewline bool `json:"insertFinalNewline,omitempty"`
- // Trim all newlines after the final newline at the end of the file.
- //
- // @since 3.15.0
- TrimFinalNewlines bool `json:"trimFinalNewlines,omitempty"`
-}
-
-// A diagnostic report with a full set of problems.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#fullDocumentDiagnosticReport
-type FullDocumentDiagnosticReport struct {
- // A full document diagnostic report.
- Kind string `json:"kind"`
- // An optional result id. If provided it will
- // be sent on the next diagnostic request for the
- // same document.
- ResultID string `json:"resultId,omitempty"`
- // The actual items.
- Items []Diagnostic `json:"items"`
-}
-
-// General client capabilities.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#generalClientCapabilities
-type GeneralClientCapabilities struct {
- // Client capability that signals how the client
- // handles stale requests (e.g. a request
- // for which the client will not process the response
- // anymore since the information is outdated).
- //
- // @since 3.17.0
- StaleRequestSupport *StaleRequestSupportOptions `json:"staleRequestSupport,omitempty"`
- // Client capabilities specific to regular expressions.
- //
- // @since 3.16.0
- RegularExpressions *RegularExpressionsClientCapabilities `json:"regularExpressions,omitempty"`
- // Client capabilities specific to the client's markdown parser.
- //
- // @since 3.16.0
- Markdown *MarkdownClientCapabilities `json:"markdown,omitempty"`
- // The position encodings supported by the client. Client and server
- // have to agree on the same position encoding to ensure that offsets
- // (e.g. character position in a line) are interpreted the same on both
- // sides.
- //
- // To keep the protocol backwards compatible the following applies: if
- // the value 'utf-16' is missing from the array of position encodings
- // servers can assume that the client supports UTF-16. UTF-16 is
- // therefore a mandatory encoding.
- //
- // If omitted it defaults to ['utf-16'].
- //
- // Implementation considerations: since the conversion from one encoding
- // into another requires the content of the file / line, the conversion
- // is best done where the file is read, which is usually on the server
- // side.
- //
- // @since 3.17.0
- PositionEncodings []PositionEncodingKind `json:"positionEncodings,omitempty"`
-}
-
-// The glob pattern. Either a string pattern or a relative pattern.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#globPattern
-type (
- GlobPattern = Or_GlobPattern // (alias)
- // The result of a hover request.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hover
- Hover struct {
- // The hover's content
- Contents MarkupContent `json:"contents"`
- // An optional range inside the text document that is used to
- // visualize the hover, e.g. by changing the background color.
- Range Range `json:"range,omitempty"`
- }
-)
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hoverClientCapabilities
-type HoverClientCapabilities struct {
- // Whether hover supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Client supports the following content formats for the content
- // property. The order describes the preferred format of the client.
- ContentFormat []MarkupKind `json:"contentFormat,omitempty"`
-}
-
-// Hover options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hoverOptions
-type HoverOptions struct {
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link HoverRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hoverParams
-type HoverParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// Registration options for a {@link HoverRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#hoverRegistrationOptions
-type HoverRegistrationOptions struct {
- TextDocumentRegistrationOptions
- HoverOptions
-}
-
-// @since 3.6.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#implementationClientCapabilities
-type ImplementationClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `ImplementationRegistrationOptions` return value
- // for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports additional metadata in the form of definition links.
- //
- // @since 3.14.0
- LinkSupport bool `json:"linkSupport,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#implementationOptions
-type ImplementationOptions struct {
- WorkDoneProgressOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#implementationParams
-type ImplementationParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#implementationRegistrationOptions
-type ImplementationRegistrationOptions struct {
- TextDocumentRegistrationOptions
- ImplementationOptions
- StaticRegistrationOptions
-}
-
-// The data type of the ResponseError if the
-// initialize request fails.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializeError
-type InitializeError struct {
- // Indicates whether the client should execute the following retry logic:
- // (1) show the message provided by the ResponseError to the user;
- // (2) the user selects retry or cancel;
- // (3) if the user selected retry, the initialize method is sent again.
- Retry bool `json:"retry"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializeParams
-type InitializeParams struct {
- XInitializeParams
- WorkspaceFoldersInitializeParams
-}
-
-// The result returned from an initialize request.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializeResult
-type InitializeResult struct {
- // The capabilities the language server provides.
- Capabilities ServerCapabilities `json:"capabilities"`
- // Information about the server.
- //
- // @since 3.15.0
- ServerInfo *ServerInfo `json:"serverInfo,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializedParams
-type InitializedParams struct{}
-
-// Inlay hint information.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHint
-type InlayHint struct {
- // The position of this hint.
- //
- // If multiple hints have the same position, they will be shown in the order
- // they appear in the response.
- Position Position `json:"position"`
- // The label of this hint. A human-readable string or an array of
- // InlayHintLabelPart label parts.
- //
- // *Note* that neither the string nor the label part can be empty.
- Label []InlayHintLabelPart `json:"label"`
- // The kind of this hint. Can be omitted, in which case the client
- // should fall back to a reasonable default.
- Kind InlayHintKind `json:"kind,omitempty"`
- // Optional text edits that are performed when accepting this inlay hint.
- //
- // *Note* that edits are expected to change the document so that the inlay
- // hint (or its nearest variant) is now part of the document and the inlay
- // hint itself is now obsolete.
- TextEdits []TextEdit `json:"textEdits,omitempty"`
- // The tooltip text when you hover over this item.
- Tooltip *Or_InlayHint_tooltip `json:"tooltip,omitempty"`
- // Render padding before the hint.
- //
- // Note: Padding should use the editor's background color, not the
- // background color of the hint itself. That means padding can be used
- // to visually align/separate an inlay hint.
- PaddingLeft bool `json:"paddingLeft,omitempty"`
- // Render padding after the hint.
- //
- // Note: Padding should use the editor's background color, not the
- // background color of the hint itself. That means padding can be used
- // to visually align/separate an inlay hint.
- PaddingRight bool `json:"paddingRight,omitempty"`
- // A data entry field that is preserved on an inlay hint between
- // a `textDocument/inlayHint` and a `inlayHint/resolve` request.
- Data interface{} `json:"data,omitempty"`
-}
-
-// Inlay hint client capabilities.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintClientCapabilities
-type InlayHintClientCapabilities struct {
- // Whether inlay hints support dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Indicates which properties a client can resolve lazily on an inlay
- // hint.
- ResolveSupport *ClientInlayHintResolveOptions `json:"resolveSupport,omitempty"`
-}
-
-// Inlay hint kinds.
-//
-// @since 3.17.0
-type InlayHintKind uint32
-
-// An inlay hint label part allows for interactive and composite labels
-// of inlay hints.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintLabelPart
-type InlayHintLabelPart struct {
- // The value of this label part.
- Value string `json:"value"`
- // The tooltip text when you hover over this label part. Depending on
- // the client capability `inlayHint.resolveSupport` clients might resolve
- // this property late using the resolve request.
- Tooltip *Or_InlayHintLabelPart_tooltip `json:"tooltip,omitempty"`
- // An optional source code location that represents this
- // label part.
- //
- // The editor will use this location for the hover and for code navigation
- // features: This part will become a clickable link that resolves to the
- // definition of the symbol at the given location (not necessarily the
- // location itself), it shows the hover that shows at the given location,
- // and it shows a context menu with further code navigation commands.
- //
- // Depending on the client capability `inlayHint.resolveSupport` clients
- // might resolve this property late using the resolve request.
- Location *Location `json:"location,omitempty"`
- // An optional command for this label part.
- //
- // Depending on the client capability `inlayHint.resolveSupport` clients
- // might resolve this property late using the resolve request.
- Command *Command `json:"command,omitempty"`
-}
-
-// Inlay hint options used during static registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintOptions
-type InlayHintOptions struct {
- // The server provides support to resolve additional
- // information for an inlay hint item.
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- WorkDoneProgressOptions
-}
-
-// A parameter literal used in inlay hint requests.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintParams
-type InlayHintParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The document range for which inlay hints should be computed.
- Range Range `json:"range"`
- WorkDoneProgressParams
-}
-
-// Inlay hint options used during static or dynamic registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintRegistrationOptions
-type InlayHintRegistrationOptions struct {
- InlayHintOptions
- TextDocumentRegistrationOptions
- StaticRegistrationOptions
-}
-
-// Client workspace capabilities specific to inlay hints.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlayHintWorkspaceClientCapabilities
-type InlayHintWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from
- // the server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // inlay hints currently shown. It should be used with absolute care and
- // is useful for situations where a server, for example, detects a project-wide
- // change that requires such a calculation.
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// Client capabilities specific to inline completions.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionClientCapabilities
-type InlineCompletionClientCapabilities struct {
- // Whether implementation supports dynamic registration for inline completion providers.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// Provides information about the context in which an inline completion was requested.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionContext
-type InlineCompletionContext struct {
- // Describes how the inline completion was triggered.
- TriggerKind InlineCompletionTriggerKind `json:"triggerKind"`
- // Provides information about the currently selected item in the autocomplete widget if it is visible.
- SelectedCompletionInfo *SelectedCompletionInfo `json:"selectedCompletionInfo,omitempty"`
-}
-
-// An inline completion item represents a text snippet that is proposed inline to complete text that is being typed.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionItem
-type InlineCompletionItem struct {
- // The text to replace the range with. Must be set.
- InsertText Or_InlineCompletionItem_insertText `json:"insertText"`
- // A text that is used to decide if this inline completion should be shown. When `falsy`, the {@link InlineCompletionItem.insertText} is used.
- FilterText string `json:"filterText,omitempty"`
- // The range to replace. Must begin and end on the same line.
- Range *Range `json:"range,omitempty"`
- // An optional {@link Command} that is executed *after* inserting this completion.
- Command *Command `json:"command,omitempty"`
-}
-
-// Represents a collection of {@link InlineCompletionItem inline completion items} to be presented in the editor.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionList
-type InlineCompletionList struct {
- // The inline completion items
- Items []InlineCompletionItem `json:"items"`
-}
-
-// Inline completion options used during static registration.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionOptions
-type InlineCompletionOptions struct {
- WorkDoneProgressOptions
-}
-
-// A parameter literal used in inline completion requests.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionParams
-type InlineCompletionParams struct {
- // Additional information about the context in which inline completions were
- // requested.
- Context InlineCompletionContext `json:"context"`
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// Inline completion options used during static or dynamic registration.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineCompletionRegistrationOptions
-type InlineCompletionRegistrationOptions struct {
- InlineCompletionOptions
- TextDocumentRegistrationOptions
- StaticRegistrationOptions
-}
-
-// Describes how an {@link InlineCompletionItemProvider inline completion provider} was triggered.
-//
-// @since 3.18.0
-// @proposed
-type InlineCompletionTriggerKind uint32
-
-// Inline value information can be provided by different means:
-//
-// - directly as a text value (class InlineValueText).
-// - as a name to use for a variable lookup (class InlineValueVariableLookup)
-// - as an evaluatable expression (class InlineValueEvaluatableExpression)
-//
-// The InlineValue type combines all inline value types into one type.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValue
-type (
- InlineValue = Or_InlineValue // (alias)
- // Client capabilities specific to inline values.
- //
- // @since 3.17.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueClientCapabilities
- InlineValueClientCapabilities struct {
- // Whether implementation supports dynamic registration for inline value providers.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- }
-)
-
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueContext
-type InlineValueContext struct {
- // The stack frame (as a DAP Id) where the execution has stopped.
- FrameID int32 `json:"frameId"`
- // The document range where execution has stopped.
- // Typically the end position of the range denotes the line where the inline values are shown.
- StoppedLocation Range `json:"stoppedLocation"`
-}
-
-// Provide an inline value through an expression evaluation.
-// If only a range is specified, the expression will be extracted from the underlying document.
-// An optional expression can be used to override the extracted expression.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueEvaluatableExpression
-type InlineValueEvaluatableExpression struct {
- // The document range for which the inline value applies.
- // The range is used to extract the evaluatable expression from the underlying document.
- Range Range `json:"range"`
- // If specified, the expression overrides the extracted expression.
- Expression string `json:"expression,omitempty"`
-}
-
-// Inline value options used during static registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueOptions
-type InlineValueOptions struct {
- WorkDoneProgressOptions
-}
-
-// A parameter literal used in inline value requests.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueParams
-type InlineValueParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The document range for which inline values should be computed.
- Range Range `json:"range"`
- // Additional information about the context in which inline values were
- // requested.
- Context InlineValueContext `json:"context"`
- WorkDoneProgressParams
-}
-
-// Inline value options used during static or dynamic registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueRegistrationOptions
-type InlineValueRegistrationOptions struct {
- InlineValueOptions
- TextDocumentRegistrationOptions
- StaticRegistrationOptions
-}
-
-// Provide inline value as text.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueText
-type InlineValueText struct {
- // The document range for which the inline value applies.
- Range Range `json:"range"`
- // The text of the inline value.
- Text string `json:"text"`
-}
-
-// Provide inline value through a variable lookup.
-// If only a range is specified, the variable name will be extracted from the underlying document.
-// An optional variable name can be used to override the extracted name.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueVariableLookup
-type InlineValueVariableLookup struct {
- // The document range for which the inline value applies.
- // The range is used to extract the variable name from the underlying document.
- Range Range `json:"range"`
- // If specified, the name of the variable to look up.
- VariableName string `json:"variableName,omitempty"`
- // How to perform the lookup.
- CaseSensitiveLookup bool `json:"caseSensitiveLookup"`
-}
-
-// Client workspace capabilities specific to inline values.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#inlineValueWorkspaceClientCapabilities
-type InlineValueWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from the
- // server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // inline values currently shown. It should be used with absolute care and is
- // useful for situations where a server, for example, detects a project-wide
- // change that requires such a calculation.
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// A special text edit to provide an insert and a replace operation.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#insertReplaceEdit
-type InsertReplaceEdit struct {
- // The string to be inserted.
- NewText string `json:"newText"`
- // The range if the insert is requested
- Insert Range `json:"insert"`
- // The range if the replace is requested.
- Replace Range `json:"replace"`
-}
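-
-// Illustrative sketch, not part of the generated protocol types: for a
-// completion requested in the middle of the word `console` (cursor after
-// `con`), the insert range typically covers only the text up to the cursor
-// while the replace range covers the whole word. All positions below are
-// hypothetical:
-//
-//   InsertReplaceEdit{
-//       NewText: "console",
-//       Insert:  Range{Start: Position{Line: 1, Character: 0}, End: Position{Line: 1, Character: 3}},
-//       Replace: Range{Start: Position{Line: 1, Character: 0}, End: Position{Line: 1, Character: 7}},
-//   }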
-
-// Defines whether the insert text in a completion item should be interpreted as
-// plain text or a snippet.
-type InsertTextFormat uint32
-
-// How whitespace and indentation is handled during completion
-// item insertion.
-//
-// @since 3.16.0
-type (
- InsertTextMode uint32
- LSPAny = interface{}
-)
-
-// LSP arrays.
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#lSPArray
-type (
- LSPArray = []interface{} // (alias)
- LSPErrorCodes int32
-)
-
-// LSP object definition.
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#lSPObject
-type (
- LSPObject = map[string]LSPAny // (alias)
- // Predefined Language kinds
- // @since 3.18.0
- // @proposed
- LanguageKind string
-)
-
-// Client capabilities for the linked editing range request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRangeClientCapabilities
-type LinkedEditingRangeClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRangeOptions
-type LinkedEditingRangeOptions struct {
- WorkDoneProgressOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRangeParams
-type LinkedEditingRangeParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRangeRegistrationOptions
-type LinkedEditingRangeRegistrationOptions struct {
- TextDocumentRegistrationOptions
- LinkedEditingRangeOptions
- StaticRegistrationOptions
-}
-
-// The result of a linked editing range request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#linkedEditingRanges
-type LinkedEditingRanges struct {
- // A list of ranges that can be edited together. The ranges must have
- // identical length and contain identical text content. The ranges cannot overlap.
- Ranges []Range `json:"ranges"`
- // An optional word pattern (regular expression) that describes valid contents for
- // the given ranges. If no pattern is provided, the client configuration's word
- // pattern will be used.
- WordPattern string `json:"wordPattern,omitempty"`
-}
-
-// created for Literal (Lit_ClientSemanticTokensRequestOptions_range_Item1)
-type Lit_ClientSemanticTokensRequestOptions_range_Item1 struct{}
-
-// created for Literal (Lit_SemanticTokensOptions_range_Item1)
-type Lit_SemanticTokensOptions_range_Item1 struct{}
-
-// Represents a location inside a resource, such as a line
-// inside a text file.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#location
-type Location struct {
- URI DocumentURI `json:"uri"`
- Range Range `json:"range"`
-}
-
-// Represents the connection of two locations. Provides additional metadata over normal {@link Location locations},
-// including an origin range.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#locationLink
-type LocationLink struct {
- // Span of the origin of this link.
- //
- // Used as the underlined span for mouse interaction. Defaults to the word range at
- // the definition position.
- OriginSelectionRange *Range `json:"originSelectionRange,omitempty"`
- // The target resource identifier of this link.
- TargetURI DocumentURI `json:"targetUri"`
- // The full target range of this link. If the target, for example, is a symbol, then the target range is the
- // range enclosing this symbol, not including leading/trailing whitespace but everything else
- // like comments. This information is typically used to highlight the range in the editor.
- TargetRange Range `json:"targetRange"`
- // The range that should be selected and revealed when this link is being followed, e.g. the name of a function.
- // Must be contained by the `targetRange`. See also `DocumentSymbol#range`
- TargetSelectionRange Range `json:"targetSelectionRange"`
-}
-
-// A location that contains only a uri and does not include a range.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#locationUriOnly
-type LocationUriOnly struct {
- URI DocumentURI `json:"uri"`
-}
-
-// The log message parameters.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#logMessageParams
-type LogMessageParams struct {
- // The message type. See {@link MessageType}
- Type MessageType `json:"type"`
- // The actual message.
- Message string `json:"message"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#logTraceParams
-type LogTraceParams struct {
- Message string `json:"message"`
- Verbose string `json:"verbose,omitempty"`
-}
-
-// Client capabilities specific to the used markdown parser.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#markdownClientCapabilities
-type MarkdownClientCapabilities struct {
- // The name of the parser.
- Parser string `json:"parser"`
- // The version of the parser.
- Version string `json:"version,omitempty"`
- // A list of HTML tags that the client allows / supports in
- // Markdown.
- //
- // @since 3.17.0
- AllowedTags []string `json:"allowedTags,omitempty"`
-}
-
-// MarkedString can be used to render human readable text. It is either a markdown string
-// or a code-block that provides a language and a code snippet. The language identifier
-// is semantically equal to the optional language identifier in fenced code blocks in GitHub
-// issues. See https://help.github.com/articles/creating-and-highlighting-code-blocks/#syntax-highlighting
-//
-// The pair of a language and a value is an equivalent to markdown:
-// ```${language}
-// ${value}
-// ```
-//
-// Note that markdown strings will be sanitized - that means html will be escaped.
-// @deprecated use MarkupContent instead.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#markedString
-type (
- MarkedString = Or_MarkedString // (alias)
- // @since 3.18.0
- // @deprecated use MarkupContent instead.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#markedStringWithLanguage
- MarkedStringWithLanguage struct {
- Language string `json:"language"`
- Value string `json:"value"`
- }
-)
-
-// A `MarkupContent` literal represents a string value whose content is interpreted based on its
-// kind flag. Currently the protocol supports `plaintext` and `markdown` as markup kinds.
-//
-// If the kind is `markdown` then the value can contain fenced code blocks like in GitHub issues.
-// See https://help.github.com/articles/creating-and-highlighting-code-blocks/#syntax-highlighting
-//
-// Here is an example of how such a string can be constructed using JavaScript / TypeScript:
-// ```ts
-//
-// let markdown: MarkupContent = {
-// kind: MarkupKind.Markdown,
-// value: [
-// '# Header',
-// 'Some text',
-// '```typescript',
-// 'someCode();',
-// '```'
-// ].join('\n')
-// };
-//
-// ```
-//
-// *Please Note* that clients might sanitize the returned markdown. A client could decide to
-// remove HTML from the markdown to avoid script execution.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#markupContent
-type MarkupContent struct {
- // The type of the Markup
- Kind MarkupKind `json:"kind"`
- // The content itself
- Value string `json:"value"`
-}
-
-// Describes the content type that a client supports in various
-// result literals like `Hover`, `ParameterInfo` or `CompletionItem`.
-//
-// Please note that `MarkupKinds` must not start with a `$`. These kinds
-// are reserved for internal usage.
-type MarkupKind string
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#messageActionItem
-type MessageActionItem struct {
- // A short title like 'Retry', 'Open Log' etc.
- Title string `json:"title"`
-}
-
-// The message type
-type MessageType uint32
-
-// Moniker definition to match LSIF 0.5 moniker definition.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#moniker
-type Moniker struct {
- // The scheme of the moniker. For example, tsc or .Net.
- Scheme string `json:"scheme"`
- // The identifier of the moniker. The value is opaque in LSIF; however,
- // schema owners are allowed to define the structure if they want.
- Identifier string `json:"identifier"`
- // The scope in which the moniker is unique
- Unique UniquenessLevel `json:"unique"`
- // The moniker kind if known.
- Kind *MonikerKind `json:"kind,omitempty"`
-}
-
-// Client capabilities specific to the moniker request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#monikerClientCapabilities
-type MonikerClientCapabilities struct {
- // Whether moniker supports dynamic registration. If this is set to `true`
- // the client supports the new `MonikerRegistrationOptions` return value
- // for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// The moniker kind.
-//
-// @since 3.16.0
-type MonikerKind string
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#monikerOptions
-type MonikerOptions struct {
- WorkDoneProgressOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#monikerParams
-type MonikerParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#monikerRegistrationOptions
-type MonikerRegistrationOptions struct {
- TextDocumentRegistrationOptions
- MonikerOptions
-}
-
-// A notebook cell.
-//
-// A cell's document URI must be unique across ALL notebook
-// cells and can therefore be used to uniquely identify a
-// notebook cell or the cell's text document.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookCell
-type NotebookCell struct {
- // The cell's kind
- Kind NotebookCellKind `json:"kind"`
- // The URI of the cell's text document
- // content.
- Document DocumentURI `json:"document"`
- // Additional metadata stored with the cell.
- //
- // Note: should always be an object literal (e.g. LSPObject)
- Metadata *LSPObject `json:"metadata,omitempty"`
- // Additional execution summary information
- // if supported by the client.
- ExecutionSummary *ExecutionSummary `json:"executionSummary,omitempty"`
-}
-
-// A change describing how to move a `NotebookCell`
-// array from state S to S'.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookCellArrayChange
-type NotebookCellArrayChange struct {
- // The start offset of the cell that changed.
- Start uint32 `json:"start"`
- // The deleted cells
- DeleteCount uint32 `json:"deleteCount"`
- // The new cells, if any
- Cells []NotebookCell `json:"cells,omitempty"`
-}
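-
-// Illustrative note, not part of the generated protocol types: a
-// NotebookCellArrayChange describes a splice of the cell array, roughly
-//
-//   newCells := append(append(cells[:c.Start:c.Start], c.Cells...), cells[c.Start+c.DeleteCount:]...)
-//
-// where c is the change and Start/DeleteCount are assumed to be within the
-// bounds of the old cell array.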
-
-// A notebook cell kind.
-//
-// @since 3.17.0
-type NotebookCellKind uint32
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookCellLanguage
-type NotebookCellLanguage struct {
- Language string `json:"language"`
-}
-
-// A notebook cell text document filter denotes a cell text
-// document by different properties.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookCellTextDocumentFilter
-type NotebookCellTextDocumentFilter struct {
- // A filter that matches against the notebook
- // containing the notebook cell. If a string
- // value is provided, it matches against the
- // notebook type. '*' matches every notebook.
- Notebook Or_NotebookCellTextDocumentFilter_notebook `json:"notebook"`
- // A language id like `python`.
- //
- // Will be matched against the language id of the
- // notebook cell document. '*' matches every language.
- Language string `json:"language,omitempty"`
-}
-
-// A notebook document.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocument
-type NotebookDocument struct {
- // The notebook document's uri.
- URI URI `json:"uri"`
- // The type of the notebook.
- NotebookType string `json:"notebookType"`
- // The version number of this document (it will increase after each
- // change, including undo/redo).
- Version int32 `json:"version"`
- // Additional metadata stored with the notebook
- // document.
- //
- // Note: should always be an object literal (e.g. LSPObject)
- Metadata *LSPObject `json:"metadata,omitempty"`
- // The cells of a notebook.
- Cells []NotebookCell `json:"cells"`
-}
-
-// Structural changes to cells in a notebook document.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentCellChangeStructure
-type NotebookDocumentCellChangeStructure struct {
- // The change to the cell array.
- Array NotebookCellArrayChange `json:"array"`
- // Additional opened cell text documents.
- DidOpen []TextDocumentItem `json:"didOpen,omitempty"`
- // Additional closed cell text documents.
- DidClose []TextDocumentIdentifier `json:"didClose,omitempty"`
-}
-
-// Cell changes to a notebook document.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentCellChanges
-type NotebookDocumentCellChanges struct {
- // Changes to the cell structure to add or
- // remove cells.
- Structure *NotebookDocumentCellChangeStructure `json:"structure,omitempty"`
- // Changes to notebook cell properties like
- // kind, execution summary or metadata.
- Data []NotebookCell `json:"data,omitempty"`
- // Changes to the text content of notebook cells.
- TextContent []NotebookDocumentCellContentChanges `json:"textContent,omitempty"`
-}
-
-// Content changes to a cell in a notebook document.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentCellContentChanges
-type NotebookDocumentCellContentChanges struct {
- Document VersionedTextDocumentIdentifier `json:"document"`
- Changes []TextDocumentContentChangeEvent `json:"changes"`
-}
-
-// A change event for a notebook document.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentChangeEvent
-type NotebookDocumentChangeEvent struct {
- // The changed meta data if any.
- //
- // Note: should always be an object literal (e.g. LSPObject)
- Metadata *LSPObject `json:"metadata,omitempty"`
- // Changes to cells
- Cells *NotebookDocumentCellChanges `json:"cells,omitempty"`
-}
-
-// Capabilities specific to the notebook document support.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentClientCapabilities
-type NotebookDocumentClientCapabilities struct {
- // Capabilities specific to notebook document synchronization
- //
- // @since 3.17.0
- Synchronization NotebookDocumentSyncClientCapabilities `json:"synchronization"`
-}
-
-// A notebook document filter denotes a notebook document by
-// different properties. The properties will be matched
-// against the notebook's URI (same as with documents)
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilter
-type (
- NotebookDocumentFilter = Or_NotebookDocumentFilter // (alias)
- // A notebook document filter where `notebookType` is a required field.
- //
- // @since 3.18.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterNotebookType
- NotebookDocumentFilterNotebookType struct {
- // The type of the enclosing notebook.
- NotebookType string `json:"notebookType"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme,omitempty"`
- // A glob pattern.
- Pattern *GlobPattern `json:"pattern,omitempty"`
- }
-)
-
-// A notebook document filter where `pattern` is a required field.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterPattern
-type NotebookDocumentFilterPattern struct {
- // The type of the enclosing notebook.
- NotebookType string `json:"notebookType,omitempty"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme,omitempty"`
- // A glob pattern.
- Pattern GlobPattern `json:"pattern"`
-}
-
-// A notebook document filter where `scheme` is a required field.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterScheme
-type NotebookDocumentFilterScheme struct {
- // The type of the enclosing notebook.
- NotebookType string `json:"notebookType,omitempty"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme"`
- // A glob pattern.
- Pattern *GlobPattern `json:"pattern,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterWithCells
-type NotebookDocumentFilterWithCells struct {
- // The notebook to be synced. If a string
- // value is provided it matches against the
- // notebook type. '*' matches every notebook.
- Notebook *Or_NotebookDocumentFilterWithCells_notebook `json:"notebook,omitempty"`
- // The cells of the matching notebook to be synced.
- Cells []NotebookCellLanguage `json:"cells"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentFilterWithNotebook
-type NotebookDocumentFilterWithNotebook struct {
- // The notebook to be synced. If a string
- // value is provided it matches against the
- // notebook type. '*' matches every notebook.
- Notebook Or_NotebookDocumentFilterWithNotebook_notebook `json:"notebook"`
- // The cells of the matching notebook to be synced.
- Cells []NotebookCellLanguage `json:"cells,omitempty"`
-}
-
-// A literal to identify a notebook document in the client.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentIdentifier
-type NotebookDocumentIdentifier struct {
- // The notebook document's uri.
- URI URI `json:"uri"`
-}
-
-// Notebook specific client capabilities.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentSyncClientCapabilities
-type NotebookDocumentSyncClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is
- // set to `true` the client supports the new
- // `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports sending execution summary data per cell.
- ExecutionSummarySupport bool `json:"executionSummarySupport,omitempty"`
-}
-
-// Options specific to a notebook plus its cells
-// to be synced to the server.
-//
-// If a selector provides a notebook document
-// filter but no cell selector, all cells of a
-// matching notebook document will be synced.
-//
-// If a selector provides no notebook document
-// filter but only a cell selector, all notebook
-// documents that contain at least one matching
-// cell will be synced.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentSyncOptions
-type NotebookDocumentSyncOptions struct {
- // The notebooks to be synced
- NotebookSelector []Or_NotebookDocumentSyncOptions_notebookSelector_Elem `json:"notebookSelector"`
- // Whether save notifications should be forwarded to
- // the server. Will only be honored if mode === `notebook`.
- Save bool `json:"save,omitempty"`
-}
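As a rough illustration of the selector behaviour described in the comments above (a string notebook value matches against the notebook type, and `"*"` matches every notebook), a server could build its sync options from the filter and `Or_*` wrapper types defined elsewhere in this file; the variable name below is made up:

```go
// exampleNotebookSync is illustrative only: it asks for every cell of every
// notebook to be synced and for save notifications to be forwarded.
var exampleNotebookSync = NotebookDocumentSyncOptions{
	NotebookSelector: []Or_NotebookDocumentSyncOptions_notebookSelector_Elem{
		{Value: NotebookDocumentFilterWithNotebook{
			// "*" matches every notebook; no cell selector means all cells
			// of a matching notebook are synced.
			Notebook: Or_NotebookDocumentFilterWithNotebook_notebook{Value: "*"},
		}},
	},
	Save: true,
}
```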
-
-// Registration options specific to a notebook.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#notebookDocumentSyncRegistrationOptions
-type NotebookDocumentSyncRegistrationOptions struct {
- NotebookDocumentSyncOptions
- StaticRegistrationOptions
-}
-
-// A text document identifier to optionally denote a specific version of a text document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#optionalVersionedTextDocumentIdentifier
-type OptionalVersionedTextDocumentIdentifier struct {
- // The version number of this document. If a versioned text document identifier
- // is sent from the server to the client and the file is not open in the editor
- // (the server has not received an open notification before) the server can send
- // `null` to indicate that the version is unknown and the content on disk is the
- // truth (as specified with document content ownership).
- Version int32 `json:"version"`
- TextDocumentIdentifier
-}
-
-// created for Or [int32 string]
-type Or_CancelParams_id struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [ClientSemanticTokensRequestFullDelta bool]
-type Or_ClientSemanticTokensRequestOptions_full struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Lit_ClientSemanticTokensRequestOptions_range_Item1 bool]
-type Or_ClientSemanticTokensRequestOptions_range struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [EditRangeWithInsertReplace Range]
-type Or_CompletionItemDefaults_editRange struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkupContent string]
-type Or_CompletionItem_documentation struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InsertReplaceEdit TextEdit]
-type Or_CompletionItem_textEdit struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Location []Location]
-type Or_Declaration struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Location []Location]
-type Or_Definition struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [int32 string]
-type Or_Diagnostic_code struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [[]string string]
-type Or_DidChangeConfigurationRegistrationOptions_section struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [RelatedFullDocumentDiagnosticReport RelatedUnchangedDocumentDiagnosticReport]
-type Or_DocumentDiagnosticReport struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]
-type Or_DocumentDiagnosticReportPartialResult_relatedDocuments_Value struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookCellTextDocumentFilter TextDocumentFilter]
-type Or_DocumentFilter struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Pattern RelativePattern]
-type Or_GlobPattern struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkedString MarkupContent []MarkedString]
-type Or_Hover_contents struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkupContent string]
-type Or_InlayHintLabelPart_tooltip struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [[]InlayHintLabelPart string]
-type Or_InlayHint_label struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkupContent string]
-type Or_InlayHint_tooltip struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [StringValue string]
-type Or_InlineCompletionItem_insertText struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InlineValueEvaluatableExpression InlineValueText InlineValueVariableLookup]
-type Or_InlineValue struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [LSPArray LSPObject bool float64 int32 string uint32]
-type Or_LSPAny struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkedStringWithLanguage string]
-type Or_MarkedString struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentFilter string]
-type Or_NotebookCellTextDocumentFilter_notebook struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentFilterNotebookType NotebookDocumentFilterPattern NotebookDocumentFilterScheme]
-type Or_NotebookDocumentFilter struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentFilter string]
-type Or_NotebookDocumentFilterWithCells_notebook struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentFilter string]
-type Or_NotebookDocumentFilterWithNotebook_notebook struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentFilterWithCells NotebookDocumentFilterWithNotebook]
-type Or_NotebookDocumentSyncOptions_notebookSelector_Elem struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkupContent string]
-type Or_ParameterInformation_documentation struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Tuple_ParameterInformation_label_Item1 string]
-type Or_ParameterInformation_label struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [PrepareRenameDefaultBehavior PrepareRenamePlaceholder Range]
-type Or_PrepareRenameResult struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [int32 string]
-type Or_ProgressToken struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]
-type Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport]
-type Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [URI WorkspaceFolder]
-type Or_RelativePattern_baseUri struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [CodeAction Command]
-type Or_Result_textDocument_codeAction_Item0_Elem struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [CompletionList []CompletionItem]
-type Or_Result_textDocument_completion struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Declaration []DeclarationLink]
-type Or_Result_textDocument_declaration struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Definition []DefinitionLink]
-type Or_Result_textDocument_definition struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [[]DocumentSymbol []SymbolInformation]
-type Or_Result_textDocument_documentSymbol struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Definition []DefinitionLink]
-type Or_Result_textDocument_implementation struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InlineCompletionList []InlineCompletionItem]
-type Or_Result_textDocument_inlineCompletion struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [SemanticTokens SemanticTokensDelta]
-type Or_Result_textDocument_semanticTokens_full_delta struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Definition []DefinitionLink]
-type Or_Result_textDocument_typeDefinition struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [[]SymbolInformation []WorkspaceSymbol]
-type Or_Result_workspace_symbol struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [SemanticTokensFullDelta bool]
-type Or_SemanticTokensOptions_full struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Lit_SemanticTokensOptions_range_Item1 bool]
-type Or_SemanticTokensOptions_range struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [CallHierarchyOptions CallHierarchyRegistrationOptions bool]
-type Or_ServerCapabilities_callHierarchyProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [CodeActionOptions bool]
-type Or_ServerCapabilities_codeActionProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DocumentColorOptions DocumentColorRegistrationOptions bool]
-type Or_ServerCapabilities_colorProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DeclarationOptions DeclarationRegistrationOptions bool]
-type Or_ServerCapabilities_declarationProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DefinitionOptions bool]
-type Or_ServerCapabilities_definitionProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DiagnosticOptions DiagnosticRegistrationOptions]
-type Or_ServerCapabilities_diagnosticProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DocumentFormattingOptions bool]
-type Or_ServerCapabilities_documentFormattingProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DocumentHighlightOptions bool]
-type Or_ServerCapabilities_documentHighlightProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DocumentRangeFormattingOptions bool]
-type Or_ServerCapabilities_documentRangeFormattingProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [DocumentSymbolOptions bool]
-type Or_ServerCapabilities_documentSymbolProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [FoldingRangeOptions FoldingRangeRegistrationOptions bool]
-type Or_ServerCapabilities_foldingRangeProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [HoverOptions bool]
-type Or_ServerCapabilities_hoverProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [ImplementationOptions ImplementationRegistrationOptions bool]
-type Or_ServerCapabilities_implementationProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InlayHintOptions InlayHintRegistrationOptions bool]
-type Or_ServerCapabilities_inlayHintProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InlineCompletionOptions bool]
-type Or_ServerCapabilities_inlineCompletionProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [InlineValueOptions InlineValueRegistrationOptions bool]
-type Or_ServerCapabilities_inlineValueProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [LinkedEditingRangeOptions LinkedEditingRangeRegistrationOptions bool]
-type Or_ServerCapabilities_linkedEditingRangeProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MonikerOptions MonikerRegistrationOptions bool]
-type Or_ServerCapabilities_monikerProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [NotebookDocumentSyncOptions NotebookDocumentSyncRegistrationOptions]
-type Or_ServerCapabilities_notebookDocumentSync struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [ReferenceOptions bool]
-type Or_ServerCapabilities_referencesProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [RenameOptions bool]
-type Or_ServerCapabilities_renameProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [SelectionRangeOptions SelectionRangeRegistrationOptions bool]
-type Or_ServerCapabilities_selectionRangeProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [SemanticTokensOptions SemanticTokensRegistrationOptions]
-type Or_ServerCapabilities_semanticTokensProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TextDocumentSyncKind TextDocumentSyncOptions]
-type Or_ServerCapabilities_textDocumentSync struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TypeDefinitionOptions TypeDefinitionRegistrationOptions bool]
-type Or_ServerCapabilities_typeDefinitionProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TypeHierarchyOptions TypeHierarchyRegistrationOptions bool]
-type Or_ServerCapabilities_typeHierarchyProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [WorkspaceSymbolOptions bool]
-type Or_ServerCapabilities_workspaceSymbolProvider struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [MarkupContent string]
-type Or_SignatureInformation_documentation struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TextDocumentContentChangePartial TextDocumentContentChangeWholeDocument]
-type Or_TextDocumentContentChangeEvent struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [AnnotatedTextEdit SnippetTextEdit TextEdit]
-type Or_TextDocumentEdit_edits_Elem struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TextDocumentFilterLanguage TextDocumentFilterPattern TextDocumentFilterScheme]
-type Or_TextDocumentFilter struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [SaveOptions bool]
-type Or_TextDocumentSyncOptions_save struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [WorkspaceFullDocumentDiagnosticReport WorkspaceUnchangedDocumentDiagnosticReport]
-type Or_WorkspaceDocumentDiagnosticReport struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [CreateFile DeleteFile RenameFile TextDocumentEdit]
-type Or_WorkspaceEdit_documentChanges_Elem struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [bool string]
-type Or_WorkspaceFoldersServerCapabilities_changeNotifications struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [TextDocumentContentOptions TextDocumentContentRegistrationOptions]
-type Or_WorkspaceOptions_textDocumentContent struct {
- Value interface{} `json:"value"`
-}
-
-// created for Or [Location LocationUriOnly]
-type Or_WorkspaceSymbol_location struct {
- Value interface{} `json:"value"`
-}
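The `Or_*` structs above are the generated stand-ins for the protocol's union ("or") types: each holds exactly one of the variants listed in its `created for Or [...]` comment in the untyped `Value` field. A minimal sketch of how a consumer might branch on such a wrapper (the package and helper names are hypothetical; the code assumes it lives alongside these generated types):

```go
package protocol // assumption: the package that holds the generated types above

import "fmt"

// describeCancelID branches on the variant stored in Or_CancelParams_id,
// which is "created for Or [int32 string]". Illustrative only.
func describeCancelID(id Or_CancelParams_id) string {
	switch v := id.Value.(type) {
	case int32:
		return fmt.Sprintf("numeric request id %d", v)
	case string:
		return fmt.Sprintf("string request id %q", v)
	default:
		return fmt.Sprintf("unexpected variant %T", v)
	}
}
```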
-
-// The parameters of a configuration request.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#configurationParams
-type ParamConfiguration struct {
- Items []ConfigurationItem `json:"items"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#initializeParams
-type ParamInitialize struct {
- XInitializeParams
- WorkspaceFoldersInitializeParams
-}
-
-// Represents a parameter of a callable-signature. A parameter can
-// have a label and a doc-comment.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#parameterInformation
-type ParameterInformation struct {
- // The label of this parameter information.
- //
- // Either a string or inclusive start and exclusive end offsets within its containing
- // signature label (see SignatureInformation.label). The offsets are based on a UTF-16
- // string representation, as `Position` and `Range` are.
- //
- // To avoid ambiguities a server should use the [start, end] offset value instead of using
- // a substring. Whether a client supports this is controlled via the `labelOffsetSupport` client
- // capability.
- //
- // *Note*: a label of type string should be a substring of its containing signature label.
- // Its intended use case is to highlight the parameter label part in the `SignatureInformation.label`.
- Label Or_ParameterInformation_label `json:"label"`
- // The human-readable doc-comment of this parameter. Will be shown
- // in the UI but can be omitted.
- Documentation *Or_ParameterInformation_documentation `json:"documentation,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#partialResultParams
-type PartialResultParams struct {
- // An optional token that a server can use to report partial results (e.g. streaming) to
- // the client.
- PartialResultToken *ProgressToken `json:"partialResultToken,omitempty"`
-}
-
-// The glob pattern to watch relative to the base path. Glob patterns can have the following syntax:
-//
-// - `*` to match one or more characters in a path segment
-// - `?` to match on one character in a path segment
-// - `**` to match any number of path segments, including none
-// - `{}` to group conditions (e.g. `**/*.{ts,js}` matches all TypeScript and JavaScript files)
-// - `[]` to declare a range of characters to match in a path segment (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …)
-// - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`)
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#pattern
-type (
- Pattern = string // (alias)
- // Position in a text document expressed as zero-based line and character
- // offset. Prior to 3.17 the offsets were always based on a UTF-16 string
- // representation. So for a string of the form `a𐐀b`, the character offset of the
- // character `a` is 0, the character offset of `𐐀` is 1 and the character
- // offset of `b` is 3 since `𐐀` is represented using two code units in UTF-16.
- // Since 3.17 clients and servers can agree on a different string encoding
- // representation (e.g. UTF-8). The client announces its supported encoding
- // via the client capability [`general.positionEncodings`](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#clientCapabilities).
- // The value is an array of position encodings the client supports, with
- // decreasing preference (e.g. the encoding at index `0` is the most preferred
- // one). To stay backwards compatible the only mandatory encoding is UTF-16
- // represented via the string `utf-16`. The server can pick one of the
- // encodings offered by the client and signals that encoding back to the
- // client via the initialize result's property
- // [`capabilities.positionEncoding`](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#serverCapabilities). If the string value
- // `utf-16` is missing from the client's capability `general.positionEncodings`
- // servers can safely assume that the client supports UTF-16. If the server
- // omits the position encoding in its initialize result the encoding defaults
- // to the string value `utf-16`. Implementation considerations: since the
- // conversion from one encoding into another requires the content of the
- // file / line the conversion is best done where the file is read which is
- // usually on the server side.
- //
- // Positions are line end character agnostic. So you can not specify a position
- // that denotes `\r|\n` or `\n|` where `|` represents the character offset.
- //
- // @since 3.17.0 - support for negotiated position encoding.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#position
- Position struct {
- // Line position in a document (zero-based).
- //
- // If a line number is greater than the number of lines in a document, it defaults back to the number of lines in the document.
- // If a line number is negative, it defaults to 0.
- Line uint32 `json:"line"`
- // Character offset on a line in a document (zero-based).
- //
- // The meaning of this offset is determined by the negotiated
- // `PositionEncodingKind`.
- //
- // If the character value is greater than the line length it defaults back to the
- // line length.
- Character uint32 `json:"character"`
- }
-)
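To make the offset discussion above concrete, here is a purely illustrative set of `Position` values for the `a𐐀b` example from the comment, under the default UTF-16 position encoding (the variable names are made up):

```go
// Offsets into the single line "a𐐀b" with UTF-16 as the negotiated encoding.
var (
	posA = Position{Line: 0, Character: 0} // 'a'
	posG = Position{Line: 0, Character: 1} // '𐐀'
	posB = Position{Line: 0, Character: 3} // 'b': '𐐀' occupies two UTF-16 code units
)
```

Under a negotiated UTF-8 encoding the character offsets count bytes instead, so `b` would sit at offset 5, since `𐐀` is four UTF-8 bytes.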
-
-// A set of predefined position encoding kinds.
-//
-// @since 3.17.0
-type PositionEncodingKind string
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#prepareRenameDefaultBehavior
-type PrepareRenameDefaultBehavior struct {
- DefaultBehavior bool `json:"defaultBehavior"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#prepareRenameParams
-type PrepareRenameParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#prepareRenamePlaceholder
-type PrepareRenamePlaceholder struct {
- Range Range `json:"range"`
- Placeholder string `json:"placeholder"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#prepareRenameResult
-type (
- PrepareRenameResult = Or_PrepareRenameResult // (alias)
- PrepareSupportDefaultBehavior uint32
-)
-
-// A previous result id in a workspace pull request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#previousResultId
-type PreviousResultID struct {
- // The URI for which the client knows a
- // result id.
- URI DocumentURI `json:"uri"`
- // The value of the previous result id.
- Value string `json:"value"`
-}
-
-// A previous result id in a workspace pull request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#previousResultId
-type PreviousResultId struct {
- // The URI for which the client knows a
- // result id.
- URI DocumentURI `json:"uri"`
- // The value of the previous result id.
- Value string `json:"value"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#progressParams
-type ProgressParams struct {
- // The progress token provided by the client or server.
- Token ProgressToken `json:"token"`
- // The progress data.
- Value interface{} `json:"value"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#progressToken
-type (
- ProgressToken = Or_ProgressToken // (alias)
- // The publish diagnostic client capabilities.
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#publishDiagnosticsClientCapabilities
- PublishDiagnosticsClientCapabilities struct {
- // Whether the client interprets the version property of the
- // `textDocument/publishDiagnostics` notification's parameter.
- //
- // @since 3.15.0
- VersionSupport bool `json:"versionSupport,omitempty"`
- DiagnosticsCapabilities
- }
-)
-
-// The publish diagnostic notification's parameters.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#publishDiagnosticsParams
-type PublishDiagnosticsParams struct {
- // The URI for which diagnostic information is reported.
- URI DocumentURI `json:"uri"`
- // Optionally the version number of the document the diagnostics are published for.
- //
- // @since 3.15.0
- Version int32 `json:"version,omitempty"`
- // An array of diagnostic information items.
- Diagnostics []Diagnostic `json:"diagnostics"`
-}
-
-// A range in a text document expressed as (zero-based) start and end positions.
-//
-// If you want to specify a range that contains a line including the line ending
-// character(s) then use an end position denoting the start of the next line.
-// For example:
-// ```ts
-//
-// {
-// start: { line: 5, character: 23 }
-// end : { line: 6, character : 0 }
-// }
-//
-// ```
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#range
-type Range struct {
- // The range's start position.
- Start Position `json:"start"`
- // The range's end position.
- End Position `json:"end"`
-}
-
-// Client Capabilities for a {@link ReferencesRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceClientCapabilities
-type ReferenceClientCapabilities struct {
- // Whether references supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// Value-object that contains additional information when
-// requesting references.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceContext
-type ReferenceContext struct {
- // Include the declaration of the current symbol.
- IncludeDeclaration bool `json:"includeDeclaration"`
-}
-
-// Reference options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceOptions
-type ReferenceOptions struct {
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link ReferencesRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceParams
-type ReferenceParams struct {
- Context ReferenceContext `json:"context"`
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link ReferencesRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#referenceRegistrationOptions
-type ReferenceRegistrationOptions struct {
- TextDocumentRegistrationOptions
- ReferenceOptions
-}
-
-// General parameters to register for a notification or to register a provider.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#registration
-type Registration struct {
- // The id used to register the request. The id can be used to deregister
- // the request again.
- ID string `json:"id"`
- // The method / capability to register for.
- Method string `json:"method"`
- // Options necessary for the registration.
- RegisterOptions interface{} `json:"registerOptions,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#registrationParams
-type RegistrationParams struct {
- Registrations []Registration `json:"registrations"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#regularExpressionEngineKind
-type (
- RegularExpressionEngineKind = string // (alias)
- // Client capabilities specific to regular expressions.
- //
- // @since 3.16.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#regularExpressionsClientCapabilities
- RegularExpressionsClientCapabilities struct {
- // The engine's name.
- Engine RegularExpressionEngineKind `json:"engine"`
- // The engine's version.
- Version string `json:"version,omitempty"`
- }
-)
-
-// A full diagnostic report with a set of related documents.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#relatedFullDocumentDiagnosticReport
-type RelatedFullDocumentDiagnosticReport struct {
- // Diagnostics of related documents. This information is useful
- // in programming languages where code in a file A can generate
- // diagnostics in a file B which A depends on. An example of
- // such a language is C/C++ where macro definitions in a file
- // a.cpp result in errors in a header file b.hpp.
- //
- // @since 3.17.0
- RelatedDocuments map[DocumentURI]interface{} `json:"relatedDocuments,omitempty"`
- FullDocumentDiagnosticReport
-}
-
-// An unchanged diagnostic report with a set of related documents.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#relatedUnchangedDocumentDiagnosticReport
-type RelatedUnchangedDocumentDiagnosticReport struct {
- // Diagnostics of related documents. This information is useful
- // in programming languages where code in a file A can generate
- // diagnostics in a file B which A depends on. An example of
- // such a language is C/C++ where macro definitions in a file
- // a.cpp result in errors in a header file b.hpp.
- //
- // @since 3.17.0
- RelatedDocuments map[DocumentURI]interface{} `json:"relatedDocuments,omitempty"`
- UnchangedDocumentDiagnosticReport
-}
-
-// A relative pattern is a helper to construct glob patterns that are matched
-// relatively to a base URI. The common value for a `baseUri` is a workspace
-// folder root, but it can be another absolute URI as well.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#relativePattern
-type RelativePattern struct {
- // A workspace folder or a base URI against which this pattern will be
- // matched relatively.
- BaseURI Or_RelativePattern_baseUri `json:"baseUri"`
- // The actual glob pattern.
- Pattern Pattern `json:"pattern"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameClientCapabilities
-type RenameClientCapabilities struct {
- // Whether rename supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Client supports testing for validity of rename operations
- // before execution.
- //
- // @since 3.12.0
- PrepareSupport bool `json:"prepareSupport,omitempty"`
- // Client supports the default behavior result.
- //
- // The value indicates the default behavior used by the
- // client.
- //
- // @since 3.16.0
- PrepareSupportDefaultBehavior *PrepareSupportDefaultBehavior `json:"prepareSupportDefaultBehavior,omitempty"`
- // Whether the client honors the change annotations in
- // text edits and resource operations returned via the
- // rename request's workspace edit, for example by presenting
- // the workspace edit in the user interface and asking
- // for confirmation.
- //
- // @since 3.16.0
- HonorsChangeAnnotations bool `json:"honorsChangeAnnotations,omitempty"`
-}
-
-// Rename file operation
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameFile
-type RenameFile struct {
- // A rename
- Kind string `json:"kind"`
- // The old (existing) location.
- OldURI DocumentURI `json:"oldUri"`
- // The new location.
- NewURI DocumentURI `json:"newUri"`
- // Rename options.
- Options *RenameFileOptions `json:"options,omitempty"`
- ResourceOperation
-}
-
-// Rename file options
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameFileOptions
-type RenameFileOptions struct {
- // Overwrite target if existing. Overwrite wins over `ignoreIfExists`
- Overwrite bool `json:"overwrite,omitempty"`
- // Ignores if target exists.
- IgnoreIfExists bool `json:"ignoreIfExists,omitempty"`
-}
-
-// The parameters sent in notifications/requests for user-initiated renames of
-// files.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameFilesParams
-type RenameFilesParams struct {
- // An array of all files/folders renamed in this operation. When a folder is renamed, only
- // the folder will be included, and not its children.
- Files []FileRename `json:"files"`
-}
-
-// Provider options for a {@link RenameRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameOptions
-type RenameOptions struct {
- // Renames should be checked and tested before being executed.
- //
- // @since version 3.12.0
- PrepareProvider bool `json:"prepareProvider,omitempty"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link RenameRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameParams
-type RenameParams struct {
- // The document to rename.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The position at which this request was sent.
- Position Position `json:"position"`
- // The new name of the symbol. If the given name is not valid the
- // request must return a {@link ResponseError} with an
- // appropriate message set.
- NewName string `json:"newName"`
- WorkDoneProgressParams
-}
-
-// Registration options for a {@link RenameRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#renameRegistrationOptions
-type RenameRegistrationOptions struct {
- TextDocumentRegistrationOptions
- RenameOptions
-}
-
-// A generic resource operation.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#resourceOperation
-type ResourceOperation struct {
- // The resource operation kind.
- Kind string `json:"kind"`
- // An optional annotation identifier describing the operation.
- //
- // @since 3.16.0
- AnnotationID *ChangeAnnotationIdentifier `json:"annotationId,omitempty"`
-}
-type ResourceOperationKind string
-
-// Save options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#saveOptions
-type SaveOptions struct {
- // The client is supposed to include the content on save.
- IncludeText bool `json:"includeText,omitempty"`
-}
-
-// Describes the currently selected completion item.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectedCompletionInfo
-type SelectedCompletionInfo struct {
- // The range that will be replaced if this completion item is accepted.
- Range Range `json:"range"`
- // The text the range will be replaced with if this completion is accepted.
- Text string `json:"text"`
-}
-
-// A selection range represents a part of a selection hierarchy. A selection range
-// may have a parent selection range that contains it.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRange
-type SelectionRange struct {
- // The {@link Range range} of this selection range.
- Range Range `json:"range"`
- // The parent selection range containing this range. Therefore `parent.range` must contain `this.range`.
- Parent *SelectionRange `json:"parent,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRangeClientCapabilities
-type SelectionRangeClientCapabilities struct {
- // Whether implementation supports dynamic registration for selection range providers. If this is set to `true`
- // the client supports the new `SelectionRangeRegistrationOptions` return value for the corresponding server
- // capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRangeOptions
-type SelectionRangeOptions struct {
- WorkDoneProgressOptions
-}
-
-// A parameter literal used in selection range requests.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRangeParams
-type SelectionRangeParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The positions inside the text document.
- Positions []Position `json:"positions"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#selectionRangeRegistrationOptions
-type SelectionRangeRegistrationOptions struct {
- SelectionRangeOptions
- TextDocumentRegistrationOptions
- StaticRegistrationOptions
-}
-
-// A set of predefined token modifiers. This set is not fixed
-// and clients can specify additional token modifiers via the
-// corresponding client capabilities.
-//
-// @since 3.16.0
-type SemanticTokenModifiers string
-
-// A set of predefined token types. This set is not fixed
-// and clients can specify additional token types via the
-// corresponding client capabilities.
-//
-// @since 3.16.0
-type SemanticTokenTypes string
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokens
-type SemanticTokens struct {
- // An optional result id. If provided and clients support delta updating
- // the client will include the result id in the next semantic token request.
- // A server can then, instead of computing all semantic tokens again, simply
- // send a delta.
- ResultID string `json:"resultId,omitempty"`
- // The actual tokens.
- Data []uint32 `json:"data"`
-}
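The `Data` slice holds the tokens in the wire encoding defined by the LSP 3.16 specification: five unsigned integers per token (deltaLine, deltaStartChar, length, tokenType index into the legend, tokenModifiers bit set), with each group relative to the previous token. A small, illustrative decoder under that assumption; the type and function names are hypothetical and assume the code sits next to these generated types:

```go
// decodedToken holds one semantic token with absolute coordinates.
type decodedToken struct {
	Line, Start, Length  uint32
	TokenType, Modifiers uint32
}

// decodeSemanticTokens expands the relative five-integer groups in
// SemanticTokens.Data into absolute line/start positions. Illustrative only.
func decodeSemanticTokens(data []uint32) []decodedToken {
	var out []decodedToken
	var line, start uint32
	for i := 0; i+4 < len(data); i += 5 {
		deltaLine, deltaStart := data[i], data[i+1]
		line += deltaLine
		if deltaLine == 0 {
			start += deltaStart // same line: start is relative to the previous token
		} else {
			start = deltaStart // new line: start is relative to the line start
		}
		out = append(out, decodedToken{
			Line:      line,
			Start:     start,
			Length:    data[i+2],
			TokenType: data[i+3],
			Modifiers: data[i+4],
		})
	}
	return out
}
```

The indices stored in `TokenType` and `Modifiers` are resolved against the `SemanticTokensLegend` declared by the server in its options.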
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensClientCapabilities
-type SemanticTokensClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Which requests the client supports and might send to the server
- // depending on the server's capability. Please note that clients might not
- // show semantic tokens or degrade some of the user experience if a range
- // or full request is advertised by the client but not provided by the
- // server. If for example the client capability `requests.full` and
- // `requests.range` are both set to true but the server only provides a
- // range provider the client might not render a minimap correctly or might
- // even decide to not show any semantic tokens at all.
- Requests ClientSemanticTokensRequestOptions `json:"requests"`
- // The token types that the client supports.
- TokenTypes []string `json:"tokenTypes"`
- // The token modifiers that the client supports.
- TokenModifiers []string `json:"tokenModifiers"`
- // The token formats the client supports.
- Formats []TokenFormat `json:"formats"`
- // Whether the client supports tokens that can overlap each other.
- OverlappingTokenSupport bool `json:"overlappingTokenSupport,omitempty"`
- // Whether the client supports tokens that can span multiple lines.
- MultilineTokenSupport bool `json:"multilineTokenSupport,omitempty"`
- // Whether the client allows the server to actively cancel a
- // semantic token request, e.g. supports returning
- // LSPErrorCodes.ServerCancelled. If a server does, the client
- // needs to retrigger the request.
- //
- // @since 3.17.0
- ServerCancelSupport bool `json:"serverCancelSupport,omitempty"`
- // Whether the client uses semantic tokens to augment existing
- // syntax tokens. If set to `true` client side created syntax
- // tokens and semantic tokens are both used for colorization. If
- // set to `false` the client only uses the returned semantic tokens
- // for colorization.
- //
- // If the value is `undefined` then the client behavior is not
- // specified.
- //
- // @since 3.17.0
- AugmentsSyntaxTokens bool `json:"augmentsSyntaxTokens,omitempty"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensDelta
-type SemanticTokensDelta struct {
- ResultID string `json:"resultId,omitempty"`
- // The semantic token edits to transform a previous result into a new result.
- Edits []SemanticTokensEdit `json:"edits"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensDeltaParams
-type SemanticTokensDeltaParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The result id of a previous response. The result Id can either point to a full response
- // or a delta response depending on what was received last.
- PreviousResultID string `json:"previousResultId"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensDeltaPartialResult
-type SemanticTokensDeltaPartialResult struct {
- Edits []SemanticTokensEdit `json:"edits"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensEdit
-type SemanticTokensEdit struct {
- // The start offset of the edit.
- Start uint32 `json:"start"`
- // The count of elements to remove.
- DeleteCount uint32 `json:"deleteCount"`
- // The elements to insert.
- Data []uint32 `json:"data,omitempty"`
-}
-
-// Semantic tokens options to support deltas for full documents
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensFullDelta
-type SemanticTokensFullDelta struct {
- // The server supports deltas for full documents.
- Delta bool `json:"delta,omitempty"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensLegend
-type SemanticTokensLegend struct {
- // The token types a server uses.
- TokenTypes []string `json:"tokenTypes"`
- // The token modifiers a server uses.
- TokenModifiers []string `json:"tokenModifiers"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensOptions
-type SemanticTokensOptions struct {
- // The legend used by the server
- Legend SemanticTokensLegend `json:"legend"`
- // Server supports providing semantic tokens for a specific range
- // of a document.
- Range *Or_SemanticTokensOptions_range `json:"range,omitempty"`
- // Server supports providing semantic tokens for a full document.
- Full *Or_SemanticTokensOptions_full `json:"full,omitempty"`
- WorkDoneProgressOptions
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensParams
-type SemanticTokensParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensPartialResult
-type SemanticTokensPartialResult struct {
- Data []uint32 `json:"data"`
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensRangeParams
-type SemanticTokensRangeParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The range the semantic tokens are requested for.
- Range Range `json:"range"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensRegistrationOptions
-type SemanticTokensRegistrationOptions struct {
- TextDocumentRegistrationOptions
- SemanticTokensOptions
- StaticRegistrationOptions
-}
-
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#semanticTokensWorkspaceClientCapabilities
-type SemanticTokensWorkspaceClientCapabilities struct {
- // Whether the client implementation supports a refresh request sent from
- // the server to the client.
- //
- // Note that this event is global and will force the client to refresh all
- // semantic tokens currently shown. It should be used with absolute care
- // and is useful for situations where a server for example detects a project
- // wide change that requires such a calculation.
- RefreshSupport bool `json:"refreshSupport,omitempty"`
-}
-
-// Defines the capabilities provided by a language
-// server.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#serverCapabilities
-type ServerCapabilities struct {
- // The position encoding the server picked from the encodings offered
- // by the client via the client capability `general.positionEncodings`.
- //
- // If the client didn't provide any position encodings the only valid
- // value that a server can return is 'utf-16'.
- //
- // If omitted it defaults to 'utf-16'.
- //
- // @since 3.17.0
- PositionEncoding *PositionEncodingKind `json:"positionEncoding,omitempty"`
- // Defines how text documents are synced. Is either a detailed structure
- // defining each notification or for backwards compatibility the
- // TextDocumentSyncKind number.
- TextDocumentSync interface{} `json:"textDocumentSync,omitempty"`
- // Defines how notebook documents are synced.
- //
- // @since 3.17.0
- NotebookDocumentSync *Or_ServerCapabilities_notebookDocumentSync `json:"notebookDocumentSync,omitempty"`
- // The server provides completion support.
- CompletionProvider *CompletionOptions `json:"completionProvider,omitempty"`
- // The server provides hover support.
- HoverProvider *Or_ServerCapabilities_hoverProvider `json:"hoverProvider,omitempty"`
- // The server provides signature help support.
- SignatureHelpProvider *SignatureHelpOptions `json:"signatureHelpProvider,omitempty"`
- // The server provides Goto Declaration support.
- DeclarationProvider *Or_ServerCapabilities_declarationProvider `json:"declarationProvider,omitempty"`
- // The server provides goto definition support.
- DefinitionProvider *Or_ServerCapabilities_definitionProvider `json:"definitionProvider,omitempty"`
- // The server provides Goto Type Definition support.
- TypeDefinitionProvider *Or_ServerCapabilities_typeDefinitionProvider `json:"typeDefinitionProvider,omitempty"`
- // The server provides Goto Implementation support.
- ImplementationProvider *Or_ServerCapabilities_implementationProvider `json:"implementationProvider,omitempty"`
- // The server provides find references support.
- ReferencesProvider *Or_ServerCapabilities_referencesProvider `json:"referencesProvider,omitempty"`
- // The server provides document highlight support.
- DocumentHighlightProvider *Or_ServerCapabilities_documentHighlightProvider `json:"documentHighlightProvider,omitempty"`
- // The server provides document symbol support.
- DocumentSymbolProvider *Or_ServerCapabilities_documentSymbolProvider `json:"documentSymbolProvider,omitempty"`
- // The server provides code actions. CodeActionOptions may only be
- // specified if the client states that it supports
- // `codeActionLiteralSupport` in its initial `initialize` request.
- CodeActionProvider interface{} `json:"codeActionProvider,omitempty"`
- // The server provides code lens.
- CodeLensProvider *CodeLensOptions `json:"codeLensProvider,omitempty"`
- // The server provides document link support.
- DocumentLinkProvider *DocumentLinkOptions `json:"documentLinkProvider,omitempty"`
- // The server provides color provider support.
- ColorProvider *Or_ServerCapabilities_colorProvider `json:"colorProvider,omitempty"`
- // The server provides workspace symbol support.
- WorkspaceSymbolProvider *Or_ServerCapabilities_workspaceSymbolProvider `json:"workspaceSymbolProvider,omitempty"`
- // The server provides document formatting.
- DocumentFormattingProvider *Or_ServerCapabilities_documentFormattingProvider `json:"documentFormattingProvider,omitempty"`
- // The server provides document range formatting.
- DocumentRangeFormattingProvider *Or_ServerCapabilities_documentRangeFormattingProvider `json:"documentRangeFormattingProvider,omitempty"`
- // The server provides document formatting on typing.
- DocumentOnTypeFormattingProvider *DocumentOnTypeFormattingOptions `json:"documentOnTypeFormattingProvider,omitempty"`
- // The server provides rename support. RenameOptions may only be
- // specified if the client states that it supports
- // `prepareSupport` in its initial `initialize` request.
- RenameProvider interface{} `json:"renameProvider,omitempty"`
- // The server provides folding provider support.
- FoldingRangeProvider *Or_ServerCapabilities_foldingRangeProvider `json:"foldingRangeProvider,omitempty"`
- // The server provides selection range support.
- SelectionRangeProvider *Or_ServerCapabilities_selectionRangeProvider `json:"selectionRangeProvider,omitempty"`
- // The server provides execute command support.
- ExecuteCommandProvider *ExecuteCommandOptions `json:"executeCommandProvider,omitempty"`
- // The server provides call hierarchy support.
- //
- // @since 3.16.0
- CallHierarchyProvider *Or_ServerCapabilities_callHierarchyProvider `json:"callHierarchyProvider,omitempty"`
- // The server provides linked editing range support.
- //
- // @since 3.16.0
- LinkedEditingRangeProvider *Or_ServerCapabilities_linkedEditingRangeProvider `json:"linkedEditingRangeProvider,omitempty"`
- // The server provides semantic tokens support.
- //
- // @since 3.16.0
- SemanticTokensProvider interface{} `json:"semanticTokensProvider,omitempty"`
- // The server provides moniker support.
- //
- // @since 3.16.0
- MonikerProvider *Or_ServerCapabilities_monikerProvider `json:"monikerProvider,omitempty"`
- // The server provides type hierarchy support.
- //
- // @since 3.17.0
- TypeHierarchyProvider *Or_ServerCapabilities_typeHierarchyProvider `json:"typeHierarchyProvider,omitempty"`
- // The server provides inline values.
- //
- // @since 3.17.0
- InlineValueProvider *Or_ServerCapabilities_inlineValueProvider `json:"inlineValueProvider,omitempty"`
- // The server provides inlay hints.
- //
- // @since 3.17.0
- InlayHintProvider interface{} `json:"inlayHintProvider,omitempty"`
- // The server has support for pull model diagnostics.
- //
- // @since 3.17.0
- DiagnosticProvider *Or_ServerCapabilities_diagnosticProvider `json:"diagnosticProvider,omitempty"`
- // Inline completion options used during static registration.
- //
- // @since 3.18.0
- // @proposed
- InlineCompletionProvider *Or_ServerCapabilities_inlineCompletionProvider `json:"inlineCompletionProvider,omitempty"`
- // Workspace specific server capabilities.
- Workspace *WorkspaceOptions `json:"workspace,omitempty"`
- // Experimental server capabilities.
- Experimental interface{} `json:"experimental,omitempty"`
-}
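As a rough sketch of how these fields fit together (the chosen capabilities are arbitrary examples, not a recommendation), a server advertises the boolean variants by wrapping them in the corresponding `Or_ServerCapabilities_*` types defined earlier, while fields typed as `interface{}` take the chosen variant directly:

```go
// exampleCapabilities is illustrative only; a real server fills in exactly the
// features it implements, with richer option structs where needed.
var exampleCapabilities = ServerCapabilities{
	HoverProvider:      &Or_ServerCapabilities_hoverProvider{Value: true},
	DefinitionProvider: &Or_ServerCapabilities_definitionProvider{Value: true},
	ReferencesProvider: &Or_ServerCapabilities_referencesProvider{Value: true},
	// RenameProvider is declared as interface{}, so the variant goes in as-is.
	RenameProvider: RenameOptions{PrepareProvider: true},
}
```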
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#serverCompletionItemOptions
-type ServerCompletionItemOptions struct {
- // The server has support for completion item label
- // details (see also `CompletionItemLabelDetails`) when
- // receiving a completion item in a resolve call.
- //
- // @since 3.17.0
- LabelDetailsSupport bool `json:"labelDetailsSupport,omitempty"`
-}
-
-// Information about the server
-//
-// @since 3.15.0
-// @since 3.18.0 ServerInfo type name added.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#serverInfo
-type ServerInfo struct {
- // The name of the server as defined by the server.
- Name string `json:"name"`
- // The server's version as defined by the server.
- Version string `json:"version,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#setTraceParams
-type SetTraceParams struct {
- Value TraceValue `json:"value"`
-}
-
-// Client capabilities for the showDocument request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showDocumentClientCapabilities
-type ShowDocumentClientCapabilities struct {
- // The client has support for the showDocument
- // request.
- Support bool `json:"support"`
-}
-
-// Params to show a resource in the UI.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showDocumentParams
-type ShowDocumentParams struct {
- // The uri to show.
- URI URI `json:"uri"`
- // Indicates to show the resource in an external program.
- // To show, for example, `https://code.visualstudio.com/`
- // in the default web browser, set `external` to `true`.
- External bool `json:"external,omitempty"`
- // An optional property to indicate whether the editor
- // showing the document should take focus or not.
- // Clients might ignore this property if an external
- // program is started.
- TakeFocus bool `json:"takeFocus,omitempty"`
- // An optional selection range if the document is a text
- // document. Clients might ignore the property if an
- // external program is started or the file is not a text
- // file.
- Selection *Range `json:"selection,omitempty"`
-}
-
-// The result of a showDocument request.
-//
-// @since 3.16.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showDocumentResult
-type ShowDocumentResult struct {
- // A boolean indicating if the show was successful.
- Success bool `json:"success"`
-}
-
-// The parameters of a notification message.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showMessageParams
-type ShowMessageParams struct {
- // The message type. See {@link MessageType}
- Type MessageType `json:"type"`
- // The actual message.
- Message string `json:"message"`
-}
-
-// Show message request client capabilities
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showMessageRequestClientCapabilities
-type ShowMessageRequestClientCapabilities struct {
- // Capabilities specific to the `MessageActionItem` type.
- MessageActionItem *ClientShowMessageActionItemOptions `json:"messageActionItem,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#showMessageRequestParams
-type ShowMessageRequestParams struct {
- // The message type. See {@link MessageType}
- Type MessageType `json:"type"`
- // The actual message.
- Message string `json:"message"`
- // The message action items to present.
- Actions []MessageActionItem `json:"actions,omitempty"`
-}
-
-// Signature help represents the signature of something
-// callable. There can be multiple signatures but only one
-// active and only one active parameter.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelp
-type SignatureHelp struct {
- // One or more signatures.
- Signatures []SignatureInformation `json:"signatures"`
- // The active signature. If omitted or the value lies outside the
- // range of `signatures` the value defaults to zero or is ignored if
- // the `SignatureHelp` has no signatures.
- //
- // Whenever possible implementors should make an active decision about
- // the active signature and shouldn't rely on a default value.
- //
- // In future version of the protocol this property might become
- // mandatory to better express this.
- ActiveSignature uint32 `json:"activeSignature,omitempty"`
- // The active parameter of the active signature.
- //
- // If `null`, no parameter of the signature is active (for example a named
- // argument that does not match any declared parameters). This is only valid
- // if the client specifies the client capability
- // `textDocument.signatureHelp.noActiveParameterSupport === true`
- //
- // If omitted or the value lies outside the range of
- // `signatures[activeSignature].parameters` defaults to 0 if the active
- // signature has parameters.
- //
- // If the active signature has no parameters it is ignored.
- //
- // In future version of the protocol this property might become
- // mandatory (but still nullable) to better express the active parameter if
- // the active signature does have any.
- ActiveParameter uint32 `json:"activeParameter,omitempty"`
-}
-
-// Client Capabilities for a {@link SignatureHelpRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpClientCapabilities
-type SignatureHelpClientCapabilities struct {
- // Whether signature help supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports the following `SignatureInformation`
- // specific properties.
- SignatureInformation *ClientSignatureInformationOptions `json:"signatureInformation,omitempty"`
- // The client supports to send additional context information for a
- // `textDocument/signatureHelp` request. A client that opts into
- // contextSupport will also support the `retriggerCharacters` on
- // `SignatureHelpOptions`.
- //
- // @since 3.15.0
- ContextSupport bool `json:"contextSupport,omitempty"`
-}
-
-// Additional information about the context in which a signature help request was triggered.
-//
-// @since 3.15.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpContext
-type SignatureHelpContext struct {
- // Action that caused signature help to be triggered.
- TriggerKind SignatureHelpTriggerKind `json:"triggerKind"`
- // Character that caused signature help to be triggered.
- //
- // This is undefined when `triggerKind !== SignatureHelpTriggerKind.TriggerCharacter`
- TriggerCharacter string `json:"triggerCharacter,omitempty"`
- // `true` if signature help was already showing when it was triggered.
- //
- // Retriggers occurs when the signature help is already active and can be caused by actions such as
- // typing a trigger character, a cursor move, or document content changes.
- IsRetrigger bool `json:"isRetrigger"`
- // The currently active `SignatureHelp`.
- //
- // The `activeSignatureHelp` has its `SignatureHelp.activeSignature` field updated based on
- // the user navigating through available signatures.
- ActiveSignatureHelp *SignatureHelp `json:"activeSignatureHelp,omitempty"`
-}
-
-// Server Capabilities for a {@link SignatureHelpRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpOptions
-type SignatureHelpOptions struct {
- // List of characters that trigger signature help automatically.
- TriggerCharacters []string `json:"triggerCharacters,omitempty"`
- // List of characters that re-trigger signature help.
- //
- // These trigger characters are only active when signature help is already showing. All trigger characters
- // are also counted as re-trigger characters.
- //
- // @since 3.15.0
- RetriggerCharacters []string `json:"retriggerCharacters,omitempty"`
- WorkDoneProgressOptions
-}
-
-// Parameters for a {@link SignatureHelpRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpParams
-type SignatureHelpParams struct {
- // The signature help context. This is only available if the client specifies
- // to send this using the client capability `textDocument.signatureHelp.contextSupport === true`
- //
- // @since 3.15.0
- Context *SignatureHelpContext `json:"context,omitempty"`
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// Registration options for a {@link SignatureHelpRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureHelpRegistrationOptions
-type SignatureHelpRegistrationOptions struct {
- TextDocumentRegistrationOptions
- SignatureHelpOptions
-}
-
-// How a signature help was triggered.
-//
-// @since 3.15.0
-type SignatureHelpTriggerKind uint32
-
-// Represents the signature of something callable. A signature
-// can have a label, like a function-name, a doc-comment, and
-// a set of parameters.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#signatureInformation
-type SignatureInformation struct {
- // The label of this signature. Will be shown in
- // the UI.
- Label string `json:"label"`
- // The human-readable doc-comment of this signature. Will be shown
- // in the UI but can be omitted.
- Documentation *Or_SignatureInformation_documentation `json:"documentation,omitempty"`
- // The parameters of this signature.
- Parameters []ParameterInformation `json:"parameters,omitempty"`
- // The index of the active parameter.
- //
- // If `null`, no parameter of the signature is active (for example a named
- // argument that does not match any declared parameters). This is only valid
- // if the client specifies the client capability
- // `textDocument.signatureHelp.noActiveParameterSupport === true`
- //
- // If provided (or `null`), this is used in place of
- // `SignatureHelp.activeParameter`.
- //
- // @since 3.16.0
- ActiveParameter uint32 `json:"activeParameter,omitempty"`
-}
-
-// An interactive text edit.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#snippetTextEdit
-type SnippetTextEdit struct {
- // The range of the text document to be manipulated.
- Range Range `json:"range"`
- // The snippet to be inserted.
- Snippet StringValue `json:"snippet"`
- // The actual identifier of the snippet edit.
- AnnotationID *ChangeAnnotationIdentifier `json:"annotationId,omitempty"`
-}
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#staleRequestSupportOptions
-type StaleRequestSupportOptions struct {
- // The client will actively cancel the request.
- Cancel bool `json:"cancel"`
- // The list of requests for which the client
- // will retry the request if it receives a
- // response with error code `ContentModified`
- RetryOnContentModified []string `json:"retryOnContentModified"`
-}
-
-// Static registration options to be returned in the initialize
-// request.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#staticRegistrationOptions
-type StaticRegistrationOptions struct {
- // The id used to register the request. The id can be used to deregister
- // the request again. See also Registration#id.
- ID string `json:"id,omitempty"`
-}
-
-// A string value used as a snippet is a template which allows to insert text
-// and to control the editor cursor when insertion happens.
-//
-// A snippet can define tab stops and placeholders with `$1`, `$2`
-// and `${3:foo}`. `$0` defines the final tab stop, it defaults to
-// the end of the snippet. Variables are defined with `$name` and
-// `${name:default value}`.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#stringValue
-type StringValue struct {
- // The kind of string value.
- Kind string `json:"kind"`
- // The snippet string.
- Value string `json:"value"`
-}
-
-// Represents information about programming constructs like variables, classes,
-// interfaces etc.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#symbolInformation
-type SymbolInformation struct {
- // extends BaseSymbolInformation
- // Indicates if this symbol is deprecated.
- //
- // @deprecated Use tags instead
- Deprecated bool `json:"deprecated,omitempty"`
- // The location of this symbol. The location's range is used by a tool
- // to reveal the location in the editor. If the symbol is selected in the
- // tool the range's start information is used to position the cursor. So
- // the range usually spans more than the actual symbol's name and does
- // normally include things like visibility modifiers.
- //
- // The range doesn't have to denote a node range in the sense of an abstract
- // syntax tree. It can therefore not be used to re-construct a hierarchy of
- // the symbols.
- Location Location `json:"location"`
- // The name of this symbol.
- Name string `json:"name"`
- // The kind of this symbol.
- Kind SymbolKind `json:"kind"`
- // Tags for this symbol.
- //
- // @since 3.16.0
- Tags []SymbolTag `json:"tags,omitempty"`
- // The name of the symbol containing this symbol. This information is for
- // user interface purposes (e.g. to render a qualifier in the user interface
- // if necessary). It can't be used to re-infer a hierarchy for the document
- // symbols.
- ContainerName string `json:"containerName,omitempty"`
-}
-
-// A symbol kind.
-type SymbolKind uint32
-
-// Symbol tags are extra annotations that tweak the rendering of a symbol.
-//
-// @since 3.16
-type SymbolTag uint32
-
-// Describe options to be used when registered for text document change events.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentChangeRegistrationOptions
-type TextDocumentChangeRegistrationOptions struct {
- // How documents are synced to the server.
- SyncKind TextDocumentSyncKind `json:"syncKind"`
- TextDocumentRegistrationOptions
-}
-
-// Text document specific client capabilities.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentClientCapabilities
-type TextDocumentClientCapabilities struct {
- // Defines which synchronization capabilities the client supports.
- Synchronization *TextDocumentSyncClientCapabilities `json:"synchronization,omitempty"`
- // Capabilities specific to the `textDocument/completion` request.
- Completion CompletionClientCapabilities `json:"completion,omitempty"`
- // Capabilities specific to the `textDocument/hover` request.
- Hover *HoverClientCapabilities `json:"hover,omitempty"`
- // Capabilities specific to the `textDocument/signatureHelp` request.
- SignatureHelp *SignatureHelpClientCapabilities `json:"signatureHelp,omitempty"`
- // Capabilities specific to the `textDocument/declaration` request.
- //
- // @since 3.14.0
- Declaration *DeclarationClientCapabilities `json:"declaration,omitempty"`
- // Capabilities specific to the `textDocument/definition` request.
- Definition *DefinitionClientCapabilities `json:"definition,omitempty"`
- // Capabilities specific to the `textDocument/typeDefinition` request.
- //
- // @since 3.6.0
- TypeDefinition *TypeDefinitionClientCapabilities `json:"typeDefinition,omitempty"`
- // Capabilities specific to the `textDocument/implementation` request.
- //
- // @since 3.6.0
- Implementation *ImplementationClientCapabilities `json:"implementation,omitempty"`
- // Capabilities specific to the `textDocument/references` request.
- References *ReferenceClientCapabilities `json:"references,omitempty"`
- // Capabilities specific to the `textDocument/documentHighlight` request.
- DocumentHighlight *DocumentHighlightClientCapabilities `json:"documentHighlight,omitempty"`
- // Capabilities specific to the `textDocument/documentSymbol` request.
- DocumentSymbol DocumentSymbolClientCapabilities `json:"documentSymbol,omitempty"`
- // Capabilities specific to the `textDocument/codeAction` request.
- CodeAction CodeActionClientCapabilities `json:"codeAction,omitempty"`
- // Capabilities specific to the `textDocument/codeLens` request.
- CodeLens *CodeLensClientCapabilities `json:"codeLens,omitempty"`
- // Capabilities specific to the `textDocument/documentLink` request.
- DocumentLink *DocumentLinkClientCapabilities `json:"documentLink,omitempty"`
- // Capabilities specific to the `textDocument/documentColor` and the
- // `textDocument/colorPresentation` request.
- //
- // @since 3.6.0
- ColorProvider *DocumentColorClientCapabilities `json:"colorProvider,omitempty"`
- // Capabilities specific to the `textDocument/formatting` request.
- Formatting *DocumentFormattingClientCapabilities `json:"formatting,omitempty"`
- // Capabilities specific to the `textDocument/rangeFormatting` request.
- RangeFormatting *DocumentRangeFormattingClientCapabilities `json:"rangeFormatting,omitempty"`
- // Capabilities specific to the `textDocument/onTypeFormatting` request.
- OnTypeFormatting *DocumentOnTypeFormattingClientCapabilities `json:"onTypeFormatting,omitempty"`
- // Capabilities specific to the `textDocument/rename` request.
- Rename *RenameClientCapabilities `json:"rename,omitempty"`
- // Capabilities specific to the `textDocument/foldingRange` request.
- //
- // @since 3.10.0
- FoldingRange *FoldingRangeClientCapabilities `json:"foldingRange,omitempty"`
- // Capabilities specific to the `textDocument/selectionRange` request.
- //
- // @since 3.15.0
- SelectionRange *SelectionRangeClientCapabilities `json:"selectionRange,omitempty"`
- // Capabilities specific to the `textDocument/publishDiagnostics` notification.
- PublishDiagnostics PublishDiagnosticsClientCapabilities `json:"publishDiagnostics,omitempty"`
- // Capabilities specific to the various call hierarchy requests.
- //
- // @since 3.16.0
- CallHierarchy *CallHierarchyClientCapabilities `json:"callHierarchy,omitempty"`
- // Capabilities specific to the various semantic token request.
- //
- // @since 3.16.0
- SemanticTokens SemanticTokensClientCapabilities `json:"semanticTokens,omitempty"`
- // Capabilities specific to the `textDocument/linkedEditingRange` request.
- //
- // @since 3.16.0
- LinkedEditingRange *LinkedEditingRangeClientCapabilities `json:"linkedEditingRange,omitempty"`
- // Client capabilities specific to the `textDocument/moniker` request.
- //
- // @since 3.16.0
- Moniker *MonikerClientCapabilities `json:"moniker,omitempty"`
- // Capabilities specific to the various type hierarchy requests.
- //
- // @since 3.17.0
- TypeHierarchy *TypeHierarchyClientCapabilities `json:"typeHierarchy,omitempty"`
- // Capabilities specific to the `textDocument/inlineValue` request.
- //
- // @since 3.17.0
- InlineValue *InlineValueClientCapabilities `json:"inlineValue,omitempty"`
- // Capabilities specific to the `textDocument/inlayHint` request.
- //
- // @since 3.17.0
- InlayHint *InlayHintClientCapabilities `json:"inlayHint,omitempty"`
- // Capabilities specific to the diagnostic pull model.
- //
- // @since 3.17.0
- Diagnostic *DiagnosticClientCapabilities `json:"diagnostic,omitempty"`
- // Client capabilities specific to inline completions.
- //
- // @since 3.18.0
- // @proposed
- InlineCompletion *InlineCompletionClientCapabilities `json:"inlineCompletion,omitempty"`
-}
-
-// An event describing a change to a text document. If only a text is provided
-// it is considered to be the full content of the document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentChangeEvent
-type (
- TextDocumentContentChangeEvent = Or_TextDocumentContentChangeEvent // (alias)
- // @since 3.18.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentChangePartial
- TextDocumentContentChangePartial struct {
- // The range of the document that changed.
- Range *Range `json:"range,omitempty"`
- // The optional length of the range that got replaced.
- //
- // @deprecated use range instead.
- RangeLength uint32 `json:"rangeLength,omitempty"`
- // The new text for the provided range.
- Text string `json:"text"`
- }
-)
-
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentChangeWholeDocument
-type TextDocumentContentChangeWholeDocument struct {
- // The new text of the whole document.
- Text string `json:"text"`
-}
-
-// Client capabilities for a text document content provider.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentClientCapabilities
-type TextDocumentContentClientCapabilities struct {
- // Text document content provider supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// Text document content provider options.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentOptions
-type TextDocumentContentOptions struct {
- // The scheme for which the server provides content.
- Scheme string `json:"scheme"`
-}
-
-// Parameters for the `workspace/textDocumentContent` request.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentParams
-type TextDocumentContentParams struct {
- // The uri of the text document.
- URI DocumentURI `json:"uri"`
-}
-
-// Parameters for the `workspace/textDocumentContent/refresh` request.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentRefreshParams
-type TextDocumentContentRefreshParams struct {
- // The uri of the text document to refresh.
- URI DocumentURI `json:"uri"`
-}
-
-// Text document content provider registration options.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentContentRegistrationOptions
-type TextDocumentContentRegistrationOptions struct {
- TextDocumentContentOptions
- StaticRegistrationOptions
-}
-
-// Describes textual changes on a text document. A TextDocumentEdit describes all changes
-// on a document version Si and after they are applied move the document to version Si+1.
-// So the creator of a TextDocumentEdit doesn't need to sort the array of edits or do any
-// kind of ordering. However the edits must be non overlapping.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentEdit
-type TextDocumentEdit struct {
- // The text document to change.
- TextDocument OptionalVersionedTextDocumentIdentifier `json:"textDocument"`
- // The edits to be applied.
- //
- // @since 3.16.0 - support for AnnotatedTextEdit. This is guarded using a
- // client capability.
- //
- // @since 3.18.0 - support for SnippetTextEdit. This is guarded using a
- // client capability.
- Edits []Or_TextDocumentEdit_edits_Elem `json:"edits"`
-}
-
-// A document filter denotes a document by different properties like
-// the {@link TextDocument.languageId language}, the {@link Uri.scheme scheme} of
-// its resource, or a glob-pattern that is applied to the {@link TextDocument.fileName path}.
-//
-// Glob patterns can have the following syntax:
-//
-// - `*` to match one or more characters in a path segment
-// - `?` to match on one character in a path segment
-// - `**` to match any number of path segments, including none
-// - `{}` to group sub patterns into an OR expression. (e.g. `**/*.{ts,js}` matches all TypeScript and JavaScript files)
-// - `[]` to declare a range of characters to match in a path segment (e.g., `example.[0-9]` to match on `example.0`, `example.1`, …)
-// - `[!...]` to negate a range of characters to match in a path segment (e.g., `example.[!0-9]` to match on `example.a`, `example.b`, but not `example.0`)
-//
-// @sample A language filter that applies to typescript files on disk: `{ language: 'typescript', scheme: 'file' }`
-// @sample A language filter that applies to all package.json paths: `{ language: 'json', pattern: '**package.json' }`
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentFilter
-type (
- TextDocumentFilter = Or_TextDocumentFilter // (alias)
- // A document filter where `language` is required field.
- //
- // @since 3.18.0
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentFilterLanguage
- TextDocumentFilterLanguage struct {
- // A language id, like `typescript`.
- Language string `json:"language"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme,omitempty"`
- // A glob pattern, like **/*.{ts,js}. See TextDocumentFilter for examples.
- //
- // @since 3.18.0 - support for relative patterns.
- Pattern *GlobPattern `json:"pattern,omitempty"`
- }
-)
-
-// A document filter where `pattern` is required field.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentFilterPattern
-type TextDocumentFilterPattern struct {
- // A language id, like `typescript`.
- Language string `json:"language,omitempty"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme,omitempty"`
- // A glob pattern, like **/*.{ts,js}. See TextDocumentFilter for examples.
- //
- // @since 3.18.0 - support for relative patterns.
- Pattern GlobPattern `json:"pattern"`
-}
-
-// A document filter where `scheme` is required field.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentFilterScheme
-type TextDocumentFilterScheme struct {
- // A language id, like `typescript`.
- Language string `json:"language,omitempty"`
- // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`.
- Scheme string `json:"scheme"`
- // A glob pattern, like **/*.{ts,js}. See TextDocumentFilter for examples.
- //
- // @since 3.18.0 - support for relative patterns.
- Pattern *GlobPattern `json:"pattern,omitempty"`
-}
-
-// A literal to identify a text document in the client.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentIdentifier
-type TextDocumentIdentifier struct {
- // The text document's uri.
- URI DocumentURI `json:"uri"`
-}
-
-// An item to transfer a text document from the client to the
-// server.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentItem
-type TextDocumentItem struct {
- // The text document's uri.
- URI DocumentURI `json:"uri"`
- // The text document's language identifier.
- LanguageID LanguageKind `json:"languageId"`
- // The version number of this document (it will increase after each
- // change, including undo/redo).
- Version int32 `json:"version"`
- // The content of the opened text document.
- Text string `json:"text"`
-}
-
-// A parameter literal used in requests to pass a text document and a position inside that
-// document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentPositionParams
-type TextDocumentPositionParams struct {
- // The text document.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The position inside the text document.
- Position Position `json:"position"`
-}
-
-// General text document registration options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentRegistrationOptions
-type TextDocumentRegistrationOptions struct {
- // A document selector to identify the scope of the registration. If set to null
- // the document selector provided on the client side will be used.
- DocumentSelector DocumentSelector `json:"documentSelector"`
-}
-
-// Represents reasons why a text document is saved.
-type TextDocumentSaveReason uint32
-
-// Save registration options.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentSaveRegistrationOptions
-type TextDocumentSaveRegistrationOptions struct {
- TextDocumentRegistrationOptions
- SaveOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentSyncClientCapabilities
-type TextDocumentSyncClientCapabilities struct {
- // Whether text document synchronization supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports sending will save notifications.
- WillSave bool `json:"willSave,omitempty"`
- // The client supports sending a will save request and
- // waits for a response providing text edits which will
- // be applied to the document before it is saved.
- WillSaveWaitUntil bool `json:"willSaveWaitUntil,omitempty"`
- // The client supports did save notifications.
- DidSave bool `json:"didSave,omitempty"`
-}
-
-// Defines how the host (editor) should sync
-// document changes to the language server.
-type TextDocumentSyncKind uint32
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocumentSyncOptions
-type TextDocumentSyncOptions struct {
- // Open and close notifications are sent to the server. If omitted open close notification should not
- // be sent.
- OpenClose bool `json:"openClose,omitempty"`
- // Change notifications are sent to the server. See TextDocumentSyncKind.None, TextDocumentSyncKind.Full
- // and TextDocumentSyncKind.Incremental. If omitted it defaults to TextDocumentSyncKind.None.
- Change TextDocumentSyncKind `json:"change,omitempty"`
- // If present will save notifications are sent to the server. If omitted the notification should not be
- // sent.
- WillSave bool `json:"willSave,omitempty"`
- // If present will save wait until requests are sent to the server. If omitted the request should not be
- // sent.
- WillSaveWaitUntil bool `json:"willSaveWaitUntil,omitempty"`
- // If present save notifications are sent to the server. If omitted the notification should not be
- // sent.
- Save *SaveOptions `json:"save,omitempty"`
-}
-
-// A text edit applicable to a text document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textEdit
-type TextEdit struct {
- // The range of the text document to be manipulated. To insert
- // text into a document create a range where start === end.
- Range Range `json:"range"`
- // The string to be inserted. For delete operations use an
- // empty string.
- NewText string `json:"newText"`
-}
-type (
- TokenFormat string
- TraceValue string
-)
-
-// created for Tuple
-type Tuple_ParameterInformation_label_Item1 struct {
- Fld0 uint32 `json:"fld0"`
- Fld1 uint32 `json:"fld1"`
-}
-
-// Since 3.6.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeDefinitionClientCapabilities
-type TypeDefinitionClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `TypeDefinitionRegistrationOptions` return value
- // for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // The client supports additional metadata in the form of definition links.
- //
- // Since 3.14.0
- LinkSupport bool `json:"linkSupport,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeDefinitionOptions
-type TypeDefinitionOptions struct {
- WorkDoneProgressOptions
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeDefinitionParams
-type TypeDefinitionParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeDefinitionRegistrationOptions
-type TypeDefinitionRegistrationOptions struct {
- TextDocumentRegistrationOptions
- TypeDefinitionOptions
- StaticRegistrationOptions
-}
-
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyClientCapabilities
-type TypeHierarchyClientCapabilities struct {
- // Whether implementation supports dynamic registration. If this is set to `true`
- // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
- // return value for the corresponding server capability as well.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
-}
-
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyItem
-type TypeHierarchyItem struct {
- // The name of this item.
- Name string `json:"name"`
- // The kind of this item.
- Kind SymbolKind `json:"kind"`
- // Tags for this item.
- Tags []SymbolTag `json:"tags,omitempty"`
- // More detail for this item, e.g. the signature of a function.
- Detail string `json:"detail,omitempty"`
- // The resource identifier of this item.
- URI DocumentURI `json:"uri"`
- // The range enclosing this symbol not including leading/trailing whitespace
- // but everything else, e.g. comments and code.
- Range Range `json:"range"`
- // The range that should be selected and revealed when this symbol is being
- // picked, e.g. the name of a function. Must be contained by the
- // {@link TypeHierarchyItem.range `range`}.
- SelectionRange Range `json:"selectionRange"`
- // A data entry field that is preserved between a type hierarchy prepare and
- // supertypes or subtypes requests. It could also be used to identify the
- // type hierarchy in the server, helping improve the performance on
- // resolving supertypes and subtypes.
- Data interface{} `json:"data,omitempty"`
-}
-
-// Type hierarchy options used during static registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyOptions
-type TypeHierarchyOptions struct {
- WorkDoneProgressOptions
-}
-
-// The parameter of a `textDocument/prepareTypeHierarchy` request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyPrepareParams
-type TypeHierarchyPrepareParams struct {
- TextDocumentPositionParams
- WorkDoneProgressParams
-}
-
-// Type hierarchy options used during static or dynamic registration.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyRegistrationOptions
-type TypeHierarchyRegistrationOptions struct {
- TextDocumentRegistrationOptions
- TypeHierarchyOptions
- StaticRegistrationOptions
-}
-
-// The parameter of a `typeHierarchy/subtypes` request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchySubtypesParams
-type TypeHierarchySubtypesParams struct {
- Item TypeHierarchyItem `json:"item"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// The parameter of a `typeHierarchy/supertypes` request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchySupertypesParams
-type TypeHierarchySupertypesParams struct {
- Item TypeHierarchyItem `json:"item"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// A diagnostic report indicating that the last returned
-// report is still accurate.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#unchangedDocumentDiagnosticReport
-type UnchangedDocumentDiagnosticReport struct {
- // A document diagnostic report indicating
- // no changes to the last result. A server can
- // only return `unchanged` if result ids are
- // provided.
- Kind string `json:"kind"`
- // A result id which will be sent on the next
- // diagnostic request for the same document.
- ResultID string `json:"resultId"`
-}
-
-// Moniker uniqueness level to define scope of the moniker.
-//
-// @since 3.16.0
-type UniquenessLevel string
-
-// General parameters to unregister a request or notification.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#unregistration
-type Unregistration struct {
- // The id used to unregister the request or notification. Usually an id
- // provided during the register request.
- ID string `json:"id"`
- // The method to unregister for.
- Method string `json:"method"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#unregistrationParams
-type UnregistrationParams struct {
- Unregisterations []Unregistration `json:"unregisterations"`
-}
-
-// A versioned notebook document identifier.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#versionedNotebookDocumentIdentifier
-type VersionedNotebookDocumentIdentifier struct {
- // The version number of this notebook document.
- Version int32 `json:"version"`
- // The notebook document's uri.
- URI URI `json:"uri"`
-}
-
-// A text document identifier to denote a specific version of a text document.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#versionedTextDocumentIdentifier
-type VersionedTextDocumentIdentifier struct {
- // The version number of this document.
- Version int32 `json:"version"`
- TextDocumentIdentifier
-}
-type (
- WatchKind = uint32 // The parameters sent in a will save text document notification.
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#willSaveTextDocumentParams
- WillSaveTextDocumentParams struct {
- // The document that will be saved.
- TextDocument TextDocumentIdentifier `json:"textDocument"`
- // The 'TextDocumentSaveReason'.
- Reason TextDocumentSaveReason `json:"reason"`
- }
-)
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#windowClientCapabilities
-type WindowClientCapabilities struct {
- // It indicates whether the client supports server initiated
- // progress using the `window/workDoneProgress/create` request.
- //
- // The capability also controls Whether client supports handling
- // of progress notifications. If set servers are allowed to report a
- // `workDoneProgress` property in the request specific server
- // capabilities.
- //
- // @since 3.15.0
- WorkDoneProgress bool `json:"workDoneProgress,omitempty"`
- // Capabilities specific to the showMessage request.
- //
- // @since 3.16.0
- ShowMessage *ShowMessageRequestClientCapabilities `json:"showMessage,omitempty"`
- // Capabilities specific to the showDocument request.
- //
- // @since 3.16.0
- ShowDocument *ShowDocumentClientCapabilities `json:"showDocument,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressBegin
-type WorkDoneProgressBegin struct {
- Kind string `json:"kind"`
- // Mandatory title of the progress operation. Used to briefly inform about
- // the kind of operation being performed.
- //
- // Examples: "Indexing" or "Linking dependencies".
- Title string `json:"title"`
- // Controls if a cancel button should show to allow the user to cancel the
- // long running operation. Clients that don't support cancellation are allowed
- // to ignore the setting.
- Cancellable bool `json:"cancellable,omitempty"`
- // Optional, more detailed associated progress message. Contains
- // complementary information to the `title`.
- //
- // Examples: "3/25 files", "project/src/module2", "node_modules/some_dep".
- // If unset, the previous progress message (if any) is still valid.
- Message string `json:"message,omitempty"`
- // Optional progress percentage to display (value 100 is considered 100%).
- // If not provided infinite progress is assumed and clients are allowed
- // to ignore the `percentage` value in subsequent in report notifications.
- //
- // The value should be steadily rising. Clients are free to ignore values
- // that are not following this rule. The value range is [0, 100].
- Percentage uint32 `json:"percentage,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressCancelParams
-type WorkDoneProgressCancelParams struct {
- // The token to be used to report progress.
- Token ProgressToken `json:"token"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressCreateParams
-type WorkDoneProgressCreateParams struct {
- // The token to be used to report progress.
- Token ProgressToken `json:"token"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressEnd
-type WorkDoneProgressEnd struct {
- Kind string `json:"kind"`
- // Optional, a final message indicating to for example indicate the outcome
- // of the operation.
- Message string `json:"message,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressOptions
-type WorkDoneProgressOptions struct {
- WorkDoneProgress bool `json:"workDoneProgress,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressParams
-type WorkDoneProgressParams struct {
- // An optional token that a server can use to report work done progress.
- WorkDoneToken ProgressToken `json:"workDoneToken,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressReport
-type WorkDoneProgressReport struct {
- Kind string `json:"kind"`
- // Controls enablement state of a cancel button.
- //
- // Clients that don't support cancellation or don't support controlling the button's
- // enablement state are allowed to ignore the property.
- Cancellable bool `json:"cancellable,omitempty"`
- // Optional, more detailed associated progress message. Contains
- // complementary information to the `title`.
- //
- // Examples: "3/25 files", "project/src/module2", "node_modules/some_dep".
- // If unset, the previous progress message (if any) is still valid.
- Message string `json:"message,omitempty"`
- // Optional progress percentage to display (value 100 is considered 100%).
- // If not provided infinite progress is assumed and clients are allowed
- // to ignore the `percentage` value in subsequent in report notifications.
- //
- // The value should be steadily rising. Clients are free to ignore values
- // that are not following this rule. The value range is [0, 100]
- Percentage uint32 `json:"percentage,omitempty"`
-}
-
-// Workspace specific client capabilities.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceClientCapabilities
-type WorkspaceClientCapabilities struct {
- // The client supports applying batch edits
- // to the workspace by supporting the request
- // 'workspace/applyEdit'
- ApplyEdit bool `json:"applyEdit,omitempty"`
- // Capabilities specific to `WorkspaceEdit`s.
- WorkspaceEdit *WorkspaceEditClientCapabilities `json:"workspaceEdit,omitempty"`
- // Capabilities specific to the `workspace/didChangeConfiguration` notification.
- DidChangeConfiguration DidChangeConfigurationClientCapabilities `json:"didChangeConfiguration,omitempty"`
- // Capabilities specific to the `workspace/didChangeWatchedFiles` notification.
- DidChangeWatchedFiles DidChangeWatchedFilesClientCapabilities `json:"didChangeWatchedFiles,omitempty"`
- // Capabilities specific to the `workspace/symbol` request.
- Symbol *WorkspaceSymbolClientCapabilities `json:"symbol,omitempty"`
- // Capabilities specific to the `workspace/executeCommand` request.
- ExecuteCommand *ExecuteCommandClientCapabilities `json:"executeCommand,omitempty"`
- // The client has support for workspace folders.
- //
- // @since 3.6.0
- WorkspaceFolders bool `json:"workspaceFolders,omitempty"`
- // The client supports `workspace/configuration` requests.
- //
- // @since 3.6.0
- Configuration bool `json:"configuration,omitempty"`
- // Capabilities specific to the semantic token requests scoped to the
- // workspace.
- //
- // @since 3.16.0.
- SemanticTokens *SemanticTokensWorkspaceClientCapabilities `json:"semanticTokens,omitempty"`
- // Capabilities specific to the code lens requests scoped to the
- // workspace.
- //
- // @since 3.16.0.
- CodeLens *CodeLensWorkspaceClientCapabilities `json:"codeLens,omitempty"`
- // The client has support for file notifications/requests for user operations on files.
- //
- // Since 3.16.0
- FileOperations *FileOperationClientCapabilities `json:"fileOperations,omitempty"`
- // Capabilities specific to the inline values requests scoped to the
- // workspace.
- //
- // @since 3.17.0.
- InlineValue *InlineValueWorkspaceClientCapabilities `json:"inlineValue,omitempty"`
- // Capabilities specific to the inlay hint requests scoped to the
- // workspace.
- //
- // @since 3.17.0.
- InlayHint *InlayHintWorkspaceClientCapabilities `json:"inlayHint,omitempty"`
- // Capabilities specific to the diagnostic requests scoped to the
- // workspace.
- //
- // @since 3.17.0.
- Diagnostics *DiagnosticWorkspaceClientCapabilities `json:"diagnostics,omitempty"`
- // Capabilities specific to the folding range requests scoped to the workspace.
- //
- // @since 3.18.0
- // @proposed
- FoldingRange *FoldingRangeWorkspaceClientCapabilities `json:"foldingRange,omitempty"`
- // Capabilities specific to the `workspace/textDocumentContent` request.
- //
- // @since 3.18.0
- // @proposed
- TextDocumentContent *TextDocumentContentClientCapabilities `json:"textDocumentContent,omitempty"`
-}
-
-// Parameters of the workspace diagnostic request.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceDiagnosticParams
-type WorkspaceDiagnosticParams struct {
- // The additional identifier provided during registration.
- Identifier string `json:"identifier,omitempty"`
- // The currently known diagnostic reports with their
- // previous result ids.
- PreviousResultIds []PreviousResultId `json:"previousResultIds"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// A workspace diagnostic report.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceDiagnosticReport
-type WorkspaceDiagnosticReport struct {
- Items []WorkspaceDocumentDiagnosticReport `json:"items"`
-}
-
-// A partial result for a workspace diagnostic report.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceDiagnosticReportPartialResult
-type WorkspaceDiagnosticReportPartialResult struct {
- Items []WorkspaceDocumentDiagnosticReport `json:"items"`
-}
-
-// A workspace diagnostic document report.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceDocumentDiagnosticReport
-type (
- WorkspaceDocumentDiagnosticReport = Or_WorkspaceDocumentDiagnosticReport // (alias)
- // A workspace edit represents changes to many resources managed in the workspace. The edit
- // should either provide `changes` or `documentChanges`. If documentChanges are present
- // they are preferred over `changes` if the client can handle versioned document edits.
- //
- // Since version 3.13.0 a workspace edit can contain resource operations as well. If resource
- // operations are present clients need to execute the operations in the order in which they
- // are provided. So a workspace edit for example can consist of the following two changes:
- // (1) a create file a.txt and (2) a text document edit which insert text into file a.txt.
- //
- // An invalid sequence (e.g. (1) delete file a.txt and (2) insert text into file a.txt) will
- // cause failure of the operation. How the client recovers from the failure is described by
- // the client capability: `workspace.workspaceEdit.failureHandling`
- //
- // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceEdit
- WorkspaceEdit struct {
- // Holds changes to existing resources.
- Changes map[DocumentURI][]TextEdit `json:"changes,omitempty"`
- // Depending on the client capability `workspace.workspaceEdit.resourceOperations` document changes
- // are either an array of `TextDocumentEdit`s to express changes to n different text documents
- // where each text document edit addresses a specific version of a text document. Or it can contain
- // above `TextDocumentEdit`s mixed with create, rename and delete file / folder operations.
- //
- // Whether a client supports versioned document edits is expressed via
- // `workspace.workspaceEdit.documentChanges` client capability.
- //
- // If a client neither supports `documentChanges` nor `workspace.workspaceEdit.resourceOperations` then
- // only plain `TextEdit`s using the `changes` property are supported.
- DocumentChanges []DocumentChange `json:"documentChanges,omitempty"`
- // A map of change annotations that can be referenced in `AnnotatedTextEdit`s or create, rename and
- // delete file / folder operations.
- //
- // Whether clients honor this property depends on the client capability `workspace.changeAnnotationSupport`.
- //
- // @since 3.16.0
- ChangeAnnotations map[ChangeAnnotationIdentifier]ChangeAnnotation `json:"changeAnnotations,omitempty"`
- }
-)
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceEditClientCapabilities
-type WorkspaceEditClientCapabilities struct {
- // The client supports versioned document changes in `WorkspaceEdit`s
- DocumentChanges bool `json:"documentChanges,omitempty"`
- // The resource operations the client supports. Clients should at least
- // support 'create', 'rename' and 'delete' files and folders.
- //
- // @since 3.13.0
- ResourceOperations []ResourceOperationKind `json:"resourceOperations,omitempty"`
- // The failure handling strategy of a client if applying the workspace edit
- // fails.
- //
- // @since 3.13.0
- FailureHandling *FailureHandlingKind `json:"failureHandling,omitempty"`
- // Whether the client normalizes line endings to the client specific
- // setting.
- // If set to `true` the client will normalize line ending characters
- // in a workspace edit to the client-specified new line
- // character.
- //
- // @since 3.16.0
- NormalizesLineEndings bool `json:"normalizesLineEndings,omitempty"`
- // Whether the client in general supports change annotations on text edits,
- // create file, rename file and delete file changes.
- //
- // @since 3.16.0
- ChangeAnnotationSupport *ChangeAnnotationsSupportOptions `json:"changeAnnotationSupport,omitempty"`
- // Whether the client supports `WorkspaceEditMetadata` in `WorkspaceEdit`s.
- //
- // @since 3.18.0
- // @proposed
- MetadataSupport bool `json:"metadataSupport,omitempty"`
- // Whether the client supports snippets as text edits.
- //
- // @since 3.18.0
- // @proposed
- SnippetEditSupport bool `json:"snippetEditSupport,omitempty"`
-}
-
-// Additional data about a workspace edit.
-//
-// @since 3.18.0
-// @proposed
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceEditMetadata
-type WorkspaceEditMetadata struct {
- // Signal to the editor that this edit is a refactoring.
- IsRefactoring bool `json:"isRefactoring,omitempty"`
-}
-
-// A workspace folder inside a client.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFolder
-type WorkspaceFolder struct {
- // The associated URI for this workspace folder.
- URI URI `json:"uri"`
- // The name of the workspace folder. Used to refer to this
- // workspace folder in the user interface.
- Name string `json:"name"`
-}
-
-// The workspace folder change event.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFoldersChangeEvent
-type WorkspaceFoldersChangeEvent struct {
- // The array of added workspace folders
- Added []WorkspaceFolder `json:"added"`
- // The array of the removed workspace folders
- Removed []WorkspaceFolder `json:"removed"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFoldersInitializeParams
-type WorkspaceFoldersInitializeParams struct {
- // The workspace folders configured in the client when the server starts.
- //
- // This property is only available if the client supports workspace folders.
- // It can be `null` if the client supports workspace folders but none are
- // configured.
- //
- // @since 3.6.0
- WorkspaceFolders []WorkspaceFolder `json:"workspaceFolders,omitempty"`
-}
-
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFoldersServerCapabilities
-type WorkspaceFoldersServerCapabilities struct {
- // The server has support for workspace folders
- Supported bool `json:"supported,omitempty"`
- // Whether the server wants to receive workspace folder
- // change notifications.
- //
- // If a string is provided the string is treated as an ID
- // under which the notification is registered on the client
- // side. The ID can be used to unregister for these events
- // using the `client/unregisterCapability` request.
- ChangeNotifications *Or_WorkspaceFoldersServerCapabilities_changeNotifications `json:"changeNotifications,omitempty"`
-}
-
-// A full document diagnostic report for a workspace diagnostic result.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceFullDocumentDiagnosticReport
-type WorkspaceFullDocumentDiagnosticReport struct {
- // The URI for which diagnostic information is reported.
- URI DocumentURI `json:"uri"`
- // The version number for which the diagnostics are reported.
- // If the document is not marked as open `null` can be provided.
- Version int32 `json:"version"`
- FullDocumentDiagnosticReport
-}
-
-// Defines workspace specific capabilities of the server.
-//
-// @since 3.18.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceOptions
-type WorkspaceOptions struct {
- // The server supports workspace folder.
- //
- // @since 3.6.0
- WorkspaceFolders *WorkspaceFoldersServerCapabilities `json:"workspaceFolders,omitempty"`
- // The server is interested in notifications/requests for operations on files.
- //
- // @since 3.16.0
- FileOperations *FileOperationOptions `json:"fileOperations,omitempty"`
- // The server supports the `workspace/textDocumentContent` request.
- //
- // @since 3.18.0
- // @proposed
- TextDocumentContent *Or_WorkspaceOptions_textDocumentContent `json:"textDocumentContent,omitempty"`
-}
-
-// A special workspace symbol that supports locations without a range.
-//
-// See also SymbolInformation.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbol
-type WorkspaceSymbol struct {
- // The location of the symbol. Whether a server is allowed to
- // return a location without a range depends on the client
- // capability `workspace.symbol.resolveSupport`.
- //
- // See SymbolInformation#location for more details.
- Location Or_WorkspaceSymbol_location `json:"location"`
- // A data entry field that is preserved on a workspace symbol between a
- // workspace symbol request and a workspace symbol resolve request.
- Data interface{} `json:"data,omitempty"`
- BaseSymbolInformation
-}
-
-// Client capabilities for a {@link WorkspaceSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbolClientCapabilities
-type WorkspaceSymbolClientCapabilities struct {
- // Symbol request supports dynamic registration.
- DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
- // Specific capabilities for the `SymbolKind` in the `workspace/symbol` request.
- SymbolKind *ClientSymbolKindOptions `json:"symbolKind,omitempty"`
- // The client supports tags on `SymbolInformation`.
- // Clients supporting tags have to handle unknown tags gracefully.
- //
- // @since 3.16.0
- TagSupport *ClientSymbolTagOptions `json:"tagSupport,omitempty"`
- // The client support partial workspace symbols. The client will send the
- // request `workspaceSymbol/resolve` to the server to resolve additional
- // properties.
- //
- // @since 3.17.0
- ResolveSupport *ClientSymbolResolveOptions `json:"resolveSupport,omitempty"`
-}
-
-// Server capabilities for a {@link WorkspaceSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbolOptions
-type WorkspaceSymbolOptions struct {
- // The server provides support to resolve additional
- // information for a workspace symbol.
- //
- // @since 3.17.0
- ResolveProvider bool `json:"resolveProvider,omitempty"`
- WorkDoneProgressOptions
-}
-
-// The parameters of a {@link WorkspaceSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbolParams
-type WorkspaceSymbolParams struct {
- // A query string to filter symbols by. Clients may send an empty
- // string here to request all symbols.
- //
- // The `query`-parameter should be interpreted in a *relaxed way* as editors
- // will apply their own highlighting and scoring on the results. A good rule
- // of thumb is to match case-insensitive and to simply check that the
- // characters of *query* appear in their order in a candidate symbol.
- // Servers shouldn't use prefix, substring, or similar strict matching.
- Query string `json:"query"`
- WorkDoneProgressParams
- PartialResultParams
-}
-
-// Registration options for a {@link WorkspaceSymbolRequest}.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceSymbolRegistrationOptions
-type WorkspaceSymbolRegistrationOptions struct {
- WorkspaceSymbolOptions
-}
-
-// An unchanged document diagnostic report for a workspace diagnostic result.
-//
-// @since 3.17.0
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workspaceUnchangedDocumentDiagnosticReport
-type WorkspaceUnchangedDocumentDiagnosticReport struct {
- // The URI for which diagnostic information is reported.
- URI DocumentURI `json:"uri"`
- // The version number for which the diagnostics are reported.
- // If the document is not marked as open `null` can be provided.
- Version int32 `json:"version"`
- UnchangedDocumentDiagnosticReport
-}
-
-// The initialize parameters
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#_InitializeParams
-type XInitializeParams struct {
- // The process Id of the parent process that started
- // the server.
- //
- // Is `null` if the process has not been started by another process.
- // If the parent process is not alive then the server should exit.
- ProcessID int32 `json:"processId"`
- // Information about the client
- //
- // @since 3.15.0
- ClientInfo *ClientInfo `json:"clientInfo,omitempty"`
- // The locale the client is currently showing the user interface
- // in. This must not necessarily be the locale of the operating
- // system.
- //
- // Uses IETF language tags as the value's syntax
- // (See https://en.wikipedia.org/wiki/IETF_language_tag)
- //
- // @since 3.16.0
- Locale string `json:"locale,omitempty"`
- // The rootPath of the workspace. Is null
- // if no folder is open.
- //
- // @deprecated in favour of rootUri.
- RootPath string `json:"rootPath,omitempty"`
- // The rootUri of the workspace. Is null if no
- // folder is open. If both `rootPath` and `rootUri` are set
- // `rootUri` wins.
- //
- // @deprecated in favour of workspaceFolders.
- RootURI DocumentURI `json:"rootUri"`
- // The capabilities provided by the client (editor or tool)
- Capabilities ClientCapabilities `json:"capabilities"`
- // User provided initialization options.
- InitializationOptions interface{} `json:"initializationOptions,omitempty"`
- // The initial trace setting. If omitted trace is disabled ('off').
- Trace *TraceValue `json:"trace,omitempty"`
- WorkDoneProgressParams
-}
-
-// The initialize parameters
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#_InitializeParams
-type _InitializeParams struct {
- // The process Id of the parent process that started
- // the server.
- //
- // Is `null` if the process has not been started by another process.
- // If the parent process is not alive then the server should exit.
- ProcessID int32 `json:"processId"`
- // Information about the client
- //
- // @since 3.15.0
- ClientInfo *ClientInfo `json:"clientInfo,omitempty"`
- // The locale the client is currently showing the user interface
- // in. This need not necessarily be the locale of the operating
- // system.
- //
- // Uses IETF language tags as the value's syntax
- // (See https://en.wikipedia.org/wiki/IETF_language_tag)
- //
- // @since 3.16.0
- Locale string `json:"locale,omitempty"`
- // The rootPath of the workspace. Is null
- // if no folder is open.
- //
- // @deprecated in favour of rootUri.
- RootPath string `json:"rootPath,omitempty"`
- // The rootUri of the workspace. Is null if no
- // folder is open. If both `rootPath` and `rootUri` are set
- // `rootUri` wins.
- //
- // @deprecated in favour of workspaceFolders.
- RootURI DocumentURI `json:"rootUri"`
- // The capabilities provided by the client (editor or tool)
- Capabilities ClientCapabilities `json:"capabilities"`
- // User provided initialization options.
- InitializationOptions interface{} `json:"initializationOptions,omitempty"`
- // The initial trace setting. If omitted trace is disabled ('off').
- Trace *TraceValue `json:"trace,omitempty"`
- WorkDoneProgressParams
-}
-
-const (
- // A set of predefined code action kinds
- // Empty kind.
- Empty CodeActionKind = ""
- // Base kind for quickfix actions: 'quickfix'
- QuickFix CodeActionKind = "quickfix"
- // Base kind for refactoring actions: 'refactor'
- Refactor CodeActionKind = "refactor"
- // Base kind for refactoring extraction actions: 'refactor.extract'
- //
- // Example extract actions:
- //
- //
- // - Extract method
- // - Extract function
- // - Extract variable
- // - Extract interface from class
- // - ...
- RefactorExtract CodeActionKind = "refactor.extract"
- // Base kind for refactoring inline actions: 'refactor.inline'
- //
- // Example inline actions:
- //
- //
- // - Inline function
- // - Inline variable
- // - Inline constant
- // - ...
- RefactorInline CodeActionKind = "refactor.inline"
- // Base kind for refactoring move actions: `refactor.move`
- //
- // Example move actions:
- //
- //
- // - Move a function to a new file
- // - Move a property between classes
- // - Move method to base class
- // - ...
- //
- // @since 3.18.0
- // @proposed
- RefactorMove CodeActionKind = "refactor.move"
- // Base kind for refactoring rewrite actions: 'refactor.rewrite'
- //
- // Example rewrite actions:
- //
- //
- // - Convert JavaScript function to class
- // - Add or remove parameter
- // - Encapsulate field
- // - Make method static
- // - Move method to base class
- // - ...
- RefactorRewrite CodeActionKind = "refactor.rewrite"
- // Base kind for source actions: `source`
- //
- // Source code actions apply to the entire file.
- Source CodeActionKind = "source"
- // Base kind for an organize imports source action: `source.organizeImports`
- SourceOrganizeImports CodeActionKind = "source.organizeImports"
- // Base kind for auto-fix source actions: `source.fixAll`.
- //
- // Fix all actions automatically fix errors that have a clear fix that do not require user input.
- // They should not suppress errors or perform unsafe fixes such as generating new types or classes.
- //
- // @since 3.15.0
- SourceFixAll CodeActionKind = "source.fixAll"
- // Base kind for all code actions applying to the entire notebook's scope. CodeActionKinds using
- // this should always begin with `notebook.`
- //
- // @since 3.18.0
- Notebook CodeActionKind = "notebook"
- // The reason why code actions were requested.
- //
- // @since 3.17.0
- // Code actions were explicitly requested by the user or by an extension.
- CodeActionInvoked CodeActionTriggerKind = 1
- // Code actions were requested automatically.
- //
- // This typically happens when current selection in a file changes, but can
- // also be triggered when file content changes.
- CodeActionAutomatic CodeActionTriggerKind = 2
- // The kind of a completion entry.
- TextCompletion CompletionItemKind = 1
- MethodCompletion CompletionItemKind = 2
- FunctionCompletion CompletionItemKind = 3
- ConstructorCompletion CompletionItemKind = 4
- FieldCompletion CompletionItemKind = 5
- VariableCompletion CompletionItemKind = 6
- ClassCompletion CompletionItemKind = 7
- InterfaceCompletion CompletionItemKind = 8
- ModuleCompletion CompletionItemKind = 9
- PropertyCompletion CompletionItemKind = 10
- UnitCompletion CompletionItemKind = 11
- ValueCompletion CompletionItemKind = 12
- EnumCompletion CompletionItemKind = 13
- KeywordCompletion CompletionItemKind = 14
- SnippetCompletion CompletionItemKind = 15
- ColorCompletion CompletionItemKind = 16
- FileCompletion CompletionItemKind = 17
- ReferenceCompletion CompletionItemKind = 18
- FolderCompletion CompletionItemKind = 19
- EnumMemberCompletion CompletionItemKind = 20
- ConstantCompletion CompletionItemKind = 21
- StructCompletion CompletionItemKind = 22
- EventCompletion CompletionItemKind = 23
- OperatorCompletion CompletionItemKind = 24
- TypeParameterCompletion CompletionItemKind = 25
- // Completion item tags are extra annotations that tweak the rendering of a completion
- // item.
- //
- // @since 3.15.0
- // Render a completion as obsolete, usually using a strike-out.
- ComplDeprecated CompletionItemTag = 1
- // How a completion was triggered
- // Completion was triggered by typing an identifier (24x7 code
- // complete), manual invocation (e.g. Ctrl+Space) or via API.
- Invoked CompletionTriggerKind = 1
- // Completion was triggered by a trigger character specified by
- // the `triggerCharacters` properties of the `CompletionRegistrationOptions`.
- TriggerCharacter CompletionTriggerKind = 2
- // Completion was re-triggered as current completion list is incomplete
- TriggerForIncompleteCompletions CompletionTriggerKind = 3
- // The diagnostic's severity.
- // Reports an error.
- SeverityError DiagnosticSeverity = 1
- // Reports a warning.
- SeverityWarning DiagnosticSeverity = 2
- // Reports an information.
- SeverityInformation DiagnosticSeverity = 3
- // Reports a hint.
- SeverityHint DiagnosticSeverity = 4
- // The diagnostic tags.
- //
- // @since 3.15.0
- // Unused or unnecessary code.
- //
- // Clients are allowed to render diagnostics with this tag faded out instead of having
- // an error squiggle.
- Unnecessary DiagnosticTag = 1
- // Deprecated or obsolete code.
- //
- // Clients are allowed to render diagnostics with this tag struck through.
- Deprecated DiagnosticTag = 2
- // The document diagnostic report kinds.
- //
- // @since 3.17.0
- // A diagnostic report with a full
- // set of problems.
- DiagnosticFull DocumentDiagnosticReportKind = "full"
- // A report indicating that the last
- // returned report is still accurate.
- DiagnosticUnchanged DocumentDiagnosticReportKind = "unchanged"
- // A document highlight kind.
- // A textual occurrence.
- Text DocumentHighlightKind = 1
- // Read-access of a symbol, like reading a variable.
- Read DocumentHighlightKind = 2
- // Write-access of a symbol, like writing to a variable.
- Write DocumentHighlightKind = 3
- // Predefined error codes.
- ParseError ErrorCodes = -32700
- InvalidRequest ErrorCodes = -32600
- MethodNotFound ErrorCodes = -32601
- InvalidParams ErrorCodes = -32602
- InternalError ErrorCodes = -32603
- // Error code indicating that a server received a notification or
- // request before the server has received the `initialize` request.
- ServerNotInitialized ErrorCodes = -32002
- UnknownErrorCode ErrorCodes = -32001
- // Applying the workspace change is simply aborted if one of the changes provided
- // fails. All operations executed before the failing operation stay executed.
- Abort FailureHandlingKind = "abort"
- // All operations are executed transactional. That means they either all
- // succeed or no changes at all are applied to the workspace.
- Transactional FailureHandlingKind = "transactional"
- // If the workspace edit contains only textual file changes they are executed transactional.
- // If resource changes (create, rename or delete file) are part of the change the failure
- // handling strategy is abort.
- TextOnlyTransactional FailureHandlingKind = "textOnlyTransactional"
- // The client tries to undo the operations already executed. But there is no
- // guarantee that this is succeeding.
- Undo FailureHandlingKind = "undo"
- // The file event type
- // The file got created.
- Created FileChangeType = 1
- // The file got changed.
- Changed FileChangeType = 2
- // The file got deleted.
- Deleted FileChangeType = 3
- // A pattern kind describing if a glob pattern matches a file, a folder or
- // both.
- //
- // @since 3.16.0
- // The pattern matches a file only.
- FilePattern FileOperationPatternKind = "file"
- // The pattern matches a folder only.
- FolderPattern FileOperationPatternKind = "folder"
- // A set of predefined range kinds.
- // Folding range for a comment
- Comment FoldingRangeKind = "comment"
- // Folding range for an import or include
- Imports FoldingRangeKind = "imports"
- // Folding range for a region (e.g. `#region`)
- Region FoldingRangeKind = "region"
- // Inlay hint kinds.
- //
- // @since 3.17.0
- // An inlay hint that is for a type annotation.
- Type InlayHintKind = 1
- // An inlay hint that is for a parameter.
- Parameter InlayHintKind = 2
- // Describes how an {@link InlineCompletionItemProvider inline completion provider} was triggered.
- //
- // @since 3.18.0
- // @proposed
- // Completion was triggered explicitly by a user gesture.
- InlineInvoked InlineCompletionTriggerKind = 1
- // Completion was triggered automatically while editing.
- InlineAutomatic InlineCompletionTriggerKind = 2
- // Defines whether the insert text in a completion item should be interpreted as
- // plain text or a snippet.
- // The primary text to be inserted is treated as a plain string.
- PlainTextTextFormat InsertTextFormat = 1
- // The primary text to be inserted is treated as a snippet.
- //
- // A snippet can define tab stops and placeholders with `$1`, `$2`
- // and `${3:foo}`. `$0` defines the final tab stop, it defaults to
- // the end of the snippet. Placeholders with equal identifiers are linked,
- // that is typing in one will update others too.
- //
- // See also: https://microsoft.github.io/language-server-protocol/specifications/specification-current/#snippet_syntax
- SnippetTextFormat InsertTextFormat = 2
- // How whitespace and indentation is handled during completion
- // item insertion.
- //
- // @since 3.16.0
- // The insertion or replace string is taken as is. If the
- // value is multi-line, the lines below the cursor will be
- // inserted using the indentation defined in the string value.
- // The client will not apply any kind of adjustments to the
- // string.
- AsIs InsertTextMode = 1
- // The editor adjusts leading whitespace of new lines so that
- // they match the indentation up to the cursor of the line for
- // which the item is accepted.
- //
- // Consider a line like this: <2tabs><3tabs>foo. Accepting a
- // multi-line completion item indented using 2 tabs means all
- // following inserted lines will be indented using 2 tabs as well.
- AdjustIndentation InsertTextMode = 2
- // A request failed but it was syntactically correct, e.g. the
- // method name was known and the parameters were valid. The error
- // message should contain human readable information about why
- // the request failed.
- //
- // @since 3.17.0
- RequestFailed LSPErrorCodes = -32803
- // The server cancelled the request. This error code should
- // only be used for requests that explicitly support being
- // server cancellable.
- //
- // @since 3.17.0
- ServerCancelled LSPErrorCodes = -32802
- // The server detected that the content of a document got
- // modified outside normal conditions. A server should
- // NOT send this error code if it detects a content change
- // in its unprocessed messages. Even a result computed
- // on an older state might still be useful for the client.
- //
- // If a client decides that a result is not of any use anymore
- // the client should cancel the request.
- ContentModified LSPErrorCodes = -32801
- // The client has canceled a request and a server has detected
- // the cancel.
- RequestCancelled LSPErrorCodes = -32800
- // Predefined Language kinds
- // @since 3.18.0
- // @proposed
- LangABAP LanguageKind = "abap"
- LangWindowsBat LanguageKind = "bat"
- LangBibTeX LanguageKind = "bibtex"
- LangClojure LanguageKind = "clojure"
- LangCoffeescript LanguageKind = "coffeescript"
- LangC LanguageKind = "c"
- LangCPP LanguageKind = "cpp"
- LangCSharp LanguageKind = "csharp"
- LangCSS LanguageKind = "css"
- // @since 3.18.0
- // @proposed
- LangD LanguageKind = "d"
- // @since 3.18.0
- // @proposed
- LangDelphi LanguageKind = "pascal"
- LangDiff LanguageKind = "diff"
- LangDart LanguageKind = "dart"
- LangDockerfile LanguageKind = "dockerfile"
- LangElixir LanguageKind = "elixir"
- LangErlang LanguageKind = "erlang"
- LangFSharp LanguageKind = "fsharp"
- LangGitCommit LanguageKind = "git-commit"
- LangGitRebase LanguageKind = "rebase"
- LangGo LanguageKind = "go"
- LangGroovy LanguageKind = "groovy"
- LangHandlebars LanguageKind = "handlebars"
- LangHaskell LanguageKind = "haskell"
- LangHTML LanguageKind = "html"
- LangIni LanguageKind = "ini"
- LangJava LanguageKind = "java"
- LangJavaScript LanguageKind = "javascript"
- LangJavaScriptReact LanguageKind = "javascriptreact"
- LangJSON LanguageKind = "json"
- LangLaTeX LanguageKind = "latex"
- LangLess LanguageKind = "less"
- LangLua LanguageKind = "lua"
- LangMakefile LanguageKind = "makefile"
- LangMarkdown LanguageKind = "markdown"
- LangObjectiveC LanguageKind = "objective-c"
- LangObjectiveCPP LanguageKind = "objective-cpp"
- // @since 3.18.0
- // @proposed
- LangPascal LanguageKind = "pascal"
- LangPerl LanguageKind = "perl"
- LangPerl6 LanguageKind = "perl6"
- LangPHP LanguageKind = "php"
- LangPowershell LanguageKind = "powershell"
- LangPug LanguageKind = "jade"
- LangPython LanguageKind = "python"
- LangR LanguageKind = "r"
- LangRazor LanguageKind = "razor"
- LangRuby LanguageKind = "ruby"
- LangRust LanguageKind = "rust"
- LangSCSS LanguageKind = "scss"
- LangSASS LanguageKind = "sass"
- LangScala LanguageKind = "scala"
- LangShaderLab LanguageKind = "shaderlab"
- LangShellScript LanguageKind = "shellscript"
- LangSQL LanguageKind = "sql"
- LangSwift LanguageKind = "swift"
- LangTypeScript LanguageKind = "typescript"
- LangTypeScriptReact LanguageKind = "typescriptreact"
- LangTeX LanguageKind = "tex"
- LangVisualBasic LanguageKind = "vb"
- LangXML LanguageKind = "xml"
- LangXSL LanguageKind = "xsl"
- LangYAML LanguageKind = "yaml"
- // Describes the content type that a client supports in various
- // result literals like `Hover`, `ParameterInfo` or `CompletionItem`.
- //
- // Please note that `MarkupKinds` must not start with a `$`. These kinds
- // are reserved for internal usage.
- // Plain text is supported as a content format
- PlainText MarkupKind = "plaintext"
- // Markdown is supported as a content format
- Markdown MarkupKind = "markdown"
- // The message type
- // An error message.
- Error MessageType = 1
- // A warning message.
- Warning MessageType = 2
- // An information message.
- Info MessageType = 3
- // A log message.
- Log MessageType = 4
- // A debug message.
- //
- // @since 3.18.0
- // @proposed
- Debug MessageType = 5
- // The moniker kind.
- //
- // @since 3.16.0
- // The moniker represents a symbol that is imported into a project
- Import MonikerKind = "import"
- // The moniker represents a symbol that is exported from a project
- Export MonikerKind = "export"
- // The moniker represents a symbol that is local to a project (e.g. a local
- // variable of a function, a class not visible outside the project, ...)
- Local MonikerKind = "local"
- // A notebook cell kind.
- //
- // @since 3.17.0
- // A markup-cell is formatted source that is used for display.
- Markup NotebookCellKind = 1
- // A code-cell is source code.
- Code NotebookCellKind = 2
- // A set of predefined position encoding kinds.
- //
- // @since 3.17.0
- // Character offsets count UTF-8 code units (e.g. bytes).
- UTF8 PositionEncodingKind = "utf-8"
- // Character offsets count UTF-16 code units.
- //
- // This is the default and must always be supported
- // by servers
- UTF16 PositionEncodingKind = "utf-16"
- // Character offsets count UTF-32 code units.
- //
- // Implementation note: these are the same as Unicode codepoints,
- // so this `PositionEncodingKind` may also be used for an
- // encoding-agnostic representation of character offsets.
- UTF32 PositionEncodingKind = "utf-32"
- // The client's default behavior is to select the identifier
- // according to the language's syntax rule.
- Identifier PrepareSupportDefaultBehavior = 1
- // Supports creating new files and folders.
- Create ResourceOperationKind = "create"
- // Supports renaming existing files and folders.
- Rename ResourceOperationKind = "rename"
- // Supports deleting existing files and folders.
- Delete ResourceOperationKind = "delete"
- // A set of predefined token modifiers. This set is not fixed
- // and clients can specify additional token types via the
- // corresponding client capabilities.
- //
- // @since 3.16.0
- ModDeclaration SemanticTokenModifiers = "declaration"
- ModDefinition SemanticTokenModifiers = "definition"
- ModReadonly SemanticTokenModifiers = "readonly"
- ModStatic SemanticTokenModifiers = "static"
- ModDeprecated SemanticTokenModifiers = "deprecated"
- ModAbstract SemanticTokenModifiers = "abstract"
- ModAsync SemanticTokenModifiers = "async"
- ModModification SemanticTokenModifiers = "modification"
- ModDocumentation SemanticTokenModifiers = "documentation"
- ModDefaultLibrary SemanticTokenModifiers = "defaultLibrary"
- // A set of predefined token types. This set is not fixed
- // and clients can specify additional token types via the
- // corresponding client capabilities.
- //
- // @since 3.16.0
- NamespaceType SemanticTokenTypes = "namespace"
- // Represents a generic type. Acts as a fallback for types which can't be mapped to
- // a specific type like class or enum.
- TypeType SemanticTokenTypes = "type"
- ClassType SemanticTokenTypes = "class"
- EnumType SemanticTokenTypes = "enum"
- InterfaceType SemanticTokenTypes = "interface"
- StructType SemanticTokenTypes = "struct"
- TypeParameterType SemanticTokenTypes = "typeParameter"
- ParameterType SemanticTokenTypes = "parameter"
- VariableType SemanticTokenTypes = "variable"
- PropertyType SemanticTokenTypes = "property"
- EnumMemberType SemanticTokenTypes = "enumMember"
- EventType SemanticTokenTypes = "event"
- FunctionType SemanticTokenTypes = "function"
- MethodType SemanticTokenTypes = "method"
- MacroType SemanticTokenTypes = "macro"
- KeywordType SemanticTokenTypes = "keyword"
- ModifierType SemanticTokenTypes = "modifier"
- CommentType SemanticTokenTypes = "comment"
- StringType SemanticTokenTypes = "string"
- NumberType SemanticTokenTypes = "number"
- RegexpType SemanticTokenTypes = "regexp"
- OperatorType SemanticTokenTypes = "operator"
- // @since 3.17.0
- DecoratorType SemanticTokenTypes = "decorator"
- // @since 3.18.0
- LabelType SemanticTokenTypes = "label"
- // How a signature help was triggered.
- //
- // @since 3.15.0
- // Signature help was invoked manually by the user or by a command.
- SigInvoked SignatureHelpTriggerKind = 1
- // Signature help was triggered by a trigger character.
- SigTriggerCharacter SignatureHelpTriggerKind = 2
- // Signature help was triggered by the cursor moving or by the document content changing.
- SigContentChange SignatureHelpTriggerKind = 3
- // A symbol kind.
- File SymbolKind = 1
- Module SymbolKind = 2
- Namespace SymbolKind = 3
- Package SymbolKind = 4
- Class SymbolKind = 5
- Method SymbolKind = 6
- Property SymbolKind = 7
- Field SymbolKind = 8
- Constructor SymbolKind = 9
- Enum SymbolKind = 10
- Interface SymbolKind = 11
- Function SymbolKind = 12
- Variable SymbolKind = 13
- Constant SymbolKind = 14
- String SymbolKind = 15
- Number SymbolKind = 16
- Boolean SymbolKind = 17
- Array SymbolKind = 18
- Object SymbolKind = 19
- Key SymbolKind = 20
- Null SymbolKind = 21
- EnumMember SymbolKind = 22
- Struct SymbolKind = 23
- Event SymbolKind = 24
- Operator SymbolKind = 25
- TypeParameter SymbolKind = 26
- // Symbol tags are extra annotations that tweak the rendering of a symbol.
- //
- // @since 3.16
- // Render a symbol as obsolete, usually using a strike-out.
- DeprecatedSymbol SymbolTag = 1
- // Represents reasons why a text document is saved.
- // Manually triggered, e.g. by the user pressing save, by starting debugging,
- // or by an API call.
- Manual TextDocumentSaveReason = 1
- // Automatic after a delay.
- AfterDelay TextDocumentSaveReason = 2
- // When the editor lost focus.
- FocusOut TextDocumentSaveReason = 3
- // Defines how the host (editor) should sync
- // document changes to the language server.
- // Documents should not be synced at all.
- None TextDocumentSyncKind = 0
- // Documents are synced by always sending the full content
- // of the document.
- Full TextDocumentSyncKind = 1
- // Documents are synced by sending the full content on open.
- // After that only incremental updates to the document are
- // sent.
- Incremental TextDocumentSyncKind = 2
- Relative TokenFormat = "relative"
- // Turn tracing off.
- Off TraceValue = "off"
- // Trace messages only.
- Messages TraceValue = "messages"
- // Verbose message tracing.
- Verbose TraceValue = "verbose"
- // Moniker uniqueness level to define scope of the moniker.
- //
- // @since 3.16.0
- // The moniker is only unique inside a document
- Document UniquenessLevel = "document"
- // The moniker is unique inside a project for which a dump got created
- Project UniquenessLevel = "project"
- // The moniker is unique inside the group to which a project belongs
- Group UniquenessLevel = "group"
- // The moniker is unique inside the moniker scheme.
- Scheme UniquenessLevel = "scheme"
- // The moniker is globally unique
- Global UniquenessLevel = "global"
- // Interested in create events.
- WatchCreate WatchKind = 1
- // Interested in change events
- WatchChange WatchKind = 2
- // Interested in delete events
- WatchDelete WatchKind = 4
-)
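
The `WatchKind` values at the end of this block are bit flags: servers register interest by OR-ing them together, and the watcher code later in this diff tests them with a bitwise AND (e.g. `watchKind&protocol.WatchChange != 0`). A minimal sketch, assuming only the constants declared above:

```go
package main

import "fmt"

type WatchKind uint32

const (
	WatchCreate WatchKind = 1
	WatchChange WatchKind = 2
	WatchDelete WatchKind = 4
)

func main() {
	// A server interested in creations and deletions, but not changes.
	kind := WatchCreate | WatchDelete

	fmt.Println(kind&WatchChange != 0) // false: changes are not watched
	fmt.Println(kind&WatchDelete != 0) // true: deletions are watched
}
```
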
diff --git a/internal/lsp/protocol/uri.go b/internal/lsp/protocol/uri.go
deleted file mode 100644
index ccc45f23e46b3ea41ac28c525eca6c39c201695e..0000000000000000000000000000000000000000
--- a/internal/lsp/protocol/uri.go
+++ /dev/null
@@ -1,229 +0,0 @@
-// Copyright 2023 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package protocol
-
- // This file declares URI, DocumentUri, and their methods.
-//
-// For the LSP definition of these types, see
-// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#uri
-
-import (
- "fmt"
- "net/url"
- "path/filepath"
- "strings"
- "unicode"
-)
-
-// A DocumentURI is the URI of a client editor document.
-//
-// According to the LSP specification:
-//
-// Care should be taken to handle encoding in URIs. For
-// example, some clients (such as VS Code) may encode colons
-// in drive letters while others do not. The URIs below are
-// both valid, but clients and servers should be consistent
-// with the form they use themselves to ensure the other party
-// doesn’t interpret them as distinct URIs. Clients and
-// servers should not assume that each other are encoding the
-// same way (for example a client encoding colons in drive
-// letters cannot assume server responses will have encoded
-// colons). The same applies to casing of drive letters - one
-// party should not assume the other party will return paths
-// with drive letters cased the same as it.
-//
-// file:///c:/project/readme.md
-// file:///C%3A/project/readme.md
-//
-// This is done during JSON unmarshalling;
-// see [DocumentURI.UnmarshalText] for details.
-type DocumentURI string
-
-// A URI is an arbitrary URL (e.g. https), not necessarily a file.
-type URI = string
-
-// UnmarshalText implements decoding of DocumentUri values.
-//
-// In particular, it implements a systematic correction of various odd
-// features of the definition of DocumentUri in the LSP spec that
-// appear to be workarounds for bugs in VS Code. For example, it may
-// URI-encode the URI itself, so that colon becomes %3A, and it may
-// send file://foo.go URIs that have two slashes (not three) and no
-// hostname.
-//
-// We use UnmarshalText, not UnmarshalJSON, because it is called even
-// for non-addressable values such as keys and values of map[K]V,
-// where there is no pointer of type *K or *V on which to call
-// UnmarshalJSON. (See Go issue #28189 for more detail.)
-//
-// Non-empty DocumentUris are valid "file"-scheme URIs.
-// The empty DocumentUri is valid.
-func (uri *DocumentURI) UnmarshalText(data []byte) (err error) {
- *uri, err = ParseDocumentURI(string(data))
- return
-}
-
-// Path returns the file path for the given URI.
-//
-// DocumentUri("").Path() returns the empty string.
-//
- // Path returns an error if called on a URI that is not a valid filename.
-func (uri DocumentURI) Path() (string, error) {
- filename, err := filename(uri)
- if err != nil {
- // e.g. ParseRequestURI failed.
- //
- // This can only affect DocumentUris created by
- // direct string manipulation; all DocumentUris
- // received from the client pass through
- // ParseRequestURI, which ensures validity.
- return "", fmt.Errorf("invalid URI %q: %w", uri, err)
- }
- return filepath.FromSlash(filename), nil
-}
-
-// Dir returns the URI for the directory containing the receiver.
-func (uri DocumentURI) Dir() (DocumentURI, error) {
- // XXX: Legacy comment:
- // This function could be more efficiently implemented by avoiding any call
- // to Path(), but at least consolidates URI manipulation.
-
- path, err := uri.DirPath()
- if err != nil {
- return "", fmt.Errorf("invalid URI %q: %w", uri, err)
- }
-
- return URIFromPath(path), nil
-}
-
-// DirPath returns the file path to the directory containing this URI, which
-// must be a file URI.
-func (uri DocumentURI) DirPath() (string, error) {
- path, err := uri.Path()
- if err != nil {
- return "", err
- }
- return filepath.Dir(path), nil
-}
-
-func filename(uri DocumentURI) (string, error) {
- if uri == "" {
- return "", nil
- }
-
- // This conservative check for the common case
- // of a simple non-empty absolute POSIX filename
- // avoids the allocation of a net.URL.
- if strings.HasPrefix(string(uri), "file:///") {
- rest := string(uri)[len("file://"):] // leave one slash
- for i := range len(rest) {
- b := rest[i]
- // Reject these cases:
- if b < ' ' || b == 0x7f || // control character
- b == '%' || b == '+' || // URI escape
- b == ':' || // Windows drive letter
- b == '@' || b == '&' || b == '?' { // authority or query
- goto slow
- }
- }
- return rest, nil
- }
-slow:
-
- u, err := url.ParseRequestURI(string(uri))
- if err != nil {
- return "", err
- }
- if u.Scheme != fileScheme {
- return "", fmt.Errorf("only file URIs are supported, got %q from %q", u.Scheme, uri)
- }
- // If the URI is a Windows URI, we trim the leading "/" and uppercase
- // the drive letter, which will never be case sensitive.
- if isWindowsDriveURIPath(u.Path) {
- u.Path = strings.ToUpper(string(u.Path[1])) + u.Path[2:]
- }
-
- return u.Path, nil
-}
-
-// ParseDocumentURI interprets a string as a DocumentUri, applying VS
-// Code workarounds; see [DocumentURI.UnmarshalText] for details.
-func ParseDocumentURI(s string) (DocumentURI, error) {
- if s == "" {
- return "", nil
- }
-
- if !strings.HasPrefix(s, "file://") {
- return "", fmt.Errorf("DocumentUri scheme is not 'file': %s", s)
- }
-
- // VS Code sends URLs with only two slashes,
- // which are invalid. golang/go#39789.
- if !strings.HasPrefix(s, "file:///") {
- s = "file:///" + s[len("file://"):]
- }
-
- // Even though the input is a URI, it may not be in canonical form. VS Code
- // in particular over-escapes :, @, etc. Unescape and re-encode to canonicalize.
- path, err := url.PathUnescape(s[len("file://"):])
- if err != nil {
- return "", err
- }
-
- // File URIs from Windows may have lowercase drive letters.
- // Since drive letters are guaranteed to be case insensitive,
- // we change them to uppercase to remain consistent.
- // For example, file:///c:/x/y/z becomes file:///C:/x/y/z.
- if isWindowsDriveURIPath(path) {
- path = path[:1] + strings.ToUpper(string(path[1])) + path[2:]
- }
- u := url.URL{Scheme: fileScheme, Path: path}
- return DocumentURI(u.String()), nil
-}
-
-// URIFromPath returns DocumentUri for the supplied file path.
-// Given "", it returns "".
-func URIFromPath(path string) DocumentURI {
- if path == "" {
- return ""
- }
- if !isWindowsDrivePath(path) {
- if abs, err := filepath.Abs(path); err == nil {
- path = abs
- }
- }
- // Check the file path again, in case it became absolute.
- if isWindowsDrivePath(path) {
- path = "/" + strings.ToUpper(string(path[0])) + path[1:]
- }
- path = filepath.ToSlash(path)
- u := url.URL{
- Scheme: fileScheme,
- Path: path,
- }
- return DocumentURI(u.String())
-}
-
-const fileScheme = "file"
-
-// isWindowsDrivePath returns true if the file path is of the form used by
-// Windows. We check if the path begins with a drive letter, followed by a ":".
-// For example: C:/x/y/z.
-func isWindowsDrivePath(path string) bool {
- if len(path) < 3 {
- return false
- }
- return unicode.IsLetter(rune(path[0])) && path[1] == ':'
-}
-
-// isWindowsDriveURIPath returns true if the file URI is of the format used by
-// Windows URIs. The url.Parse package does not specially handle Windows paths
-// (see golang/go#6027), so we check if the URI path has a drive prefix (e.g. "/C:").
-func isWindowsDriveURIPath(uri string) bool {
- if len(uri) < 4 {
- return false
- }
- return uri[0] == '/' && unicode.IsLetter(rune(uri[1])) && uri[2] == ':'
-}
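
For reference, the removed helpers above were typically used to round-trip between OS paths and `file://` URIs. The sketch below targets the deleted API via its old import path (so it only builds against the pre-change tree), not the replacement package:

```go
package main

import (
	"fmt"
	"log"

	// Import path as it existed before this change removed the package.
	"github.com/charmbracelet/crush/internal/lsp/protocol"
)

func main() {
	uri := protocol.URIFromPath("/home/user/project/main.go")
	fmt.Println(uri) // file:///home/user/project/main.go

	// Path reverses URIFromPath, applying the VS Code workarounds the
	// deleted comments describe (two-slash URIs, escaped colons,
	// drive-letter casing).
	path, err := uri.Path()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(path) // /home/user/project/main.go (OS-specific separators)
}
```
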
diff --git a/internal/lsp/rootmarkers_test.go b/internal/lsp/rootmarkers_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..7b3a3c0905799865808b9b1ae0dff992e00ed34c
--- /dev/null
+++ b/internal/lsp/rootmarkers_test.go
@@ -0,0 +1,37 @@
+package lsp
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestHasRootMarkers(t *testing.T) {
+ t.Parallel()
+
+ // Create a temporary directory for testing
+ tmpDir := t.TempDir()
+
+ // Test with empty root markers (should return true)
+ require.True(t, HasRootMarkers(tmpDir, []string{}))
+
+ // Test with non-existent markers
+ require.False(t, HasRootMarkers(tmpDir, []string{"go.mod", "package.json"}))
+
+ // Create a go.mod file
+ goModPath := filepath.Join(tmpDir, "go.mod")
+ err := os.WriteFile(goModPath, []byte("module test"), 0o644)
+ require.NoError(t, err)
+
+ // Test with existing marker
+ require.True(t, HasRootMarkers(tmpDir, []string{"go.mod", "package.json"}))
+
+ // Test with only non-existent markers
+ require.False(t, HasRootMarkers(tmpDir, []string{"package.json", "Cargo.toml"}))
+
+ // Test with glob patterns
+ require.True(t, HasRootMarkers(tmpDir, []string{"*.mod"}))
+ require.False(t, HasRootMarkers(tmpDir, []string{"*.json"}))
+}
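
The new test pins down the expected behavior of `HasRootMarkers`: an empty marker list always matches, and both literal file names and glob patterns are checked relative to the directory. A minimal implementation sketch consistent with those assertions — not the actual implementation in `internal/lsp`:

```go
package lsp

import "path/filepath"

// hasRootMarkers is an illustrative sketch matching the test's expectations;
// the real HasRootMarkers may differ.
func hasRootMarkers(dir string, markers []string) bool {
	if len(markers) == 0 {
		return true // an empty marker list always matches
	}
	for _, marker := range markers {
		// filepath.Glob handles literal names ("go.mod") as well as
		// patterns ("*.mod") relative to dir.
		if matches, err := filepath.Glob(filepath.Join(dir, marker)); err == nil && len(matches) > 0 {
			return true
		}
	}
	return false
}
```
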
diff --git a/internal/lsp/transport.go b/internal/lsp/transport.go
deleted file mode 100644
index 483281d25c51a6bfb71ca3314419b570f9a6bf0d..0000000000000000000000000000000000000000
--- a/internal/lsp/transport.go
+++ /dev/null
@@ -1,284 +0,0 @@
-package lsp
-
-import (
- "bufio"
- "context"
- "encoding/json"
- "fmt"
- "io"
- "log/slog"
- "strings"
-
- "github.com/charmbracelet/crush/internal/config"
-)
-
-// WriteMessage writes an LSP message to the given writer
-func WriteMessage(w io.Writer, msg *Message) error {
- data, err := json.Marshal(msg)
- if err != nil {
- return fmt.Errorf("failed to marshal message: %w", err)
- }
- cfg := config.Get()
-
- if cfg.Options.DebugLSP {
- slog.Debug("Sending message to server", "method", msg.Method, "id", msg.ID)
- }
-
- _, err = fmt.Fprintf(w, "Content-Length: %d\r\n\r\n", len(data))
- if err != nil {
- return fmt.Errorf("failed to write header: %w", err)
- }
-
- _, err = w.Write(data)
- if err != nil {
- return fmt.Errorf("failed to write message: %w", err)
- }
-
- return nil
-}
-
-// ReadMessage reads a single LSP message from the given reader
-func ReadMessage(r *bufio.Reader) (*Message, error) {
- cfg := config.Get()
- // Read headers
- var contentLength int
- for {
- line, err := r.ReadString('\n')
- if err != nil {
- return nil, fmt.Errorf("failed to read header: %w", err)
- }
- line = strings.TrimSpace(line)
-
- if cfg.Options.DebugLSP {
- slog.Debug("Received header", "line", line)
- }
-
- if line == "" {
- break // End of headers
- }
-
- if strings.HasPrefix(line, "Content-Length: ") {
- _, err := fmt.Sscanf(line, "Content-Length: %d", &contentLength)
- if err != nil {
- return nil, fmt.Errorf("invalid Content-Length: %w", err)
- }
- }
- }
-
- if cfg.Options.DebugLSP {
- slog.Debug("Content-Length", "length", contentLength)
- }
-
- // Read content
- content := make([]byte, contentLength)
- _, err := io.ReadFull(r, content)
- if err != nil {
- return nil, fmt.Errorf("failed to read content: %w", err)
- }
-
- if cfg.Options.DebugLSP {
- slog.Debug("Received content", "content", string(content))
- }
-
- // Parse message
- var msg Message
- if err := json.Unmarshal(content, &msg); err != nil {
- return nil, fmt.Errorf("failed to unmarshal message: %w", err)
- }
-
- return &msg, nil
-}
-
-// handleMessages reads and dispatches messages in a loop
-func (c *Client) handleMessages() {
- cfg := config.Get()
- for {
- msg, err := ReadMessage(c.stdout)
- if err != nil {
- if cfg.Options.DebugLSP {
- slog.Error("Error reading message", "error", err)
- }
- return
- }
-
- // Handle server->client request (has both Method and ID)
- if msg.Method != "" && msg.ID != 0 {
- if cfg.Options.DebugLSP {
- slog.Debug("Received request from server", "method", msg.Method, "id", msg.ID)
- }
-
- response := &Message{
- JSONRPC: "2.0",
- ID: msg.ID,
- }
-
- // Look up handler for this method
- c.serverHandlersMu.RLock()
- handler, ok := c.serverRequestHandlers[msg.Method]
- c.serverHandlersMu.RUnlock()
-
- if ok {
- result, err := handler(msg.Params)
- if err != nil {
- response.Error = &ResponseError{
- Code: -32603,
- Message: err.Error(),
- }
- } else {
- rawJSON, err := json.Marshal(result)
- if err != nil {
- response.Error = &ResponseError{
- Code: -32603,
- Message: fmt.Sprintf("failed to marshal response: %v", err),
- }
- } else {
- response.Result = rawJSON
- }
- }
- } else {
- response.Error = &ResponseError{
- Code: -32601,
- Message: fmt.Sprintf("method not found: %s", msg.Method),
- }
- }
-
- // Send response back to server
- if err := WriteMessage(c.stdin, response); err != nil {
- slog.Error("Error sending response to server", "error", err)
- }
-
- continue
- }
-
- // Handle notification (has Method but no ID)
- if msg.Method != "" && msg.ID == 0 {
- c.notificationMu.RLock()
- handler, ok := c.notificationHandlers[msg.Method]
- c.notificationMu.RUnlock()
-
- if ok {
- if cfg.Options.DebugLSP {
- slog.Debug("Handling notification", "method", msg.Method)
- }
- go handler(msg.Params)
- } else if cfg.Options.DebugLSP {
- slog.Debug("No handler for notification", "method", msg.Method)
- }
- continue
- }
-
- // Handle response to our request (has ID but no Method)
- if msg.ID != 0 && msg.Method == "" {
- c.handlersMu.RLock()
- ch, ok := c.handlers[msg.ID]
- c.handlersMu.RUnlock()
-
- if ok {
- if cfg.Options.DebugLSP {
- slog.Debug("Received response for request", "id", msg.ID)
- }
- ch <- msg
- close(ch)
- } else if cfg.Options.DebugLSP {
- slog.Debug("No handler for response", "id", msg.ID)
- }
- }
- }
-}
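
The dispatch above keys entirely off which of `Method` and `ID` are set. A small self-contained sketch mirroring that classification (the `kind` helper and the stripped-down `Message` struct are hypothetical, added only for illustration):

```go
package main

import "fmt"

// Message is a minimal stand-in for the package's message type, with just
// the two fields the dispatcher inspects.
type Message struct {
	ID     int64
	Method string
}

// kind classifies an incoming message the same way handleMessages does.
func kind(msg *Message) string {
	switch {
	case msg.Method != "" && msg.ID != 0:
		return "server->client request" // answered with a response carrying msg.ID
	case msg.Method != "" && msg.ID == 0:
		return "notification" // dispatched to a notification handler, no reply expected
	case msg.ID != 0 && msg.Method == "":
		return "response" // routed to the channel registered for msg.ID
	default:
		return "unclassified"
	}
}

func main() {
	fmt.Println(kind(&Message{ID: 3, Method: "workspace/configuration"}))  // server->client request
	fmt.Println(kind(&Message{Method: "textDocument/publishDiagnostics"})) // notification
	fmt.Println(kind(&Message{ID: 3}))                                     // response
}
```
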
-
-// Call makes a request and waits for the response
-func (c *Client) Call(ctx context.Context, method string, params any, result any) error {
- if !c.IsMethodSupported(method) {
- return fmt.Errorf("method not supported by server: %s", method)
- }
- id := c.nextID.Add(1)
-
- cfg := config.Get()
- if cfg.Options.DebugLSP {
- slog.Debug("Making call", "method", method, "id", id)
- }
-
- msg, err := NewRequest(id, method, params)
- if err != nil {
- return fmt.Errorf("failed to create request: %w", err)
- }
-
- // Create response channel
- ch := make(chan *Message, 1)
- c.handlersMu.Lock()
- c.handlers[id] = ch
- c.handlersMu.Unlock()
-
- defer func() {
- c.handlersMu.Lock()
- delete(c.handlers, id)
- c.handlersMu.Unlock()
- }()
-
- // Send request
- if err := WriteMessage(c.stdin, msg); err != nil {
- return fmt.Errorf("failed to send request: %w", err)
- }
-
- if cfg.Options.DebugLSP {
- slog.Debug("Request sent", "method", method, "id", id)
- }
-
- // Wait for response
- select {
- case <-ctx.Done():
- return ctx.Err()
- case resp := <-ch:
- if cfg.Options.DebugLSP {
- slog.Debug("Received response", "id", id)
- }
-
- if resp.Error != nil {
- return fmt.Errorf("request failed: %s (code: %d)", resp.Error.Message, resp.Error.Code)
- }
-
- if result != nil {
- // If result is a json.RawMessage, just copy the raw bytes
- if rawMsg, ok := result.(*json.RawMessage); ok {
- *rawMsg = resp.Result
- return nil
- }
- // Otherwise unmarshal into the provided type
- if err := json.Unmarshal(resp.Result, result); err != nil {
- return fmt.Errorf("failed to unmarshal result: %w", err)
- }
- }
-
- return nil
- }
-}
-
-// Notify sends a notification (a request without an ID that doesn't expect a response)
-func (c *Client) Notify(ctx context.Context, method string, params any) error {
- cfg := config.Get()
- if !c.IsMethodSupported(method) {
- if cfg.Options.DebugLSP {
- slog.Debug("Skipping notification: method not supported by server", "method", method)
- }
- return nil
- }
- if cfg.Options.DebugLSP {
- slog.Debug("Sending notification", "method", method)
- }
-
- msg, err := NewNotification(method, params)
- if err != nil {
- return fmt.Errorf("failed to create notification: %w", err)
- }
-
- if err := WriteMessage(c.stdin, msg); err != nil {
- return fmt.Errorf("failed to send notification: %w", err)
- }
-
- return nil
-}
-
-type (
- NotificationHandler func(params json.RawMessage)
- ServerRequestHandler func(params json.RawMessage) (any, error)
-)
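
The framing implemented by `WriteMessage` and `ReadMessage` above is the LSP base protocol: a `Content-Length` header, a blank line (`\r\n\r\n`), then the JSON-RPC body. A standalone illustration that does not depend on this package:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	body, _ := json.Marshal(map[string]any{
		"jsonrpc": "2.0",
		"id":      1,
		"method":  "initialize",
		"params":  map[string]any{},
	})
	// Exactly the framing WriteMessage produces: header, CRLF CRLF, body.
	frame := fmt.Sprintf("Content-Length: %d\r\n\r\n%s", len(body), body)
	fmt.Printf("%q\n", frame)
}
```
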
diff --git a/internal/lsp/util/edit.go b/internal/lsp/util/edit.go
index 12d8e428a7214338bd7ef66c6d71dd512484b243..8b500ac67489e5fbcd0981a012dcf7a0c871f67e 100644
--- a/internal/lsp/util/edit.go
+++ b/internal/lsp/util/edit.go
@@ -7,7 +7,7 @@ import (
"sort"
"strings"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
func applyTextEdits(uri protocol.DocumentURI, edits []protocol.TextEdit) error {
diff --git a/internal/lsp/watcher/global_watcher.go b/internal/lsp/watcher/global_watcher.go
deleted file mode 100644
index 29b19f316ba0f654ae779526b5926b1fe9785819..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/global_watcher.go
+++ /dev/null
@@ -1,364 +0,0 @@
-package watcher
-
-import (
- "context"
- "fmt"
- "log/slog"
- "os"
- "sync"
- "sync/atomic"
- "time"
-
- "github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/csync"
- "github.com/charmbracelet/crush/internal/fsext"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
- "github.com/fsnotify/fsnotify"
-)
-
-// global manages a single fsnotify.Watcher instance shared across all LSP clients.
-//
-// IMPORTANT: This implementation only watches directories, not individual files.
-// The fsnotify library automatically provides events for all files within watched
-// directories, making this approach much more efficient than watching individual files.
-//
-// Key benefits of directory-only watching:
-// - Significantly fewer file descriptors used
-// - Automatic coverage of new files created in watched directories
-// - Better performance with large codebases
-// - fsnotify handles deduplication internally (no need to track watched dirs)
-type global struct {
- watcher *fsnotify.Watcher
-
- // Map of workspace watchers by client name
- watchers *csync.Map[string, *Client]
-
- // Single workspace root directory for ignore checking
- root string
-
- started atomic.Bool
-
- // Debouncing for file events (shared across all clients)
- debounceTime time.Duration
- debounceMap *csync.Map[string, *time.Timer]
-
- // Context for shutdown
- ctx context.Context
- cancel context.CancelFunc
-
- // Wait group for cleanup
- wg sync.WaitGroup
-}
-
-// instance returns the singleton global watcher instance
-var instance = sync.OnceValue(func() *global {
- ctx, cancel := context.WithCancel(context.Background())
- gw := &global{
- watchers: csync.NewMap[string, *Client](),
- debounceTime: 300 * time.Millisecond,
- debounceMap: csync.NewMap[string, *time.Timer](),
- ctx: ctx,
- cancel: cancel,
- }
-
- // Initialize the fsnotify watcher
- watcher, err := fsnotify.NewWatcher()
- if err != nil {
- slog.Error("lsp watcher: Failed to create global file watcher", "error", err)
- return gw
- }
-
- gw.watcher = watcher
-
- return gw
-})
-
-// register registers a workspace watcher with the global watcher
-func (gw *global) register(name string, watcher *Client) {
- gw.watchers.Set(name, watcher)
- slog.Debug("lsp watcher: Registered workspace watcher", "name", name)
-}
-
-// unregister removes a workspace watcher from the global watcher
-func (gw *global) unregister(name string) {
- gw.watchers.Del(name)
- slog.Debug("lsp watcher: Unregistered workspace watcher", "name", name)
-}
-
-// Start walks the given path and sets up the watcher on it.
-//
-// Note: We only watch directories, not individual files. fsnotify automatically provides
-// events for all files within watched directories. Multiple calls with the same workspace
-// are safe since fsnotify handles directory deduplication internally.
-func Start() error {
- gw := instance()
-
- // technically workspace root is always the same...
- if gw.started.Load() {
- slog.Debug("lsp watcher: watcher already set up, skipping")
- return nil
- }
-
- cfg := config.Get()
- root := cfg.WorkingDir()
- slog.Debug("lsp watcher: set workspace directory to global watcher", "path", root)
-
- // Store the workspace root for hierarchical ignore checking
- gw.root = root
- gw.started.Store(true)
-
- // Start the event processing goroutine now that we're initialized
- gw.wg.Add(1)
- go gw.processEvents()
-
- // Walk the workspace and add only directories to the watcher
- // fsnotify will automatically provide events for all files within these directories
- // Multiple calls with the same directories are safe (fsnotify deduplicates)
- err := fsext.WalkDirectories(root, func(path string, d os.DirEntry, err error) error {
- if err != nil {
- return err
- }
-
- // Add directory to watcher (fsnotify handles deduplication automatically)
- if err := gw.addDirectoryToWatcher(path); err != nil {
- slog.Error("lsp watcher: Error watching directory", "path", path, "error", err)
- }
-
- return nil
- })
- if err != nil {
- return fmt.Errorf("lsp watcher: error walking workspace %s: %w", root, err)
- }
-
- return nil
-}
-
-// addDirectoryToWatcher adds a directory to the fsnotify watcher.
-// fsnotify handles deduplication internally, so we don't need to track watched directories.
-func (gw *global) addDirectoryToWatcher(dirPath string) error {
- if gw.watcher == nil {
- return fmt.Errorf("lsp watcher: global watcher not initialized")
- }
-
- // Add directory to fsnotify watcher - fsnotify handles deduplication
- // "A path can only be watched once; watching it more than once is a no-op"
- err := gw.watcher.Add(dirPath)
- if err != nil {
- return fmt.Errorf("lsp watcher: failed to watch directory %s: %w", dirPath, err)
- }
-
- slog.Debug("lsp watcher: watching directory", "path", dirPath)
- return nil
-}
-
-// processEvents processes file system events and handles them centrally.
-// Since we only watch directories, we automatically get events for all files
-// within those directories. When new directories are created, we add them
-// to the watcher to ensure complete coverage.
-func (gw *global) processEvents() {
- defer gw.wg.Done()
- cfg := config.Get()
-
- if gw.watcher == nil || !gw.started.Load() {
- slog.Error("lsp watcher: Global watcher not initialized")
- return
- }
-
- for {
- select {
- case <-gw.ctx.Done():
- return
-
- case event, ok := <-gw.watcher.Events:
- if !ok {
- return
- }
-
- // Handle directory creation globally (only once)
- // When new directories are created, we need to add them to the watcher
- // to ensure we get events for files created within them
- if event.Op&fsnotify.Create != 0 {
- if info, err := os.Stat(event.Name); err == nil && info.IsDir() {
- if !fsext.ShouldExcludeFile(gw.root, event.Name) {
- if err := gw.addDirectoryToWatcher(event.Name); err != nil {
- slog.Error("lsp watcher: Error adding new directory to watcher", "path", event.Name, "error", err)
- }
- } else if cfg != nil && cfg.Options.DebugLSP {
- slog.Debug("lsp watcher: Skipping ignored new directory", "path", event.Name)
- }
- }
- }
-
- if cfg != nil && cfg.Options.DebugLSP {
- slog.Debug("lsp watcher: Global watcher received event", "path", event.Name, "op", event.Op.String())
- }
-
- // Process the event centrally
- gw.handleFileEvent(event)
-
- case err, ok := <-gw.watcher.Errors:
- if !ok {
- return
- }
- slog.Error("lsp watcher: Global watcher error", "error", err)
- }
- }
-}
-
-// handleFileEvent processes a file system event and distributes notifications to relevant clients
-func (gw *global) handleFileEvent(event fsnotify.Event) {
- cfg := config.Get()
- uri := string(protocol.URIFromPath(event.Name))
-
- // Handle file creation for all relevant clients (only once)
- if event.Op&fsnotify.Create != 0 {
- if info, err := os.Stat(event.Name); err == nil && !info.IsDir() {
- if !fsext.ShouldExcludeFile(gw.root, event.Name) {
- gw.openMatchingFileForClients(event.Name)
- }
- }
- }
-
- // Process the event for each relevant client
- for client, watcher := range gw.watchers.Seq2() {
- if !watcher.client.HandlesFile(event.Name) {
- continue // client doesn't handle this filetype
- }
-
- // Debug logging per client
- if cfg.Options.DebugLSP {
- matched, kind := watcher.isPathWatched(event.Name)
- slog.Debug("lsp watcher: File event for client",
- "path", event.Name,
- "operation", event.Op.String(),
- "watched", matched,
- "kind", kind,
- "client", client,
- )
- }
-
- // Check if this path should be watched according to server registrations
- if watched, watchKind := watcher.isPathWatched(event.Name); watched {
- switch {
- case event.Op&fsnotify.Write != 0:
- if watchKind&protocol.WatchChange != 0 {
- gw.debounceHandleFileEventForClient(watcher, uri, protocol.FileChangeType(protocol.Changed))
- }
- case event.Op&fsnotify.Create != 0:
- // File creation was already handled globally above
- // Just send the notification if needed
- info, err := os.Stat(event.Name)
- if err != nil {
- if !os.IsNotExist(err) {
- slog.Debug("lsp watcher: Error getting file info", "path", event.Name, "error", err)
- }
- continue
- }
- if !info.IsDir() && watchKind&protocol.WatchCreate != 0 {
- gw.debounceHandleFileEventForClient(watcher, uri, protocol.FileChangeType(protocol.Created))
- }
- case event.Op&fsnotify.Remove != 0:
- if watchKind&protocol.WatchDelete != 0 {
- gw.handleFileEventForClient(watcher, uri, protocol.FileChangeType(protocol.Deleted))
- }
- case event.Op&fsnotify.Rename != 0:
- // For renames, first delete
- if watchKind&protocol.WatchDelete != 0 {
- gw.handleFileEventForClient(watcher, uri, protocol.FileChangeType(protocol.Deleted))
- }
-
- // Then check if the new file exists and create an event
- if info, err := os.Stat(event.Name); err == nil && !info.IsDir() {
- if watchKind&protocol.WatchCreate != 0 {
- gw.debounceHandleFileEventForClient(watcher, uri, protocol.FileChangeType(protocol.Created))
- }
- }
- }
- }
- }
-}
-
-// openMatchingFileForClients opens a newly created file for all clients that handle it (only once per file)
-func (gw *global) openMatchingFileForClients(path string) {
- // Skip directories
- info, err := os.Stat(path)
- if err != nil || info.IsDir() {
- return
- }
-
- // Skip excluded files
- if fsext.ShouldExcludeFile(gw.root, path) {
- return
- }
-
- // Open the file for each client that handles it and has matching patterns
- for _, watcher := range gw.watchers.Seq2() {
- if watcher.client.HandlesFile(path) {
- watcher.openMatchingFile(gw.ctx, path)
- }
- }
-}
-
-// debounceHandleFileEventForClient handles file events with debouncing for a specific client
-func (gw *global) debounceHandleFileEventForClient(watcher *Client, uri string, changeType protocol.FileChangeType) {
- // Create a unique key based on URI, change type, and client name
- key := fmt.Sprintf("%s:%d:%s", uri, changeType, watcher.name)
-
- // Cancel existing timer if any
- if timer, exists := gw.debounceMap.Get(key); exists {
- timer.Stop()
- }
-
- // Create new timer
- gw.debounceMap.Set(key, time.AfterFunc(gw.debounceTime, func() {
- gw.handleFileEventForClient(watcher, uri, changeType)
-
- // Cleanup timer after execution
- gw.debounceMap.Del(key)
- }))
-}
-
-// handleFileEventForClient sends file change notifications to a specific client
-func (gw *global) handleFileEventForClient(watcher *Client, uri string, changeType protocol.FileChangeType) {
- // If the file is open and it's a change event, use didChange notification
- filePath, err := protocol.DocumentURI(uri).Path()
- if err != nil {
- slog.Error("lsp watcher: Error converting URI to path", "uri", uri, "error", err)
- return
- }
-
- if changeType == protocol.FileChangeType(protocol.Deleted) {
- watcher.client.ClearDiagnosticsForURI(protocol.DocumentURI(uri))
- } else if changeType == protocol.FileChangeType(protocol.Changed) && watcher.client.IsFileOpen(filePath) {
- err := watcher.client.NotifyChange(gw.ctx, filePath)
- if err != nil {
- slog.Error("lsp watcher: Error notifying change", "error", err)
- }
- return
- }
-
- // Notify LSP server about the file event using didChangeWatchedFiles
- if err := watcher.notifyFileEvent(gw.ctx, uri, changeType); err != nil {
- slog.Error("lsp watcher: Error notifying LSP server about file event", "error", err)
- }
-}
-
-// shutdown gracefully shuts down the global watcher
-func (gw *global) shutdown() {
- if gw.cancel != nil {
- gw.cancel()
- }
-
- if gw.watcher != nil {
- gw.watcher.Close()
- gw.watcher = nil
- }
-
- gw.wg.Wait()
- slog.Debug("lsp watcher: Global watcher shutdown complete")
-}
-
-// Shutdown shuts down the singleton global watcher
-func Shutdown() {
- instance().shutdown()
-}
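
The per-event debouncing removed here (`debounceHandleFileEventForClient`) is a standard keyed-timer pattern: keep one timer per key, reset it on every new event, and act only when a timer is allowed to fire. A standalone sketch of the pattern, using a plain map and mutex instead of the `csync` map above:

```go
package main

import (
	"fmt"
	"sync"
	"time"
)

type debouncer struct {
	mu     sync.Mutex
	delay  time.Duration
	timers map[string]*time.Timer
}

// trigger schedules fn to run after the delay; repeated triggers for the
// same key within the delay window reset the timer, so fn runs once.
func (d *debouncer) trigger(key string, fn func()) {
	d.mu.Lock()
	defer d.mu.Unlock()
	if t, ok := d.timers[key]; ok {
		t.Stop()
	}
	d.timers[key] = time.AfterFunc(d.delay, func() {
		fn()
		d.mu.Lock()
		delete(d.timers, key)
		d.mu.Unlock()
	})
}

func main() {
	d := &debouncer{delay: 300 * time.Millisecond, timers: map[string]*time.Timer{}}
	for i := 0; i < 5; i++ {
		d.trigger("file:///tmp/main.go:changed", func() { fmt.Println("notify once") })
	}
	time.Sleep(500 * time.Millisecond) // "notify once" is printed a single time
}
```
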
diff --git a/internal/lsp/watcher/global_watcher_test.go b/internal/lsp/watcher/global_watcher_test.go
deleted file mode 100644
index 09124cd6a570b9b46b003b06b5f76dcbcbef22ff..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/global_watcher_test.go
+++ /dev/null
@@ -1,297 +0,0 @@
-package watcher
-
-import (
- "context"
- "os"
- "path/filepath"
- "testing"
- "time"
-
- "github.com/charmbracelet/crush/internal/csync"
- "github.com/fsnotify/fsnotify"
-)
-
-func TestGlobalWatcher(t *testing.T) {
- t.Parallel()
-
- // Test that we can get the global watcher instance
- gw1 := instance()
- if gw1 == nil {
- t.Fatal("Expected global watcher instance, got nil")
- }
-
- // Test that subsequent calls return the same instance (singleton)
- gw2 := instance()
- if gw1 != gw2 {
- t.Fatal("Expected same global watcher instance, got different instances")
- }
-
- // Test registration and unregistration
- mockWatcher := &Client{
- name: "test-watcher",
- }
-
- gw1.register("test", mockWatcher)
-
- // Check that it was registered
- registered, _ := gw1.watchers.Get("test")
-
- if registered != mockWatcher {
- t.Fatal("Expected workspace watcher to be registered")
- }
-
- // Test unregistration
- gw1.unregister("test")
-
- unregistered, _ := gw1.watchers.Get("test")
-
- if unregistered != nil {
- t.Fatal("Expected workspace watcher to be unregistered")
- }
-}
-
-func TestGlobalWatcherWorkspaceIdempotent(t *testing.T) {
- t.Parallel()
-
- // Create a temporary directory for testing
- tempDir := t.TempDir()
-
- // Create a new global watcher instance for this test
- ctx, cancel := context.WithCancel(context.Background())
- defer cancel()
-
- // Create a real fsnotify watcher for testing
- watcher, err := fsnotify.NewWatcher()
- if err != nil {
- t.Fatalf("Failed to create fsnotify watcher: %v", err)
- }
- defer watcher.Close()
-
- gw := &global{
- watcher: watcher,
- watchers: csync.NewMap[string, *Client](),
- debounceTime: 300 * time.Millisecond,
- debounceMap: csync.NewMap[string, *time.Timer](),
- ctx: ctx,
- cancel: cancel,
- }
-
- // Test that watching the same workspace multiple times is safe (idempotent)
- err1 := gw.addDirectoryToWatcher(tempDir)
- if err1 != nil {
- t.Fatalf("First addDirectoryToWatcher call failed: %v", err1)
- }
-
- err2 := gw.addDirectoryToWatcher(tempDir)
- if err2 != nil {
- t.Fatalf("Second addDirectoryToWatcher call failed: %v", err2)
- }
-
- err3 := gw.addDirectoryToWatcher(tempDir)
- if err3 != nil {
- t.Fatalf("Third addDirectoryToWatcher call failed: %v", err3)
- }
-
- // All calls should succeed - fsnotify handles deduplication internally
- // This test verifies that multiple WatchWorkspace calls are safe
-}
-
-func TestGlobalWatcherOnlyWatchesDirectories(t *testing.T) {
- t.Parallel()
-
- // Create a temporary directory structure for testing
- tempDir := t.TempDir()
- subDir := filepath.Join(tempDir, "subdir")
- if err := os.Mkdir(subDir, 0o755); err != nil {
- t.Fatalf("Failed to create subdirectory: %v", err)
- }
-
- // Create some files
- file1 := filepath.Join(tempDir, "file1.txt")
- file2 := filepath.Join(subDir, "file2.txt")
- if err := os.WriteFile(file1, []byte("content1"), 0o644); err != nil {
- t.Fatalf("Failed to create file1: %v", err)
- }
- if err := os.WriteFile(file2, []byte("content2"), 0o644); err != nil {
- t.Fatalf("Failed to create file2: %v", err)
- }
-
- // Create a new global watcher instance for this test
- ctx, cancel := context.WithCancel(context.Background())
- defer cancel()
-
- // Create a real fsnotify watcher for testing
- watcher, err := fsnotify.NewWatcher()
- if err != nil {
- t.Fatalf("Failed to create fsnotify watcher: %v", err)
- }
- defer watcher.Close()
-
- gw := &global{
- watcher: watcher,
- watchers: csync.NewMap[string, *Client](),
- debounceTime: 300 * time.Millisecond,
- debounceMap: csync.NewMap[string, *time.Timer](),
- ctx: ctx,
- cancel: cancel,
- }
-
- // Watch the workspace
- err = gw.addDirectoryToWatcher(tempDir)
- if err != nil {
- t.Fatalf("addDirectoryToWatcher failed: %v", err)
- }
-
- // Verify that our expected directories exist and can be watched
- expectedDirs := []string{tempDir, subDir}
-
- for _, expectedDir := range expectedDirs {
- info, err := os.Stat(expectedDir)
- if err != nil {
- t.Fatalf("Expected directory %s doesn't exist: %v", expectedDir, err)
- }
- if !info.IsDir() {
- t.Fatalf("Expected %s to be a directory, but it's not", expectedDir)
- }
-
- // Try to add it again - fsnotify should handle this gracefully
- err = gw.addDirectoryToWatcher(expectedDir)
- if err != nil {
- t.Fatalf("Failed to add directory %s to watcher: %v", expectedDir, err)
- }
- }
-
- // Verify that files exist but we don't try to watch them directly
- testFiles := []string{file1, file2}
- for _, file := range testFiles {
- info, err := os.Stat(file)
- if err != nil {
- t.Fatalf("Test file %s doesn't exist: %v", file, err)
- }
- if info.IsDir() {
- t.Fatalf("Expected %s to be a file, but it's a directory", file)
- }
- }
-}
-
-func TestFsnotifyDeduplication(t *testing.T) {
- t.Parallel()
-
- // Create a temporary directory for testing
- tempDir := t.TempDir()
-
- // Create a real fsnotify watcher
- watcher, err := fsnotify.NewWatcher()
- if err != nil {
- t.Fatalf("Failed to create fsnotify watcher: %v", err)
- }
- defer watcher.Close()
-
- // Add the same directory multiple times
- err1 := watcher.Add(tempDir)
- if err1 != nil {
- t.Fatalf("First Add failed: %v", err1)
- }
-
- err2 := watcher.Add(tempDir)
- if err2 != nil {
- t.Fatalf("Second Add failed: %v", err2)
- }
-
- err3 := watcher.Add(tempDir)
- if err3 != nil {
- t.Fatalf("Third Add failed: %v", err3)
- }
-
- // All should succeed - fsnotify handles deduplication internally
- // This test verifies the fsnotify behavior we're relying on
-}
-
-func TestGlobalWatcherRespectsIgnoreFiles(t *testing.T) {
- t.Parallel()
-
- // Create a temporary directory structure for testing
- tempDir := t.TempDir()
-
- // Create directories that should be ignored
- nodeModules := filepath.Join(tempDir, "node_modules")
- target := filepath.Join(tempDir, "target")
- customIgnored := filepath.Join(tempDir, "custom_ignored")
- normalDir := filepath.Join(tempDir, "src")
-
- for _, dir := range []string{nodeModules, target, customIgnored, normalDir} {
- if err := os.MkdirAll(dir, 0o755); err != nil {
- t.Fatalf("Failed to create directory %s: %v", dir, err)
- }
- }
-
- // Create .gitignore file
- gitignoreContent := "node_modules/\ntarget/\n"
- if err := os.WriteFile(filepath.Join(tempDir, ".gitignore"), []byte(gitignoreContent), 0o644); err != nil {
- t.Fatalf("Failed to create .gitignore: %v", err)
- }
-
- // Create .crushignore file
- crushignoreContent := "custom_ignored/\n"
- if err := os.WriteFile(filepath.Join(tempDir, ".crushignore"), []byte(crushignoreContent), 0o644); err != nil {
- t.Fatalf("Failed to create .crushignore: %v", err)
- }
-
- // Create a new global watcher instance for this test
- ctx, cancel := context.WithCancel(context.Background())
- defer cancel()
-
- // Create a real fsnotify watcher for testing
- watcher, err := fsnotify.NewWatcher()
- if err != nil {
- t.Fatalf("Failed to create fsnotify watcher: %v", err)
- }
- defer watcher.Close()
-
- gw := &global{
- watcher: watcher,
- watchers: csync.NewMap[string, *Client](),
- debounceTime: 300 * time.Millisecond,
- debounceMap: csync.NewMap[string, *time.Timer](),
- ctx: ctx,
- cancel: cancel,
- }
-
- // Watch the workspace
- err = gw.addDirectoryToWatcher(tempDir)
- if err != nil {
- t.Fatalf("addDirectoryToWatcher failed: %v", err)
- }
-
- // This test verifies that the watcher can successfully add directories to fsnotify
- // The actual ignore logic is tested in the fsext package
- // Here we just verify that the watcher integration works
-}
-
-func TestGlobalWatcherShutdown(t *testing.T) {
- t.Parallel()
-
- // Create a new context for this test
- ctx, cancel := context.WithCancel(context.Background())
- defer cancel()
-
- // Create a temporary global watcher for testing
- gw := &global{
- watchers: csync.NewMap[string, *Client](),
- debounceTime: 300 * time.Millisecond,
- debounceMap: csync.NewMap[string, *time.Timer](),
- ctx: ctx,
- cancel: cancel,
- }
-
- // Test shutdown doesn't panic
- gw.shutdown()
-
- // Verify context was cancelled
- select {
- case <-gw.ctx.Done():
- // Expected
- case <-time.After(100 * time.Millisecond):
- t.Fatal("Expected context to be cancelled after shutdown")
- }
-}
diff --git a/internal/lsp/watcher/ulimit_bsd.go b/internal/lsp/watcher/ulimit_bsd.go
deleted file mode 100644
index 816e82adee5e57341b7e392e117b245a7ca4a0dc..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/ulimit_bsd.go
+++ /dev/null
@@ -1,25 +0,0 @@
-//go:build freebsd || openbsd || netbsd || dragonfly
-
-package watcher
-
-import "syscall"
-
-func Ulimit() (uint64, error) {
- var currentLimit uint64 = 0
- var rLimit syscall.Rlimit
- err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return 0, err
- }
- currentLimit = uint64(rLimit.Cur)
- rLimit.Cur = rLimit.Max / 10 * 8
- err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- err = syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- return uint64(rLimit.Cur), nil
-}
diff --git a/internal/lsp/watcher/ulimit_darwin.go b/internal/lsp/watcher/ulimit_darwin.go
deleted file mode 100644
index a53f143bd0341e5fc7ac95441c2246eb7ffb2ccb..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/ulimit_darwin.go
+++ /dev/null
@@ -1,24 +0,0 @@
-//go:build darwin
-
-package watcher
-
-import "syscall"
-
-func Ulimit() (uint64, error) {
- var rLimit syscall.Rlimit
- err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return 0, err
- }
- currentLimit := rLimit.Cur
- rLimit.Cur = rLimit.Max / 10 * 8
- err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- err = syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- return rLimit.Cur, nil
-}
diff --git a/internal/lsp/watcher/ulimit_fallback.go b/internal/lsp/watcher/ulimit_fallback.go
deleted file mode 100644
index 118554f25a34aa5921b1773c72d87dc3975324a7..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/ulimit_fallback.go
+++ /dev/null
@@ -1,8 +0,0 @@
-//go:build !linux && !darwin && !freebsd && !openbsd && !netbsd && !dragonfly && !windows
-
-package watcher
-
-func Ulimit() (uint64, error) {
- // Fallback for exotic systems - return a reasonable default
- return 2048, nil
-}
diff --git a/internal/lsp/watcher/ulimit_linux.go b/internal/lsp/watcher/ulimit_linux.go
deleted file mode 100644
index 298fcad96710eb106ee607ac823962450f892bf3..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/ulimit_linux.go
+++ /dev/null
@@ -1,25 +0,0 @@
-//go:build linux
-
-package watcher
-
-import "syscall"
-
-func Ulimit() (uint64, error) {
- var currentLimit uint64 = 0
- var rLimit syscall.Rlimit
- err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return 0, err
- }
- currentLimit = rLimit.Cur
- rLimit.Cur = rLimit.Max / 10 * 8
- err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- err = syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
- if err != nil {
- return currentLimit, err
- }
- return rLimit.Cur, nil
-}
diff --git a/internal/lsp/watcher/ulimit_windows.go b/internal/lsp/watcher/ulimit_windows.go
deleted file mode 100644
index 14afbabeea1ce4818bb59a3fc8c5e2ee1fa8432a..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/ulimit_windows.go
+++ /dev/null
@@ -1,38 +0,0 @@
-//go:build windows
-
-package watcher
-
-import (
- "syscall"
- "unsafe"
-
- "golang.org/x/sys/windows"
-)
-
-var (
- kernel32 = windows.NewLazyDLL("kernel32.dll")
- procGetProcessHandleCount = kernel32.NewProc("GetProcessHandleCount")
-)
-
-func Ulimit() (uint64, error) {
- // Windows doesn't have the same file descriptor limits as Unix systems
- // Instead, we can get the current handle count for monitoring purposes
- currentProcess := windows.CurrentProcess()
-
- var handleCount uint32
- ret, _, err := procGetProcessHandleCount.Call(
- uintptr(currentProcess),
- uintptr(unsafe.Pointer(&handleCount)),
- )
-
- if ret == 0 {
- // If the call failed, return a reasonable default
- if err != syscall.Errno(0) {
- return 2048, nil
- }
- }
-
- // Windows typically allows much higher handle counts than Unix file descriptors
- // Return the current count, which serves as a baseline for monitoring
- return uint64(handleCount), nil
-}
diff --git a/internal/lsp/watcher/watcher.go b/internal/lsp/watcher/watcher.go
deleted file mode 100644
index 139d144e1e5c65c11962e73201b42b15cd09f98a..0000000000000000000000000000000000000000
--- a/internal/lsp/watcher/watcher.go
+++ /dev/null
@@ -1,556 +0,0 @@
-package watcher
-
-import (
- "context"
- "fmt"
- "log/slog"
- "os"
- "path/filepath"
- "strings"
- "time"
-
- "github.com/bmatcuk/doublestar/v4"
- "github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/csync"
-
- "github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
-)
-
-// Client manages LSP file watching for a specific client
-// It now delegates actual file watching to the GlobalWatcher
-type Client struct {
- client *lsp.Client
- name string
- workspacePath string
-
- // File watchers registered by the server
- registrations *csync.Slice[protocol.FileSystemWatcher]
-}
-
-func init() {
- // Ensure the watcher is initialized with a reasonable file limit
- if _, err := Ulimit(); err != nil {
- slog.Error("Error setting file limit", "error", err)
- }
-}
-
-// New creates a new workspace watcher for the given client.
-func New(name string, client *lsp.Client) *Client {
- return &Client{
- name: name,
- client: client,
- registrations: csync.NewSlice[protocol.FileSystemWatcher](),
- }
-}
-
-// register adds file watchers to track
-func (w *Client) register(ctx context.Context, id string, watchers []protocol.FileSystemWatcher) {
- cfg := config.Get()
-
- w.registrations.Append(watchers...)
-
- if cfg.Options.DebugLSP {
- slog.Debug("Adding file watcher registrations",
- "id", id,
- "watchers", len(watchers),
- "total", w.registrations.Len(),
- )
-
- for i, watcher := range watchers {
- slog.Debug("Registration", "index", i+1)
-
- // Log the GlobPattern
- switch v := watcher.GlobPattern.Value.(type) {
- case string:
- slog.Debug("GlobPattern", "pattern", v)
- case protocol.RelativePattern:
- slog.Debug("GlobPattern", "pattern", v.Pattern)
-
- // Log BaseURI details
- switch u := v.BaseURI.Value.(type) {
- case string:
- slog.Debug("BaseURI", "baseURI", u)
- case protocol.DocumentURI:
- slog.Debug("BaseURI", "baseURI", u)
- default:
- slog.Debug("BaseURI", "baseURI", u)
- }
- default:
- slog.Debug("GlobPattern unknown type", "type", fmt.Sprintf("%T", v))
- }
-
- // Log WatchKind
- watchKind := protocol.WatchKind(protocol.WatchChange | protocol.WatchCreate | protocol.WatchDelete)
- if watcher.Kind != nil {
- watchKind = *watcher.Kind
- }
-
- slog.Debug("WatchKind", "kind", watchKind)
- }
- }
-
- // For servers that need file preloading, open high-priority files only
- if shouldPreloadFiles(w.name) {
- go func() {
- highPriorityFilesOpened := w.openHighPriorityFiles(ctx, w.name)
- if cfg.Options.DebugLSP {
- slog.Debug("Opened high-priority files",
- "count", highPriorityFilesOpened,
- "serverName", w.name)
- }
- }()
- }
-}
-
-// openHighPriorityFiles opens important files for the server type
-// Returns the number of files opened
-func (w *Client) openHighPriorityFiles(ctx context.Context, serverName string) int {
- cfg := config.Get()
- filesOpened := 0
-
- // Define patterns for high-priority files based on server type
- var patterns []string
-
- // TODO: move this to LSP config
- switch serverName {
- case "typescript", "typescript-language-server", "tsserver", "vtsls":
- patterns = []string{
- "**/tsconfig.json",
- "**/package.json",
- "**/jsconfig.json",
- "**/index.ts",
- "**/index.js",
- "**/main.ts",
- "**/main.js",
- }
- case "gopls":
- patterns = []string{
- "**/go.mod",
- "**/go.sum",
- "**/main.go",
- }
- case "rust-analyzer":
- patterns = []string{
- "**/Cargo.toml",
- "**/Cargo.lock",
- "**/src/lib.rs",
- "**/src/main.rs",
- }
- case "python", "pyright", "pylsp":
- patterns = []string{
- "**/pyproject.toml",
- "**/setup.py",
- "**/requirements.txt",
- "**/__init__.py",
- "**/__main__.py",
- }
- case "clangd":
- patterns = []string{
- "**/CMakeLists.txt",
- "**/Makefile",
- "**/compile_commands.json",
- }
- case "java", "jdtls":
- patterns = []string{
- "**/pom.xml",
- "**/build.gradle",
- "**/src/main/java/**/*.java",
- }
- default:
- // For unknown servers, use common configuration files
- patterns = []string{
- "**/package.json",
- "**/Makefile",
- "**/CMakeLists.txt",
- "**/.editorconfig",
- }
- }
-
- // Collect all files to open first
- var filesToOpen []string
-
- // For each pattern, find matching files
- for _, pattern := range patterns {
- // Use doublestar.Glob to find files matching the pattern (supports ** patterns)
- matches, err := doublestar.Glob(os.DirFS(w.workspacePath), pattern)
- if err != nil {
- if cfg.Options.DebugLSP {
- slog.Debug("Error finding high-priority files", "pattern", pattern, "error", err)
- }
- continue
- }
-
- for _, match := range matches {
- // Convert relative path to absolute
- fullPath := filepath.Join(w.workspacePath, match)
-
- // Skip directories and excluded files
- info, err := os.Stat(fullPath)
- if err != nil || info.IsDir() || shouldExcludeFile(fullPath) {
- continue
- }
-
- filesToOpen = append(filesToOpen, fullPath)
-
- // Limit the number of files per pattern
- if len(filesToOpen) >= 5 && (serverName != "java" && serverName != "jdtls") {
- break
- }
- }
- }
-
- // Open files in batches to reduce overhead
- batchSize := 3
- for i := 0; i < len(filesToOpen); i += batchSize {
- end := min(i+batchSize, len(filesToOpen))
-
- // Open batch of files
- for j := i; j < end; j++ {
- fullPath := filesToOpen[j]
- if err := w.client.OpenFile(ctx, fullPath); err != nil {
- if cfg.Options.DebugLSP {
- slog.Debug("Error opening high-priority file", "path", fullPath, "error", err)
- }
- } else {
- filesOpened++
- if cfg.Options.DebugLSP {
- slog.Debug("Opened high-priority file", "path", fullPath)
- }
- }
- }
-
- // Only add delay between batches, not individual files
- if end < len(filesToOpen) {
- time.Sleep(50 * time.Millisecond)
- }
- }
-
- return filesOpened
-}
-
-// Watch sets up file watching for a workspace using the global watcher
-func (w *Client) Watch(ctx context.Context, workspacePath string) {
- w.workspacePath = workspacePath
-
- slog.Debug("Starting workspace watcher", "workspacePath", workspacePath, "serverName", w.name)
-
- // Register this workspace watcher with the global watcher
- instance().register(w.name, w)
- defer instance().unregister(w.name)
-
- // Register handler for file watcher registrations from the server
- lsp.RegisterFileWatchHandler(func(id string, watchers []protocol.FileSystemWatcher) {
- w.register(ctx, id, watchers)
- })
-
- // Wait for context cancellation
- <-ctx.Done()
- slog.Debug("Workspace watcher stopped", "name", w.name)
-}
-
-// isPathWatched checks if a path should be watched based on server registrations
-// If no explicit registrations, watch everything
-func (w *Client) isPathWatched(path string) (bool, protocol.WatchKind) {
- if w.registrations.Len() == 0 {
- return true, protocol.WatchKind(protocol.WatchChange | protocol.WatchCreate | protocol.WatchDelete)
- }
-
- // Check each registration
- for reg := range w.registrations.Seq() {
- isMatch := w.matchesPattern(path, reg.GlobPattern)
- if isMatch {
- kind := protocol.WatchKind(protocol.WatchChange | protocol.WatchCreate | protocol.WatchDelete)
- if reg.Kind != nil {
- kind = *reg.Kind
- }
- return true, kind
- }
- }
-
- return false, 0
-}
-
-// matchesGlob handles glob patterns using the doublestar library
-func matchesGlob(pattern, path string) bool {
- // Use doublestar for all glob matching - it handles ** and other complex patterns
- matched, err := doublestar.Match(pattern, path)
- if err != nil {
- slog.Error("Error matching pattern", "pattern", pattern, "path", path, "error", err)
- return false
- }
- return matched
-}
-
-// matchesPattern checks if a path matches the glob pattern
-func (w *Client) matchesPattern(path string, pattern protocol.GlobPattern) bool {
- patternInfo, err := pattern.AsPattern()
- if err != nil {
- slog.Error("Error parsing pattern", "pattern", pattern, "error", err)
- return false
- }
-
- basePath := patternInfo.GetBasePath()
- patternText := patternInfo.GetPattern()
-
- path = filepath.ToSlash(path)
-
- // For simple patterns without base path
- if basePath == "" {
- // Check if the pattern matches the full path or just the file extension
- fullPathMatch := matchesGlob(patternText, path)
- baseNameMatch := matchesGlob(patternText, filepath.Base(path))
-
- return fullPathMatch || baseNameMatch
- }
-
- if basePath == "" {
- return false
- }
-
- // Make path relative to basePath for matching
- relPath, err := filepath.Rel(basePath, path)
- if err != nil {
- slog.Error("Error getting relative path", "path", path, "basePath", basePath, "error", err, "server", w.name)
- return false
- }
- relPath = filepath.ToSlash(relPath)
-
- isMatch := matchesGlob(patternText, relPath)
-
- return isMatch
-}
-
-// notifyFileEvent sends a didChangeWatchedFiles notification for a file event
-func (w *Client) notifyFileEvent(ctx context.Context, uri string, changeType protocol.FileChangeType) error {
- cfg := config.Get()
- if cfg.Options.DebugLSP {
- slog.Debug("Notifying file event",
- "uri", uri,
- "changeType", changeType,
- )
- }
-
- params := protocol.DidChangeWatchedFilesParams{
- Changes: []protocol.FileEvent{
- {
- URI: protocol.DocumentURI(uri),
- Type: changeType,
- },
- },
- }
-
- return w.client.DidChangeWatchedFiles(ctx, params)
-}
-
-// shouldPreloadFiles determines if we should preload files for a specific language server
-// Some servers work better with preloaded files, others don't need it
-func shouldPreloadFiles(serverName string) bool {
- // TypeScript/JavaScript servers typically need some files preloaded
- // to properly resolve imports and provide intellisense
- switch serverName {
- case "typescript", "typescript-language-server", "tsserver", "vtsls":
- return true
- case "java", "jdtls":
- // Java servers often need to see source files to build the project model
- return true
- default:
- // For most servers, we'll use lazy loading by default
- return false
- }
-}
-
-// Common patterns for directories and files to exclude
-// TODO: make configurable
-var (
- excludedFileExtensions = map[string]bool{
- ".swp": true,
- ".swo": true,
- ".tmp": true,
- ".temp": true,
- ".bak": true,
- ".log": true,
- ".o": true, // Object files
- ".so": true, // Shared libraries
- ".dylib": true, // macOS shared libraries
- ".dll": true, // Windows shared libraries
- ".a": true, // Static libraries
- ".exe": true, // Windows executables
- ".lock": true, // Lock files
- }
-
- // Large binary files that shouldn't be opened
- largeBinaryExtensions = map[string]bool{
- ".png": true,
- ".jpg": true,
- ".jpeg": true,
- ".gif": true,
- ".bmp": true,
- ".ico": true,
- ".zip": true,
- ".tar": true,
- ".gz": true,
- ".rar": true,
- ".7z": true,
- ".pdf": true,
- ".mp3": true,
- ".mp4": true,
- ".mov": true,
- ".wav": true,
- ".wasm": true,
- }
-
- // Maximum file size to open (5MB)
- maxFileSize int64 = 5 * 1024 * 1024
-)
-
-// shouldExcludeFile returns true if the file should be excluded from opening
-func shouldExcludeFile(filePath string) bool {
- fileName := filepath.Base(filePath)
- cfg := config.Get()
-
- // Skip dot files
- if strings.HasPrefix(fileName, ".") {
- return true
- }
-
- // Check file extension
- ext := strings.ToLower(filepath.Ext(filePath))
- if excludedFileExtensions[ext] || largeBinaryExtensions[ext] {
- return true
- }
-
- info, err := os.Stat(filePath)
- if err != nil {
- // If we can't stat the file, skip it
- return true
- }
-
- // Skip large files
- if info.Size() > maxFileSize {
- if cfg.Options.DebugLSP {
- slog.Debug("Skipping large file",
- "path", filePath,
- "size", info.Size(),
- "maxSize", maxFileSize,
- "debug", cfg.Options.Debug,
- "sizeMB", float64(info.Size())/(1024*1024),
- "maxSizeMB", float64(maxFileSize)/(1024*1024),
- )
- }
- return true
- }
-
- return false
-}
-
-// openMatchingFile opens a file if it matches any of the registered patterns
-func (w *Client) openMatchingFile(ctx context.Context, path string) {
- cfg := config.Get()
- // Skip directories
- info, err := os.Stat(path)
- if err != nil || info.IsDir() {
- return
- }
-
- // Skip excluded files
- if shouldExcludeFile(path) {
- return
- }
-
- // Check if this path should be watched according to server registrations
- if watched, _ := w.isPathWatched(path); !watched {
- return
- }
-
- serverName := w.name
-
- // Get server name for specialized handling
- // Check if the file is a high-priority file that should be opened immediately
- // This helps with project initialization for certain language servers
- if isHighPriorityFile(path, serverName) {
- if cfg.Options.DebugLSP {
- slog.Debug("Opening high-priority file", "path", path, "serverName", serverName)
- }
- if err := w.client.OpenFile(ctx, path); err != nil && cfg.Options.DebugLSP {
- slog.Error("Error opening high-priority file", "path", path, "error", err)
- }
- return
- }
-
- // For non-high-priority files, we'll use different strategies based on server type
- if !shouldPreloadFiles(serverName) {
- return
- }
- // For servers that benefit from preloading, open files but with limits
-
- // Check file size - for preloading we're more conservative
- if info.Size() > (1 * 1024 * 1024) { // 1MB limit for preloaded files
- if cfg.Options.DebugLSP {
- slog.Debug("Skipping large file for preloading", "path", path, "size", info.Size())
- }
- return
- }
-
- // File type is already validated by HandlesFile() and isPathWatched() checks earlier,
- // so we know this client handles this file type. Just open it.
- if err := w.client.OpenFile(ctx, path); err != nil && cfg.Options.DebugLSP {
- slog.Error("Error opening file", "path", path, "error", err)
- }
-}
-
-// isHighPriorityFile determines if a file should be opened immediately
-// regardless of the preloading strategy
-func isHighPriorityFile(path string, serverName string) bool {
- fileName := filepath.Base(path)
- ext := filepath.Ext(path)
-
- switch serverName {
- case "typescript", "typescript-language-server", "tsserver", "vtsls":
- // For TypeScript, we want to open configuration files immediately
- return fileName == "tsconfig.json" ||
- fileName == "package.json" ||
- fileName == "jsconfig.json" ||
- // Also open main entry points
- fileName == "index.ts" ||
- fileName == "index.js" ||
- fileName == "main.ts" ||
- fileName == "main.js"
- case "gopls":
- // For Go, we want to open go.mod files immediately
- return fileName == "go.mod" ||
- fileName == "go.sum" ||
- // Also open main.go files
- fileName == "main.go"
- case "rust-analyzer":
- // For Rust, we want to open Cargo.toml files immediately
- return fileName == "Cargo.toml" ||
- fileName == "Cargo.lock" ||
- // Also open lib.rs and main.rs
- fileName == "lib.rs" ||
- fileName == "main.rs"
- case "python", "pyright", "pylsp":
- // For Python, open key project files
- return fileName == "pyproject.toml" ||
- fileName == "setup.py" ||
- fileName == "requirements.txt" ||
- fileName == "__init__.py" ||
- fileName == "__main__.py"
- case "clangd":
- // For C/C++, open key project files
- return fileName == "CMakeLists.txt" ||
- fileName == "Makefile" ||
- fileName == "compile_commands.json"
- case "java", "jdtls":
- // For Java, open key project files
- return fileName == "pom.xml" ||
- fileName == "build.gradle" ||
- ext == ".java" // Java servers often need to see source files
- }
-
- // For unknown servers, prioritize common configuration files
- return fileName == "package.json" ||
- fileName == "Makefile" ||
- fileName == "CMakeLists.txt" ||
- fileName == ".editorconfig"
-}
diff --git a/internal/session/session.go b/internal/session/session.go
index d988dac3414fa7dd00d13b375e1309f8d6c515dd..f83f66ffa4d1cfb75c6a0d41f09caebcb1c64cf3 100644
--- a/internal/session/session.go
+++ b/internal/session/session.go
@@ -5,6 +5,7 @@ import (
"database/sql"
"github.com/charmbracelet/crush/internal/db"
+ "github.com/charmbracelet/crush/internal/event"
"github.com/charmbracelet/crush/internal/pubsub"
"github.com/google/uuid"
)
@@ -48,6 +49,7 @@ func (s *service) Create(ctx context.Context, title string) (Session, error) {
}
session := s.fromDBItem(dbSession)
s.Publish(pubsub.CreatedEvent, session)
+ event.SessionCreated()
return session, nil
}
@@ -89,6 +91,7 @@ func (s *service) Delete(ctx context.Context, id string) error {
return err
}
s.Publish(pubsub.DeletedEvent, session)
+ event.SessionDeleted()
return nil
}
diff --git a/internal/shell/shell.go b/internal/shell/shell.go
index ef3abf8d30d37490e452478abe38ef39efd8a7fa..5a10be9537714162e4d5ed25360b42690395793f 100644
--- a/internal/shell/shell.go
+++ b/internal/shell/shell.go
@@ -207,7 +207,7 @@ func splitArgsFlags(parts []string) (args []string, flags []string) {
args = append(args, part)
}
}
- return
+ return args, flags
}
func (s *Shell) blockHandler() func(next interp.ExecHandlerFunc) interp.ExecHandlerFunc {
diff --git a/internal/tui/components/anim/example/main.go b/internal/tui/components/anim/example/main.go
deleted file mode 100644
index 0bf47654ecbeeb3293c8ad59b40ec35016607b1c..0000000000000000000000000000000000000000
--- a/internal/tui/components/anim/example/main.go
+++ /dev/null
@@ -1,90 +0,0 @@
-package main
-
-import (
- "fmt"
- "image/color"
- "os"
-
- tea "github.com/charmbracelet/bubbletea/v2"
- anim "github.com/charmbracelet/crush/internal/tui/components/anim"
- "github.com/charmbracelet/crush/internal/tui/styles"
- "github.com/charmbracelet/lipgloss/v2"
-)
-
-type model struct {
- anim tea.Model
- bgColor color.Color
- quitting bool
- w, h int
-}
-
-func (m model) Init() tea.Cmd {
- return m.anim.Init()
-}
-
-func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
- switch msg := msg.(type) {
- case tea.WindowSizeMsg:
- m.w, m.h = msg.Width, msg.Height
- return m, nil
- case tea.KeyMsg:
- switch msg.String() {
- case "q", "ctrl+c":
- m.quitting = true
- return m, tea.Quit
- default:
- return m, nil
- }
- case anim.StepMsg:
- var cmd tea.Cmd
- m.anim, cmd = m.anim.Update(msg)
- return m, cmd
- default:
- return m, nil
- }
-}
-
-func (m model) View() tea.View {
- if m.w == 0 || m.h == 0 {
- return tea.NewView("")
- }
-
- v := tea.NewView("")
- v.BackgroundColor = m.bgColor
-
- if m.quitting {
- return v
- }
-
- if a, ok := m.anim.(*anim.Anim); ok {
- l := lipgloss.NewLayer(a.View()).
- Width(a.Width()).
- X(m.w/2 - a.Width()/2).
- Y(m.h / 2)
-
- v = tea.NewView(lipgloss.NewCanvas(l))
- v.BackgroundColor = m.bgColor
- return v
- }
- return v
-}
-
-func main() {
- t := styles.CurrentTheme()
- p := tea.NewProgram(model{
- bgColor: t.BgBase,
- anim: anim.New(anim.Settings{
- Label: "Hello",
- Size: 50,
- LabelColor: t.FgBase,
- GradColorA: t.Primary,
- GradColorB: t.Secondary,
- CycleColors: true,
- }),
- }, tea.WithAltScreen())
-
- if _, err := p.Run(); err != nil {
- fmt.Fprintf(os.Stderr, "Uh oh: %v\n", err)
- os.Exit(1)
- }
-}
diff --git a/internal/tui/components/chat/editor/editor.go b/internal/tui/components/chat/editor/editor.go
index 4fbae325def8b0aca6de94622e4d64b4edf1983e..97254f20b51864b1495323b7b7250aee2867fc46 100644
--- a/internal/tui/components/chat/editor/editor.go
+++ b/internal/tui/components/chat/editor/editor.go
@@ -80,7 +80,7 @@ var DeleteKeyMaps = DeleteAttachmentKeyMaps{
key.WithHelp("ctrl+r+{i}", "delete attachment at index i"),
),
Escape: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel delete mode"),
),
DeleteAllAttachments: key.NewBinding(
@@ -548,7 +548,9 @@ func (m *editorCmp) SetPosition(x, y int) tea.Cmd {
}
func (m *editorCmp) startCompletions() tea.Msg {
- files, _, _ := m.listDirResolver()(".", nil, 0)
+ ls := m.app.Config().Options.TUI.Completions
+ depth, limit := ls.Limits()
+ files, _, _ := m.listDirResolver()(".", nil, depth, limit)
slices.Sort(files)
completionItems := make([]completions.Completion, 0, len(files))
for _, file := range files {
diff --git a/internal/tui/components/chat/editor/keys.go b/internal/tui/components/chat/editor/keys.go
index 9d2274753b4667031bb43a76f54fce18c1decf51..8bc8b2354dfb72120d9e6173256635e903d012fd 100644
--- a/internal/tui/components/chat/editor/keys.go
+++ b/internal/tui/components/chat/editor/keys.go
@@ -61,7 +61,7 @@ var AttachmentsKeyMaps = DeleteAttachmentKeyMaps{
key.WithHelp("ctrl+r+{i}", "delete attachment at index i"),
),
Escape: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel delete mode"),
),
DeleteAllAttachments: key.NewBinding(
diff --git a/internal/tui/components/chat/header/header.go b/internal/tui/components/chat/header/header.go
index edcdc6960123056fc61df7a4332b106d1f417ab0..21861a4a2eda1340f6e01c0748f24cb713f15398 100644
--- a/internal/tui/components/chat/header/header.go
+++ b/internal/tui/components/chat/header/header.go
@@ -6,15 +6,16 @@ import (
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
"github.com/charmbracelet/crush/internal/pubsub"
"github.com/charmbracelet/crush/internal/session"
"github.com/charmbracelet/crush/internal/tui/styles"
"github.com/charmbracelet/crush/internal/tui/util"
"github.com/charmbracelet/lipgloss/v2"
"github.com/charmbracelet/x/ansi"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
type Header interface {
@@ -28,11 +29,11 @@ type Header interface {
type header struct {
width int
session session.Session
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
detailsOpen bool
}
-func New(lspClients map[string]*lsp.Client) Header {
+func New(lspClients *csync.Map[string, *lsp.Client]) Header {
return &header{
lspClients: lspClients,
width: 0,
@@ -104,7 +105,7 @@ func (h *header) details(availWidth int) string {
var parts []string
errorCount := 0
- for _, l := range h.lspClients {
+ for l := range h.lspClients.Seq() {
for _, diagnostics := range l.GetDiagnostics() {
for _, diagnostic := range diagnostics {
if diagnostic.Severity == protocol.SeverityError {
diff --git a/internal/tui/components/chat/messages/messages.go b/internal/tui/components/chat/messages/messages.go
index ec55800aab85a2dbb07153c12300dbad892b3b6a..296b02478a7d0738fef2f60ae6b2211d44424a2f 100644
--- a/internal/tui/components/chat/messages/messages.go
+++ b/internal/tui/components/chat/messages/messages.go
@@ -29,7 +29,7 @@ import (
var CopyKey = key.NewBinding(key.WithKeys("c", "y", "C", "Y"), key.WithHelp("c/y", "copy"))
// ClearSelectionKey is the key binding for clearing the current selection in the chat interface.
-var ClearSelectionKey = key.NewBinding(key.WithKeys("esc"), key.WithHelp("esc", "clear selection"))
+var ClearSelectionKey = key.NewBinding(key.WithKeys("esc", "alt+esc"), key.WithHelp("esc", "clear selection"))
// MessageCmp defines the interface for message components in the chat interface.
// It combines standard UI model interfaces with message-specific functionality.
@@ -281,15 +281,14 @@ func (m *messageCmp) renderThinkingContent() string {
if reasoningContent.StartedAt > 0 {
duration := m.message.ThinkingDuration()
if reasoningContent.FinishedAt > 0 {
- if duration.String() == "0s" {
- return ""
- }
m.anim.SetLabel("")
opts := core.StatusOpts{
Title: "Thought for",
Description: duration.String(),
}
- return t.S().Base.PaddingLeft(1).Render(core.Status(opts, m.textWidth()-1))
+ if duration.String() != "0s" {
+ footer = t.S().Base.PaddingLeft(1).Render(core.Status(opts, m.textWidth()-1))
+ }
} else if finishReason != nil && finishReason.Reason == message.FinishReasonCanceled {
footer = t.S().Base.PaddingLeft(1).Render(m.toMarkdown("*Canceled*"))
} else {
diff --git a/internal/tui/components/chat/sidebar/sidebar.go b/internal/tui/components/chat/sidebar/sidebar.go
index 236c5d2e31c6e7f81482757ff750f572e23cc3fb..b50a78c7f8697e4f4db19649a01794cfe7a23bac 100644
--- a/internal/tui/components/chat/sidebar/sidebar.go
+++ b/internal/tui/components/chat/sidebar/sidebar.go
@@ -69,13 +69,13 @@ type sidebarCmp struct {
session session.Session
logo string
cwd string
- lspClients map[string]*lsp.Client
+ lspClients *csync.Map[string, *lsp.Client]
compactMode bool
history history.Service
files *csync.Map[string, SessionFile]
}
-func New(history history.Service, lspClients map[string]*lsp.Client, compact bool) Sidebar {
+func New(history history.Service, lspClients *csync.Map[string, *lsp.Client], compact bool) Sidebar {
return &sidebarCmp{
lspClients: lspClients,
history: history,
diff --git a/internal/tui/components/chat/splash/keys.go b/internal/tui/components/chat/splash/keys.go
index 675c608a94af4aa72b701376f3983506166ac7d7..d36c8d8e7ee2231ef8bc27eb053a5745a0bd3885 100644
--- a/internal/tui/components/chat/splash/keys.go
+++ b/internal/tui/components/chat/splash/keys.go
@@ -46,7 +46,7 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("←/→", "switch"),
),
Back: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "back"),
),
}
diff --git a/internal/tui/components/chat/splash/splash.go b/internal/tui/components/chat/splash/splash.go
index 7fa46cdd279a2cbe98a86654a23e81a49bc8aebf..187fc35e6ec47a858b99f35e135a8cef3500fbf1 100644
--- a/internal/tui/components/chat/splash/splash.go
+++ b/internal/tui/components/chat/splash/splash.go
@@ -253,6 +253,7 @@ func (s *splashCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return s, cmd
}
if s.needsProjectInit {
+ s.selectedNo = false
return s, s.initializeProject()
}
case key.Matches(msg, s.keyMap.No):
@@ -397,7 +398,8 @@ func (s *splashCmp) setPreferredModel(selectedItem models.ModelOption) tea.Cmd {
}
func (s *splashCmp) getProvider(providerID catwalk.InferenceProvider) (*catwalk.Provider, error) {
- providers, err := config.Providers()
+ cfg := config.Get()
+ providers, err := config.Providers(cfg)
if err != nil {
return nil, err
}
diff --git a/internal/tui/components/completions/keys.go b/internal/tui/components/completions/keys.go
index 82372358028aec2b1384f1b4b6bff90be4a05eb8..dec1059f8cde34b7a65faad279ebe551a2108a3a 100644
--- a/internal/tui/components/completions/keys.go
+++ b/internal/tui/components/completions/keys.go
@@ -28,7 +28,7 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("enter", "select"),
),
Cancel: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
DownInsert: key.NewBinding(
diff --git a/internal/tui/components/core/core.go b/internal/tui/components/core/core.go
index 18de56b17f08e4513bde34fe9fef7aaf4e08c09f..80c28ba1e11c4ddeb7e6da1f4802577d23e8b4dc 100644
--- a/internal/tui/components/core/core.go
+++ b/internal/tui/components/core/core.go
@@ -110,14 +110,17 @@ func Status(opts StatusOpts, width int) string {
extraContentWidth += 1
}
description = ansi.Truncate(description, width-lipgloss.Width(icon)-lipgloss.Width(title)-2-extraContentWidth, "…")
+ description = t.S().Base.Foreground(descriptionColor).Render(description)
}
- description = t.S().Base.Foreground(descriptionColor).Render(description)
content := []string{}
if icon != "" {
content = append(content, icon)
}
- content = append(content, title, description)
+ content = append(content, title)
+ if description != "" {
+ content = append(content, description)
+ }
if opts.ExtraContent != "" {
content = append(content, opts.ExtraContent)
}
diff --git a/internal/tui/components/core/testdata/TestStatus/EmptyDescription.golden b/internal/tui/components/core/testdata/TestStatus/EmptyDescription.golden
index 5b396377658610dd0fbc0746fd960f2faaf76f49..db4acad54383ecbc2cc50061ee5ba77491dc545d 100644
--- a/internal/tui/components/core/testdata/TestStatus/EmptyDescription.golden
+++ b/internal/tui/components/core/testdata/TestStatus/EmptyDescription.golden
@@ -1 +1 @@
-● [38;2;133;131;146mTitle Only[m [38;2;96;95;107m[m
\ No newline at end of file
+● [38;2;133;131;146mTitle Only[m
\ No newline at end of file
diff --git a/internal/tui/components/dialogs/commands/commands.go b/internal/tui/components/dialogs/commands/commands.go
index 756e687c693da971e9ddd8bb72f08b9fc23eedae..664158fc392a87d8a7725bfa964748f7ef4f8e67 100644
--- a/internal/tui/components/dialogs/commands/commands.go
+++ b/internal/tui/components/dialogs/commands/commands.go
@@ -60,17 +60,18 @@ type commandDialogCmp struct {
}
type (
- SwitchSessionsMsg struct{}
- NewSessionsMsg struct{}
- SwitchModelMsg struct{}
- QuitMsg struct{}
- OpenFilePickerMsg struct{}
- ToggleHelpMsg struct{}
- ToggleCompactModeMsg struct{}
- ToggleThinkingMsg struct{}
- OpenExternalEditorMsg struct{}
- ToggleYoloModeMsg struct{}
- CompactMsg struct {
+ SwitchSessionsMsg struct{}
+ NewSessionsMsg struct{}
+ SwitchModelMsg struct{}
+ QuitMsg struct{}
+ OpenFilePickerMsg struct{}
+ ToggleHelpMsg struct{}
+ ToggleCompactModeMsg struct{}
+ ToggleThinkingMsg struct{}
+ OpenReasoningDialogMsg struct{}
+ OpenExternalEditorMsg struct{}
+ ToggleYoloModeMsg struct{}
+ CompactMsg struct {
SessionID string
}
)
@@ -300,26 +301,41 @@ func (c *commandDialogCmp) defaultCommands() []Command {
})
}
- // Only show thinking toggle for Anthropic models that can reason
+ // Add reasoning toggle for models that support it
cfg := config.Get()
if agentCfg, ok := cfg.Agents["coder"]; ok {
providerCfg := cfg.GetProviderForModel(agentCfg.Model)
model := cfg.GetModelByType(agentCfg.Model)
- if providerCfg != nil && model != nil &&
- providerCfg.Type == catwalk.TypeAnthropic && model.CanReason {
+ if providerCfg != nil && model != nil && model.CanReason {
selectedModel := cfg.Models[agentCfg.Model]
- status := "Enable"
- if selectedModel.Think {
- status = "Disable"
+
+ // Anthropic models: thinking toggle
+ if providerCfg.Type == catwalk.TypeAnthropic {
+ status := "Enable"
+ if selectedModel.Think {
+ status = "Disable"
+ }
+ commands = append(commands, Command{
+ ID: "toggle_thinking",
+ Title: status + " Thinking Mode",
+ Description: "Toggle model thinking for reasoning-capable models",
+ Handler: func(cmd Command) tea.Cmd {
+ return util.CmdHandler(ToggleThinkingMsg{})
+ },
+ })
+ }
+
+ // OpenAI models: reasoning effort dialog
+ if providerCfg.Type == catwalk.TypeOpenAI && model.HasReasoningEffort {
+ commands = append(commands, Command{
+ ID: "select_reasoning_effort",
+ Title: "Select Reasoning Effort",
+ Description: "Choose reasoning effort level (low/medium/high)",
+ Handler: func(cmd Command) tea.Cmd {
+ return util.CmdHandler(OpenReasoningDialogMsg{})
+ },
+ })
}
- commands = append(commands, Command{
- ID: "toggle_thinking",
- Title: status + " Thinking Mode",
- Description: "Toggle model thinking for reasoning-capable models",
- Handler: func(cmd Command) tea.Cmd {
- return util.CmdHandler(ToggleThinkingMsg{})
- },
- })
}
}
// Only show toggle compact mode command if window width is larger than compact breakpoint (90)
diff --git a/internal/tui/components/dialogs/commands/keys.go b/internal/tui/components/dialogs/commands/keys.go
index 9685216817c02cdfaab682f94e0f89aa64af365f..7b79a29c28a024154a3b4d8c763969585409fd00 100644
--- a/internal/tui/components/dialogs/commands/keys.go
+++ b/internal/tui/components/dialogs/commands/keys.go
@@ -31,7 +31,7 @@ func DefaultCommandsDialogKeyMap() CommandsDialogKeyMap {
key.WithHelp("tab", "switch selection"),
),
Close: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
}
diff --git a/internal/tui/components/dialogs/compact/compact.go b/internal/tui/components/dialogs/compact/compact.go
index 86455e3139b4d0eb43baaf509b0fa0e039dd4939..ecde402fd8dfe1f31791834cd4e4bae13ec45e00 100644
--- a/internal/tui/components/dialogs/compact/compact.go
+++ b/internal/tui/components/dialogs/compact/compact.go
@@ -104,17 +104,24 @@ func (c *compactDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}
case agent.AgentEvent:
- if msg.Type == agent.AgentEventTypeSummarize {
+ switch msg.Type {
+ case agent.AgentEventTypeSummarize:
if msg.Error != nil {
c.state = stateError
c.progress = "Error: " + msg.Error.Error()
} else if msg.Done {
- return c, util.CmdHandler(
- dialogs.CloseDialogMsg{},
- )
+ return c, util.CmdHandler(dialogs.CloseDialogMsg{})
} else {
c.progress = msg.Progress
}
+ case agent.AgentEventTypeError:
+ // Handle errors that occur during summarization but are sent as separate error events.
+ c.state = stateError
+ if msg.Error != nil {
+ c.progress = "Error: " + msg.Error.Error()
+ } else {
+ c.progress = "An unknown error occurred"
+ }
}
return c, nil
}
diff --git a/internal/tui/components/dialogs/compact/keys.go b/internal/tui/components/dialogs/compact/keys.go
index c3dd98e13035085b7d46e7a2e94450b25a7f0d59..cec1486491e342c28f148a50d37f1129944c002e 100644
--- a/internal/tui/components/dialogs/compact/keys.go
+++ b/internal/tui/components/dialogs/compact/keys.go
@@ -33,7 +33,7 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("n", "no"),
),
Close: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
}
diff --git a/internal/tui/components/dialogs/filepicker/keys.go b/internal/tui/components/dialogs/filepicker/keys.go
index 9f3b706e3cf677b66cbc3136a7b98a466470d949..72e32f2ab9dd07d8b7165aee74744e8be5fd78e8 100644
--- a/internal/tui/components/dialogs/filepicker/keys.go
+++ b/internal/tui/components/dialogs/filepicker/keys.go
@@ -38,7 +38,7 @@ func DefaultKeyMap() KeyMap {
),
Close: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "close/exit"),
),
}
diff --git a/internal/tui/components/dialogs/keys.go b/internal/tui/components/dialogs/keys.go
index c382b7e09e15de04efb5b2520bc490ef9d57b985..264ce3d42f6a99f441f961128f109e6baebf4c1b 100644
--- a/internal/tui/components/dialogs/keys.go
+++ b/internal/tui/components/dialogs/keys.go
@@ -12,7 +12,7 @@ type KeyMap struct {
func DefaultKeyMap() KeyMap {
return KeyMap{
Close: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
),
}
}
diff --git a/internal/tui/components/dialogs/models/keys.go b/internal/tui/components/dialogs/models/keys.go
index df546863d87d3a68777e51938f58eee28a5c6473..4ec1a487e865981edc0be5852bb6c044ddf04c1f 100644
--- a/internal/tui/components/dialogs/models/keys.go
+++ b/internal/tui/components/dialogs/models/keys.go
@@ -19,7 +19,7 @@ func DefaultKeyMap() KeyMap {
return KeyMap{
Select: key.NewBinding(
key.WithKeys("enter", "ctrl+y"),
- key.WithHelp("enter", "confirm"),
+ key.WithHelp("enter", "choose"),
),
Next: key.NewBinding(
key.WithKeys("down", "ctrl+n"),
@@ -34,8 +34,8 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("tab", "toggle type"),
),
Close: key.NewBinding(
- key.WithKeys("esc"),
- key.WithHelp("esc", "cancel"),
+ key.WithKeys("esc", "alt+esc"),
+ key.WithHelp("esc", "exit"),
),
}
}
diff --git a/internal/tui/components/dialogs/models/list.go b/internal/tui/components/dialogs/models/list.go
index 66b55d85b299cb0bacb4cc2466c7b4146248ba05..77398c4d17d85126ab155a9e9c5b2085c0691672 100644
--- a/internal/tui/components/dialogs/models/list.go
+++ b/internal/tui/components/dialogs/models/list.go
@@ -49,7 +49,8 @@ func NewModelListComponent(keyMap list.KeyMap, inputPlaceholder string, shouldRe
func (m *ModelListComponent) Init() tea.Cmd {
var cmds []tea.Cmd
if len(m.providers) == 0 {
- providers, err := config.Providers()
+ cfg := config.Get()
+ providers, err := config.Providers(cfg)
filteredProviders := []catwalk.Provider{}
for _, p := range providers {
hasAPIKeyEnv := strings.HasPrefix(p.APIKey, "$")
@@ -119,7 +120,7 @@ func (m *ModelListComponent) SetModelType(modelType int) tea.Cmd {
// First, add any configured providers that are not in the known providers list
// These should appear at the top of the list
- knownProviders, err := config.Providers()
+ knownProviders, err := config.Providers(cfg)
if err != nil {
return util.ReportError(err)
}
diff --git a/internal/tui/components/dialogs/models/models.go b/internal/tui/components/dialogs/models/models.go
index 3d9443332dad2a0e23f4aedcc9ddc45249914c64..7c2863706c29180cffcfb88c385a012e39df464c 100644
--- a/internal/tui/components/dialogs/models/models.go
+++ b/internal/tui/components/dialogs/models/models.go
@@ -170,8 +170,10 @@ func (m *modelDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
util.CmdHandler(dialogs.CloseDialogMsg{}),
util.CmdHandler(ModelSelectedMsg{
Model: config.SelectedModel{
- Model: selectedItem.Model.ID,
- Provider: string(selectedItem.Provider.ID),
+ Model: selectedItem.Model.ID,
+ Provider: string(selectedItem.Provider.ID),
+ ReasoningEffort: selectedItem.Model.DefaultReasoningEffort,
+ MaxTokens: selectedItem.Model.DefaultMaxTokens,
},
ModelType: modelType,
}),
@@ -350,7 +352,8 @@ func (m *modelDialogCmp) isProviderConfigured(providerID string) bool {
}
func (m *modelDialogCmp) getProvider(providerID catwalk.InferenceProvider) (*catwalk.Provider, error) {
- providers, err := config.Providers()
+ cfg := config.Get()
+ providers, err := config.Providers(cfg)
if err != nil {
return nil, err
}
@@ -379,8 +382,10 @@ func (m *modelDialogCmp) saveAPIKeyAndContinue(apiKey string) tea.Cmd {
util.CmdHandler(dialogs.CloseDialogMsg{}),
util.CmdHandler(ModelSelectedMsg{
Model: config.SelectedModel{
- Model: selectedModel.Model.ID,
- Provider: string(selectedModel.Provider.ID),
+ Model: selectedModel.Model.ID,
+ Provider: string(selectedModel.Provider.ID),
+ ReasoningEffort: selectedModel.Model.DefaultReasoningEffort,
+ MaxTokens: selectedModel.Model.DefaultMaxTokens,
},
ModelType: m.selectedModelType,
}),
diff --git a/internal/tui/components/dialogs/permissions/permissions.go b/internal/tui/components/dialogs/permissions/permissions.go
index 2633c0a2f1a50f78adf010214680c157f302073b..9e0a6b05d7385c354f8faba3110b1c0951f9a97d 100644
--- a/internal/tui/components/dialogs/permissions/permissions.go
+++ b/internal/tui/components/dialogs/permissions/permissions.go
@@ -1,6 +1,7 @@
package permissions
import (
+ "encoding/json"
"fmt"
"strings"
@@ -614,6 +615,35 @@ func (p *permissionDialogCmp) generateDefaultContent() string {
content := p.permission.Description
+ // Add pretty-printed JSON parameters for MCP tools
+ if p.permission.Params != nil {
+ var paramStr string
+
+ // Ensure params is a string
+ if str, ok := p.permission.Params.(string); ok {
+ paramStr = str
+ } else {
+ paramStr = fmt.Sprintf("%v", p.permission.Params)
+ }
+
+ // Try to parse as JSON for pretty printing
+ var parsed any
+ if err := json.Unmarshal([]byte(paramStr), &parsed); err == nil {
+ if b, err := json.MarshalIndent(parsed, "", " "); err == nil {
+ if content != "" {
+ content += "\n\n"
+ }
+ content += string(b)
+ }
+ } else {
+ // Not JSON, show as-is
+ if content != "" {
+ content += "\n\n"
+ }
+ content += paramStr
+ }
+ }
+
content = strings.TrimSpace(content)
content = "\n" + content + "\n"
lines := strings.Split(content, "\n")
diff --git a/internal/tui/components/dialogs/quit/keys.go b/internal/tui/components/dialogs/quit/keys.go
index 3268749b20c703ae1faf7640e253ce557f051c65..2e8dbc199264eb9221544319f81ef859d71e58b5 100644
--- a/internal/tui/components/dialogs/quit/keys.go
+++ b/internal/tui/components/dialogs/quit/keys.go
@@ -37,7 +37,7 @@ func DefaultKeymap() KeyMap {
key.WithHelp("tab", "switch options"),
),
Close: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
}
diff --git a/internal/tui/components/dialogs/reasoning/reasoning.go b/internal/tui/components/dialogs/reasoning/reasoning.go
new file mode 100644
index 0000000000000000000000000000000000000000..ba49abd8c58a0e7eb84235e7b68f5f5193a96b1b
--- /dev/null
+++ b/internal/tui/components/dialogs/reasoning/reasoning.go
@@ -0,0 +1,268 @@
+package reasoning
+
+import (
+ "github.com/charmbracelet/bubbles/v2/help"
+ "github.com/charmbracelet/bubbles/v2/key"
+ tea "github.com/charmbracelet/bubbletea/v2"
+ "github.com/charmbracelet/lipgloss/v2"
+
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/tui/components/core"
+ "github.com/charmbracelet/crush/internal/tui/components/dialogs"
+ "github.com/charmbracelet/crush/internal/tui/exp/list"
+ "github.com/charmbracelet/crush/internal/tui/styles"
+ "github.com/charmbracelet/crush/internal/tui/util"
+)
+
+const (
+ ReasoningDialogID dialogs.DialogID = "reasoning"
+
+ defaultWidth int = 50
+)
+
+type listModel = list.FilterableList[list.CompletionItem[EffortOption]]
+
+type EffortOption struct {
+ Title string
+ Effort string
+}
+
+type ReasoningDialog interface {
+ dialogs.DialogModel
+}
+
+type reasoningDialogCmp struct {
+ width int
+ wWidth int // Width of the terminal window
+ wHeight int // Height of the terminal window
+
+ effortList listModel
+ keyMap ReasoningDialogKeyMap
+ help help.Model
+}
+
+type ReasoningEffortSelectedMsg struct {
+ Effort string
+}
+
+type ReasoningDialogKeyMap struct {
+ Next key.Binding
+ Previous key.Binding
+ Select key.Binding
+ Close key.Binding
+}
+
+func DefaultReasoningDialogKeyMap() ReasoningDialogKeyMap {
+ return ReasoningDialogKeyMap{
+ Next: key.NewBinding(
+ key.WithKeys("down", "j", "ctrl+n"),
+ key.WithHelp("↓/j/ctrl+n", "next"),
+ ),
+ Previous: key.NewBinding(
+ key.WithKeys("up", "k", "ctrl+p"),
+ key.WithHelp("↑/k/ctrl+p", "previous"),
+ ),
+ Select: key.NewBinding(
+ key.WithKeys("enter"),
+ key.WithHelp("enter", "select"),
+ ),
+ Close: key.NewBinding(
+ key.WithKeys("esc", "ctrl+c"),
+ key.WithHelp("esc/ctrl+c", "close"),
+ ),
+ }
+}
+
+func (k ReasoningDialogKeyMap) ShortHelp() []key.Binding {
+ return []key.Binding{k.Select, k.Close}
+}
+
+func (k ReasoningDialogKeyMap) FullHelp() [][]key.Binding {
+ return [][]key.Binding{
+ {k.Next, k.Previous},
+ {k.Select, k.Close},
+ }
+}
+
+func NewReasoningDialog() ReasoningDialog {
+ keyMap := DefaultReasoningDialogKeyMap()
+ listKeyMap := list.DefaultKeyMap()
+ listKeyMap.Down.SetEnabled(false)
+ listKeyMap.Up.SetEnabled(false)
+ listKeyMap.DownOneItem = keyMap.Next
+ listKeyMap.UpOneItem = keyMap.Previous
+
+ t := styles.CurrentTheme()
+ inputStyle := t.S().Base.PaddingLeft(1).PaddingBottom(1)
+ effortList := list.NewFilterableList(
+ []list.CompletionItem[EffortOption]{},
+ list.WithFilterInputStyle(inputStyle),
+ list.WithFilterListOptions(
+ list.WithKeyMap(listKeyMap),
+ list.WithWrapNavigation(),
+ list.WithResizeByList(),
+ ),
+ )
+ help := help.New()
+ help.Styles = t.S().Help
+
+ return &reasoningDialogCmp{
+ effortList: effortList,
+ width: defaultWidth,
+ keyMap: keyMap,
+ help: help,
+ }
+}
+
+func (r *reasoningDialogCmp) Init() tea.Cmd {
+ return r.populateEffortOptions()
+}
+
+func (r *reasoningDialogCmp) populateEffortOptions() tea.Cmd {
+ cfg := config.Get()
+ if agentCfg, ok := cfg.Agents["coder"]; ok {
+ selectedModel := cfg.Models[agentCfg.Model]
+ model := cfg.GetModelByType(agentCfg.Model)
+
+ // Get current reasoning effort
+ currentEffort := selectedModel.ReasoningEffort
+ if currentEffort == "" && model != nil {
+ currentEffort = model.DefaultReasoningEffort
+ }
+
+ efforts := []EffortOption{
+ {
+ Title: "Low",
+ Effort: "low",
+ },
+ {
+ Title: "Medium",
+ Effort: "medium",
+ },
+ {
+ Title: "High",
+ Effort: "high",
+ },
+ }
+
+ effortItems := []list.CompletionItem[EffortOption]{}
+ selectedID := ""
+ for _, effort := range efforts {
+ opts := []list.CompletionItemOption{
+ list.WithCompletionID(effort.Effort),
+ }
+ if effort.Effort == currentEffort {
+ opts = append(opts, list.WithCompletionShortcut("current"))
+ selectedID = effort.Effort
+ }
+ effortItems = append(effortItems, list.NewCompletionItem(
+ effort.Title,
+ effort,
+ opts...,
+ ))
+ }
+
+ cmd := r.effortList.SetItems(effortItems)
+ // Set the current effort as the selected item
+ if currentEffort != "" && selectedID != "" {
+ return tea.Sequence(cmd, r.effortList.SetSelected(selectedID))
+ }
+ return cmd
+ }
+ return nil
+}
+
+func (r *reasoningDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
+ switch msg := msg.(type) {
+ case tea.WindowSizeMsg:
+ r.wWidth = msg.Width
+ r.wHeight = msg.Height
+ return r, r.effortList.SetSize(r.listWidth(), r.listHeight())
+ case tea.KeyPressMsg:
+ switch {
+ case key.Matches(msg, r.keyMap.Select):
+ selectedItem := r.effortList.SelectedItem()
+ if selectedItem == nil {
+ return r, nil // No item selected, do nothing
+ }
+ effort := (*selectedItem).Value()
+ return r, tea.Sequence(
+ util.CmdHandler(dialogs.CloseDialogMsg{}),
+ func() tea.Msg {
+ return ReasoningEffortSelectedMsg{
+ Effort: effort.Effort,
+ }
+ },
+ )
+ case key.Matches(msg, r.keyMap.Close):
+ return r, util.CmdHandler(dialogs.CloseDialogMsg{})
+ default:
+ u, cmd := r.effortList.Update(msg)
+ r.effortList = u.(listModel)
+ return r, cmd
+ }
+ }
+ return r, nil
+}
+
+func (r *reasoningDialogCmp) View() string {
+ t := styles.CurrentTheme()
+ listView := r.effortList
+
+ header := t.S().Base.Padding(0, 1, 1, 1).Render(core.Title("Select Reasoning Effort", r.width-4))
+ content := lipgloss.JoinVertical(
+ lipgloss.Left,
+ header,
+ listView.View(),
+ "",
+ t.S().Base.Width(r.width-2).PaddingLeft(1).AlignHorizontal(lipgloss.Left).Render(r.help.View(r.keyMap)),
+ )
+ return r.style().Render(content)
+}
+
+func (r *reasoningDialogCmp) Cursor() *tea.Cursor {
+ if cursor, ok := r.effortList.(util.Cursor); ok {
+ cursor := cursor.Cursor()
+ if cursor != nil {
+ cursor = r.moveCursor(cursor)
+ }
+ return cursor
+ }
+ return nil
+}
+
+func (r *reasoningDialogCmp) listWidth() int {
+ return r.width - 2 // 2 for the dialog border
+}
+
+func (r *reasoningDialogCmp) listHeight() int {
+ listHeight := len(r.effortList.Items()) + 2 + 4 // height based on items + 2 for the input + 4 for the sections
+ return min(listHeight, r.wHeight/2)
+}
+
+func (r *reasoningDialogCmp) moveCursor(cursor *tea.Cursor) *tea.Cursor {
+ row, col := r.Position()
+ offset := row + 3
+ cursor.Y += offset
+ cursor.X = cursor.X + col + 2
+ return cursor
+}
+
+func (r *reasoningDialogCmp) style() lipgloss.Style {
+ t := styles.CurrentTheme()
+ return t.S().Base.
+ Width(r.width).
+ Border(lipgloss.RoundedBorder()).
+ BorderForeground(t.BorderFocus)
+}
+
+func (r *reasoningDialogCmp) Position() (int, int) {
+ row := r.wHeight/4 - 2 // just a bit above the center
+ col := r.wWidth / 2
+ col -= r.width / 2
+ return row, col
+}
+
+func (r *reasoningDialogCmp) ID() dialogs.DialogID {
+ return ReasoningDialogID
+}
diff --git a/internal/tui/components/dialogs/sessions/keys.go b/internal/tui/components/dialogs/sessions/keys.go
index a3ca4b31f0c04c491fa7990f7e69ac546f608a7d..73c50899f7ae7da3655fc8a3e3a3dd34c4c22f95 100644
--- a/internal/tui/components/dialogs/sessions/keys.go
+++ b/internal/tui/components/dialogs/sessions/keys.go
@@ -15,7 +15,7 @@ func DefaultKeyMap() KeyMap {
return KeyMap{
Select: key.NewBinding(
key.WithKeys("enter", "tab", "ctrl+y"),
- key.WithHelp("enter", "confirm"),
+ key.WithHelp("enter", "choose"),
),
Next: key.NewBinding(
key.WithKeys("down", "ctrl+n"),
@@ -26,8 +26,8 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("↑", "previous item"),
),
Close: key.NewBinding(
- key.WithKeys("esc"),
- key.WithHelp("esc", "cancel"),
+ key.WithKeys("esc", "alt+esc"),
+ key.WithHelp("esc", "exit"),
),
}
}
diff --git a/internal/tui/components/dialogs/sessions/sessions.go b/internal/tui/components/dialogs/sessions/sessions.go
index 4e5cbdef7fdb42f4c667de7ac5bdd5066e7be4df..037eb5ebb727a24b8ab9bfda2e2c72943120e819 100644
--- a/internal/tui/components/dialogs/sessions/sessions.go
+++ b/internal/tui/components/dialogs/sessions/sessions.go
@@ -4,6 +4,7 @@ import (
"github.com/charmbracelet/bubbles/v2/help"
"github.com/charmbracelet/bubbles/v2/key"
tea "github.com/charmbracelet/bubbletea/v2"
+ "github.com/charmbracelet/crush/internal/event"
"github.com/charmbracelet/crush/internal/session"
"github.com/charmbracelet/crush/internal/tui/components/chat"
"github.com/charmbracelet/crush/internal/tui/components/core"
@@ -99,6 +100,7 @@ func (s *sessionDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
selectedItem := s.sessionsList.SelectedItem()
if selectedItem != nil {
selected := *selectedItem
+ event.SessionSwitched()
return s, tea.Sequence(
util.CmdHandler(dialogs.CloseDialogMsg{}),
util.CmdHandler(
diff --git a/internal/tui/components/lsp/lsp.go b/internal/tui/components/lsp/lsp.go
index f2546c945e436ca196064dda5b50d35583d5b2ab..0c0384e91c36744b8f318f9bbc71e5e076a26abf 100644
--- a/internal/tui/components/lsp/lsp.go
+++ b/internal/tui/components/lsp/lsp.go
@@ -4,14 +4,14 @@ import (
"fmt"
"strings"
- "github.com/charmbracelet/lipgloss/v2"
-
"github.com/charmbracelet/crush/internal/app"
"github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/csync"
"github.com/charmbracelet/crush/internal/lsp"
- "github.com/charmbracelet/crush/internal/lsp/protocol"
"github.com/charmbracelet/crush/internal/tui/components/core"
"github.com/charmbracelet/crush/internal/tui/styles"
+ "github.com/charmbracelet/lipgloss/v2"
+ "github.com/charmbracelet/x/powernap/pkg/lsp/protocol"
)
// RenderOptions contains options for rendering LSP lists.
@@ -23,7 +23,7 @@ type RenderOptions struct {
}
// RenderLSPList renders a list of LSP status items with the given options.
-func RenderLSPList(lspClients map[string]*lsp.Client, opts RenderOptions) []string {
+func RenderLSPList(lspClients *csync.Map[string, *lsp.Client], opts RenderOptions) []string {
t := styles.CurrentTheme()
lspList := []string{}
@@ -56,29 +56,7 @@ func RenderLSPList(lspClients map[string]*lsp.Client, opts RenderOptions) []stri
break
}
- // Determine icon color and description based on state
- icon := t.ItemOfflineIcon
- description := l.LSP.Command
-
- if l.LSP.Disabled {
- description = t.S().Subtle.Render("disabled")
- } else if state, exists := lspStates[l.Name]; exists {
- switch state.State {
- case lsp.StateStarting:
- icon = t.ItemBusyIcon
- description = t.S().Subtle.Render("starting...")
- case lsp.StateReady:
- icon = t.ItemOnlineIcon
- description = l.LSP.Command
- case lsp.StateError:
- icon = t.ItemErrorIcon
- if state.Error != nil {
- description = t.S().Subtle.Render(fmt.Sprintf("error: %s", state.Error.Error()))
- } else {
- description = t.S().Subtle.Render("error")
- }
- }
- }
+ icon, description := iconAndDescription(l, t, lspStates)
// Calculate diagnostic counts if we have LSP clients
var extraContent string
@@ -89,7 +67,7 @@ func RenderLSPList(lspClients map[string]*lsp.Client, opts RenderOptions) []stri
protocol.SeverityHint: 0,
protocol.SeverityInformation: 0,
}
- if client, ok := lspClients[l.Name]; ok {
+ if client, ok := lspClients.Get(l.Name); ok {
for _, diagnostics := range client.GetDiagnostics() {
for _, diagnostic := range diagnostics {
if severity, ok := lspErrs[diagnostic.Severity]; ok {
@@ -131,8 +109,32 @@ func RenderLSPList(lspClients map[string]*lsp.Client, opts RenderOptions) []stri
return lspList
}
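+// iconAndDescription resolves the status icon and description for an LSP entry from its config and current client state.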
+func iconAndDescription(l config.LSP, t *styles.Theme, states map[string]app.LSPClientInfo) (lipgloss.Style, string) {
+ if l.LSP.Disabled {
+ return t.ItemOfflineIcon.Foreground(t.FgMuted), t.S().Subtle.Render("disabled")
+ }
+
+ info := states[l.Name]
+ switch info.State {
+ case lsp.StateStarting:
+ return t.ItemBusyIcon, t.S().Subtle.Render("starting...")
+ case lsp.StateReady:
+ return t.ItemOnlineIcon, ""
+ case lsp.StateError:
+ description := t.S().Subtle.Render("error")
+ if info.Error != nil {
+ description = t.S().Subtle.Render(fmt.Sprintf("error: %s", info.Error.Error()))
+ }
+ return t.ItemErrorIcon, description
+ case lsp.StateDisabled:
+ return t.ItemOfflineIcon.Foreground(t.FgMuted), t.S().Subtle.Render("inactive")
+ default:
+ return t.ItemOfflineIcon, ""
+ }
+}
+
// RenderLSPBlock renders a complete LSP block with optional truncation indicator.
-func RenderLSPBlock(lspClients map[string]*lsp.Client, opts RenderOptions, showTruncationIndicator bool) string {
+func RenderLSPBlock(lspClients *csync.Map[string, *lsp.Client], opts RenderOptions, showTruncationIndicator bool) string {
t := styles.CurrentTheme()
lspList := RenderLSPList(lspClients, opts)
diff --git a/internal/tui/components/mcp/mcp.go b/internal/tui/components/mcp/mcp.go
index d11826b77749ba65276b5336a5d88cdbc8552881..fd3bd012732397538cc263b2eff92ae617e866d8 100644
--- a/internal/tui/components/mcp/mcp.go
+++ b/internal/tui/components/mcp/mcp.go
@@ -55,7 +55,7 @@ func RenderMCPList(opts RenderOptions) []string {
// Determine icon and color based on state
icon := t.ItemOfflineIcon
- description := l.MCP.Command
+ description := ""
extraContent := ""
if state, exists := mcpStates[l.Name]; exists {
diff --git a/internal/tui/exp/diffview/diffview.go b/internal/tui/exp/diffview/diffview.go
index eaea2837fcaa7522294143f0385bcbb0879316bd..cda4b74b28843beda36bda17c4fbf29137017422 100644
--- a/internal/tui/exp/diffview/diffview.go
+++ b/internal/tui/exp/diffview/diffview.go
@@ -408,7 +408,7 @@ func (dv *DiffView) renderUnified() string {
content = ansi.GraphemeWidth.Cut(content, dv.xOffset, len(content))
content = ansi.Truncate(content, dv.codeWidth, "…")
leadingEllipsis = dv.xOffset > 0 && strings.TrimSpace(content) != ""
- return
+ return content, leadingEllipsis
}
outer:
@@ -531,7 +531,7 @@ func (dv *DiffView) renderSplit() string {
content = ansi.GraphemeWidth.Cut(content, dv.xOffset, len(content))
content = ansi.Truncate(content, dv.codeWidth, "…")
leadingEllipsis = dv.xOffset > 0 && strings.TrimSpace(content) != ""
- return
+ return content, leadingEllipsis
}
outer:
@@ -716,7 +716,7 @@ func (dv *DiffView) hunkShownLines(h *udiff.Hunk) (before, after int) {
before++
}
}
- return
+ return before, after
}
func (dv *DiffView) lineStyleForType(t udiff.OpKind) LineStyle {
diff --git a/internal/tui/exp/diffview/split.go b/internal/tui/exp/diffview/split.go
index ed4fedb543e7ea34f72f20e3c255a85706a4abcf..5a0f13116160b8c7178293944e3a45e4fcb7d8d3 100644
--- a/internal/tui/exp/diffview/split.go
+++ b/internal/tui/exp/diffview/split.go
@@ -69,5 +69,5 @@ func hunkToSplit(h *udiff.Hunk) (sh splitHunk) {
sh.lines = append(sh.lines, &sl)
}
- return
+ return sh
}
diff --git a/internal/tui/exp/list/filterable_group.go b/internal/tui/exp/list/filterable_group.go
index 57aa3da3b19420a5635e35419d2865ea033eb27f..6e9a5dc7eaad66d32ec34baf7e41d35ab3233048 100644
--- a/internal/tui/exp/list/filterable_group.go
+++ b/internal/tui/exp/list/filterable_group.go
@@ -180,11 +180,6 @@ func (f *filterableGroupList[T]) inputHeight() int {
return lipgloss.Height(f.inputStyle.Render(f.input.View()))
}
-type groupMatch[T FilterableItem] struct {
- group Group[T]
- score int
-}
-
func (f *filterableGroupList[T]) clearItemState() []tea.Cmd {
var cmds []tea.Cmd
for _, item := range slices.Collect(f.items.Seq()) {
@@ -211,25 +206,6 @@ func (f *filterableGroupList[T]) setMatchIndexes(item T, indexes []int) {
}
}
-func (f *filterableGroupList[T]) findMatchingGroups(firstWord string) []groupMatch[T] {
- var matchedGroups []groupMatch[T]
- for _, g := range f.groups {
- groupName := f.getGroupName(g)
- matches := fuzzy.Find(firstWord, []string{groupName})
- if len(matches) > 0 && matches[0].Score > 0 {
- matchedGroups = append(matchedGroups, groupMatch[T]{
- group: g,
- score: matches[0].Score,
- })
- }
- }
- // Sort by score (higher scores first - exact matches will have higher scores)
- sort.SliceStable(matchedGroups, func(i, j int) bool {
- return matchedGroups[i].score > matchedGroups[j].score
- })
- return matchedGroups
-}
-
func (f *filterableGroupList[T]) filterItemsInGroup(group Group[T], query string) []T {
if query == "" {
// No query, return all items with cleared match indexes
@@ -241,24 +217,31 @@ func (f *filterableGroupList[T]) filterItemsInGroup(group Group[T], query string
return items
}
- // Build search words
- words := make([]string, len(group.Items))
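+ // prefix each item's filter value with the group name so a query can match on either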
+ name := f.getGroupName(group) + " "
+
+ names := make([]string, len(group.Items))
for i, item := range group.Items {
- words[i] = strings.ToLower(item.FilterValue())
+ names[i] = strings.ToLower(name + item.FilterValue())
}
- // Perform fuzzy search
- matches := fuzzy.Find(query, words)
+ matches := fuzzy.Find(query, names)
sort.SliceStable(matches, func(i, j int) bool {
return matches[i].Score > matches[j].Score
})
if len(matches) > 0 {
- // Found matches, return only those with highlights
var matchedItems []T
for _, match := range matches {
item := group.Items[match.Index]
- f.setMatchIndexes(item, match.MatchedIndexes)
+ var idxs []int
+ for _, idx := range match.MatchedIndexes {
+ // skip highlights that fall inside the group-name prefix and re-base the rest onto the item
+ if idx < len(name) {
+ continue
+ }
+ idxs = append(idxs, idx-len(name))
+ }
+ f.setMatchIndexes(item, idxs)
matchedItems = append(matchedItems, item)
}
return matchedItems
@@ -267,20 +250,6 @@ func (f *filterableGroupList[T]) filterItemsInGroup(group Group[T], query string
return []T{}
}
-func (f *filterableGroupList[T]) searchAllGroups(query string) []Group[T] {
- var newGroups []Group[T]
- for _, g := range f.groups {
- matchedItems := f.filterItemsInGroup(g, query)
- if len(matchedItems) > 0 {
- newGroups = append(newGroups, Group[T]{
- Section: g.Section,
- Items: matchedItems,
- })
- }
- }
- return newGroups
-}
-
func (f *filterableGroupList[T]) Filter(query string) tea.Cmd {
cmds := f.clearItemState()
f.selectedItem = ""
@@ -289,51 +258,24 @@ func (f *filterableGroupList[T]) Filter(query string) tea.Cmd {
return f.groupedList.SetGroups(f.groups)
}
- lowerQuery := strings.ToLower(query)
- queryWords := strings.Fields(lowerQuery)
- firstWord := queryWords[0]
+ query = strings.ToLower(strings.ReplaceAll(query, " ", ""))
- // Find groups that match the first word
- matchedGroups := f.findMatchingGroups(firstWord)
-
- var newGroups []Group[T]
- if len(matchedGroups) > 0 {
- // Filter within matched groups using remaining words
- remainingQuery := ""
- if len(queryWords) > 1 {
- remainingQuery = strings.Join(queryWords[1:], " ")
- }
-
- for _, matchedGroup := range matchedGroups {
- matchedItems := f.filterItemsInGroup(matchedGroup.group, remainingQuery)
- if len(matchedItems) > 0 {
- newGroups = append(newGroups, Group[T]{
- Section: matchedGroup.group.Section,
- Items: matchedItems,
- })
- }
+ var result []Group[T]
+ for _, g := range f.groups {
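+ // a query that matches the group name keeps the whole group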
+ if matches := fuzzy.Find(query, []string{f.getGroupName(g)}); len(matches) > 0 && matches[0].Score > 0 {
+ result = append(result, g)
+ continue
}
-
- // add any matching items from other groups
- allGroups := f.searchAllGroups(lowerQuery)
- for _, g := range allGroups {
- exists := false
- for _, existing := range newGroups {
- if existing.Section.ID() == g.Section.ID() {
- exists = true
- break
- }
- }
- if !exists {
- newGroups = append(newGroups, g)
- }
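+ // otherwise keep only the items in the group that match the query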
+ matchedItems := f.filterItemsInGroup(g, query)
+ if len(matchedItems) > 0 {
+ result = append(result, Group[T]{
+ Section: g.Section,
+ Items: matchedItems,
+ })
}
- } else {
- // No group matches, search all groups
- newGroups = f.searchAllGroups(lowerQuery)
}
- cmds = append(cmds, f.groupedList.SetGroups(newGroups))
+ cmds = append(cmds, f.groupedList.SetGroups(result))
return tea.Batch(cmds...)
}
diff --git a/internal/tui/exp/list/list.go b/internal/tui/exp/list/list.go
index 8995e0360a6a72868d0819214a410257d1c8fa2b..fd789f90b89b016abb9b9fb5c79227da7ef30fd9 100644
--- a/internal/tui/exp/list/list.go
+++ b/internal/tui/exp/list/list.go
@@ -1372,7 +1372,7 @@ func (l *list[T]) findWordBoundaries(col, line int) (startCol, endCol int) {
if startCol == -1 {
return 0, 0
}
- return
+ return startCol, endCol
}
func (l *list[T]) findParagraphBoundaries(line int) (startLine, endLine int, found bool) {
diff --git a/internal/tui/page/chat/chat.go b/internal/tui/page/chat/chat.go
index c843ce7b8cf3702eac4a9ce1b081204fe73f05c5..2918925068cb2f012bead47bbf44260c6255288c 100644
--- a/internal/tui/page/chat/chat.go
+++ b/internal/tui/page/chat/chat.go
@@ -9,6 +9,7 @@ import (
"github.com/charmbracelet/bubbles/v2/key"
"github.com/charmbracelet/bubbles/v2/spinner"
tea "github.com/charmbracelet/bubbletea/v2"
+ "github.com/charmbracelet/catwalk/pkg/catwalk"
"github.com/charmbracelet/crush/internal/app"
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/history"
@@ -26,9 +27,11 @@ import (
"github.com/charmbracelet/crush/internal/tui/components/completions"
"github.com/charmbracelet/crush/internal/tui/components/core"
"github.com/charmbracelet/crush/internal/tui/components/core/layout"
+ "github.com/charmbracelet/crush/internal/tui/components/dialogs"
"github.com/charmbracelet/crush/internal/tui/components/dialogs/commands"
"github.com/charmbracelet/crush/internal/tui/components/dialogs/filepicker"
"github.com/charmbracelet/crush/internal/tui/components/dialogs/models"
+ "github.com/charmbracelet/crush/internal/tui/components/dialogs/reasoning"
"github.com/charmbracelet/crush/internal/tui/page"
"github.com/charmbracelet/crush/internal/tui/styles"
"github.com/charmbracelet/crush/internal/tui/util"
@@ -255,6 +258,10 @@ func (p *chatPage) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return p, tea.Batch(p.SetSize(p.width, p.height), cmd)
case commands.ToggleThinkingMsg:
return p, p.toggleThinking()
+ case commands.OpenReasoningDialogMsg:
+ return p, p.openReasoningDialog()
+ case reasoning.ReasoningEffortSelectedMsg:
+ return p, p.handleReasoningEffortSelected(msg.Effort)
case commands.OpenExternalEditorMsg:
u, cmd := p.editor.Update(msg)
p.editor = u.(editor.Editor)
@@ -549,6 +556,49 @@ func (p *chatPage) toggleThinking() tea.Cmd {
}
}
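+// openReasoningDialog opens the reasoning-effort dialog when the current coder model is an OpenAI model that supports reasoning effort.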
+func (p *chatPage) openReasoningDialog() tea.Cmd {
+ return func() tea.Msg {
+ cfg := config.Get()
+ agentCfg := cfg.Agents["coder"]
+ model := cfg.GetModelByType(agentCfg.Model)
+ providerCfg := cfg.GetProviderForModel(agentCfg.Model)
+
+ if providerCfg != nil && model != nil &&
+ providerCfg.Type == catwalk.TypeOpenAI && model.HasReasoningEffort {
+ // Return the OpenDialogMsg directly so it bubbles up to the main TUI
+ return dialogs.OpenDialogMsg{
+ Model: reasoning.NewReasoningDialog(),
+ }
+ }
+ return nil
+ }
+}
+
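+// handleReasoningEffortSelected stores the selected reasoning effort on the coder model's config and reconfigures the agent.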
+func (p *chatPage) handleReasoningEffortSelected(effort string) tea.Cmd {
+ return func() tea.Msg {
+ cfg := config.Get()
+ agentCfg := cfg.Agents["coder"]
+ currentModel := cfg.Models[agentCfg.Model]
+
+ // Update the model configuration
+ currentModel.ReasoningEffort = effort
+ cfg.Models[agentCfg.Model] = currentModel
+
+ // Update the agent with the new configuration
+ if err := p.app.UpdateAgentModel(); err != nil {
+ return util.InfoMsg{
+ Type: util.InfoTypeError,
+ Msg: "Failed to update reasoning effort: " + err.Error(),
+ }
+ }
+
+ return util.InfoMsg{
+ Type: util.InfoTypeInfo,
+ Msg: "Reasoning effort set to " + effort,
+ }
+ }
+}
+
func (p *chatPage) setCompactMode(compact bool) {
if p.compact == compact {
return
@@ -716,7 +766,7 @@ func (p *chatPage) Bindings() []key.Binding {
cancelBinding := p.keyMap.Cancel
if p.isCanceling {
cancelBinding = key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "press again to cancel"),
)
}
@@ -785,7 +835,7 @@ func (p *chatPage) Help() help.KeyMap {
shortList = append(shortList,
// Go back
key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "back"),
),
)
@@ -820,7 +870,7 @@ func (p *chatPage) Help() help.KeyMap {
key.WithHelp("tab/enter", "complete"),
),
key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
key.NewBinding(
@@ -835,18 +885,18 @@ func (p *chatPage) Help() help.KeyMap {
}
if p.app.CoderAgent != nil && p.app.CoderAgent.IsBusy() {
cancelBinding := key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
)
if p.isCanceling {
cancelBinding = key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "press again to cancel"),
)
}
if p.app.CoderAgent != nil && p.app.CoderAgent.QueuedPrompts(p.session.ID) > 0 {
cancelBinding = key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "clear queue"),
)
}
@@ -992,7 +1042,7 @@ func (p *chatPage) Help() help.KeyMap {
key.WithHelp("ctrl+r+r", "delete all attachments"),
),
key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel delete mode"),
),
})
diff --git a/internal/tui/page/chat/keys.go b/internal/tui/page/chat/keys.go
index ef896aaab10fe36ee8ce88d3f70a3f03e3c61d3e..679a97c69522c0e831e59bddc7b0c1ddcc55fbb9 100644
--- a/internal/tui/page/chat/keys.go
+++ b/internal/tui/page/chat/keys.go
@@ -23,7 +23,7 @@ func DefaultKeyMap() KeyMap {
key.WithHelp("ctrl+f", "add attachment"),
),
Cancel: key.NewBinding(
- key.WithKeys("esc"),
+ key.WithKeys("esc", "alt+esc"),
key.WithHelp("esc", "cancel"),
),
Tab: key.NewBinding(
diff --git a/internal/tui/tui.go b/internal/tui/tui.go
index 0986aca31dcd779ca6fe611e1d71eff8ad6908e9..74d82e15514c70ee96b507a01b8f611d3ade6a4d 100644
--- a/internal/tui/tui.go
+++ b/internal/tui/tui.go
@@ -3,6 +3,7 @@ package tui
import (
"context"
"fmt"
+ "math/rand"
"strings"
"time"
@@ -10,6 +11,7 @@ import (
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/crush/internal/app"
"github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/event"
"github.com/charmbracelet/crush/internal/llm/agent"
"github.com/charmbracelet/crush/internal/permission"
"github.com/charmbracelet/crush/internal/pubsub"
@@ -196,6 +198,7 @@ func (a *appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
if a.app.CoderAgent.IsBusy() {
return a, util.ReportWarn("Agent is busy, please wait...")
}
+
config.Get().UpdatePreferredModel(msg.ModelType, msg.Model)
// Update the agent with the new model/provider configuration
@@ -211,6 +214,8 @@ func (a *appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
// File Picker
case commands.OpenFilePickerMsg:
+ event.FilePickerOpened()
+
if a.dialog.ActiveDialogID() == filepicker.FilePickerID {
// If the commands dialog is already open, close it
return a, util.CmdHandler(dialogs.CloseDialogMsg{})
@@ -597,6 +602,12 @@ func (a *appModel) View() tea.View {
view.Layer = canvas
view.Cursor = cursor
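+ // show no progress bar by default; switch to an indeterminate one while the agent is busy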
+ view.ProgressBar = tea.NewProgressBar(tea.ProgressBarNone, 0)
+ if a.app.CoderAgent.IsBusy() {
+ // use a random percentage to prevent Ghostty from hiding the
+ // progress bar after a timeout.
+ view.ProgressBar = tea.NewProgressBar(tea.ProgressBarIndeterminate, rand.Intn(100))
+ }
return view
}
diff --git a/main.go b/main.go
index 072e3b35d2a2f408d8ed6a09423712b324df8b96..e75cb03e3575cf902c2ff4b44ddd15e0405f0b60 100644
--- a/main.go
+++ b/main.go
@@ -3,21 +3,14 @@ package main
import (
"log/slog"
"net/http"
+ _ "net/http/pprof"
"os"
- _ "net/http/pprof" // profiling
-
- _ "github.com/joho/godotenv/autoload" // automatically load .env files
-
"github.com/charmbracelet/crush/internal/cmd"
- "github.com/charmbracelet/crush/internal/log"
+ _ "github.com/joho/godotenv/autoload"
)
func main() {
- defer log.RecoverPanic("main", func() {
- slog.Error("Application terminated due to unhandled panic")
- })
-
if os.Getenv("CRUSH_PROFILE") != "" {
go func() {
slog.Info("Serving pprof at localhost:6060")
diff --git a/schema.json b/schema.json
index 060f9738884da739a186898d859ac5618c35b5b8..014155f1b1f22309ec6381f44c41e97b3b3825dc 100644
--- a/schema.json
+++ b/schema.json
@@ -3,6 +3,44 @@
"$id": "https://github.com/charmbracelet/crush/internal/config/config",
"$ref": "#/$defs/Config",
"$defs": {
+ "Attribution": {
+ "properties": {
+ "co_authored_by": {
+ "type": "boolean",
+ "description": "Add Co-Authored-By trailer to commit messages",
+ "default": true
+ },
+ "generated_with": {
+ "type": "boolean",
+ "description": "Add Generated with Crush line to commit messages and issues and PRs",
+ "default": true
+ }
+ },
+ "additionalProperties": false,
+ "type": "object"
+ },
+ "Completions": {
+ "properties": {
+ "max_depth": {
+ "type": "integer",
+ "description": "Maximum depth for the ls tool",
+ "default": 0,
+ "examples": [
+ 10
+ ]
+ },
+ "max_items": {
+ "type": "integer",
+ "description": "Maximum number of items to return for the ls tool",
+ "default": 1000,
+ "examples": [
+ 100
+ ]
+ }
+ },
+ "additionalProperties": false,
+ "type": "object"
+ },
"Config": {
"properties": {
"$schema": {
@@ -37,14 +75,21 @@
"permissions": {
"$ref": "#/$defs/Permissions",
"description": "Permission settings for tool usage"
+ },
+ "tools": {
+ "$ref": "#/$defs/Tools",
+ "description": "Tool configurations"
}
},
"additionalProperties": false,
- "type": "object"
+ "type": "object",
+ "required": [
+ "tools"
+ ]
},
"LSPConfig": {
"properties": {
- "enabled": {
+ "disabled": {
"type": "boolean",
"description": "Whether this LSP server is disabled",
"default": false
@@ -70,9 +115,6 @@
"type": "object",
"description": "Environment variables to set to the LSP server command"
},
- "options": {
- "description": "LSP server-specific configuration options"
- },
"filetypes": {
"items": {
"type": "string",
@@ -87,6 +129,26 @@
},
"type": "array",
"description": "File types this LSP server handles"
+ },
+ "root_markers": {
+ "items": {
+ "type": "string",
+ "examples": [
+ "go.mod",
+ "package.json",
+ "Cargo.toml"
+ ]
+ },
+ "type": "array",
+ "description": "Files or directories that indicate the project root"
+ },
+ "init_options": {
+ "type": "object",
+ "description": "Initialization options passed to the LSP server during initialize request"
+ },
+ "options": {
+ "type": "object",
+ "description": "LSP server-specific settings passed during initialization"
}
},
"additionalProperties": false,
@@ -278,6 +340,20 @@
},
"type": "array",
"description": "Tools to disable"
+ },
+ "disable_provider_auto_update": {
+ "type": "boolean",
+ "description": "Disable providers auto-update",
+ "default": false
+ },
+ "attribution": {
+ "$ref": "#/$defs/Attribution",
+ "description": "Attribution settings for generated content"
+ },
+ "disable_metrics": {
+ "type": "boolean",
+ "description": "Disable sending metrics",
+ "default": false
}
},
"additionalProperties": false,
@@ -437,10 +513,51 @@
"split"
],
"description": "Diff mode for the TUI interface"
+ },
+ "completions": {
+ "$ref": "#/$defs/Completions",
+ "description": "Completions UI options"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "completions"
+ ]
+ },
+ "ToolLs": {
+ "properties": {
+ "max_depth": {
+ "type": "integer",
+ "description": "Maximum depth for the ls tool",
+ "default": 0,
+ "examples": [
+ 10
+ ]
+ },
+ "max_items": {
+ "type": "integer",
+ "description": "Maximum number of items to return for the ls tool",
+ "default": 1000,
+ "examples": [
+ 100
+ ]
}
},
"additionalProperties": false,
"type": "object"
+ },
+ "Tools": {
+ "properties": {
+ "ls": {
+ "$ref": "#/$defs/ToolLs"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "ls"
+ ]
}
}
}
diff --git a/scripts/run-issue-labeler.sh b/scripts/run-labeler.sh
similarity index 56%
rename from scripts/run-issue-labeler.sh
rename to scripts/run-labeler.sh
index 041a95e29d5e9ac1ee0da39873d0a7c2ef24375d..4ac4bc2a5f2fda35ee439b69923e74e0bce1578f 100755
--- a/scripts/run-issue-labeler.sh
+++ b/scripts/run-labeler.sh
@@ -2,11 +2,11 @@ ISSUES=$(gh issue list --state=all --limit=1000 --json "number" -t '{{range .}}{
PRS=$(gh pr list --state=all --limit=1000 --json "number" -t '{{range .}}{{printf "%.0f\n" .number}}{{end}}')
for issue in $ISSUES; do
- echo "Dispatching issue-labeler.yml for $issue"
- gh workflow run issue-labeler.yml -f issue-number="$issue"
+ echo "Dispatching labeler.yml for $issue"
+ gh workflow run labeler.yml -f issue-number="$issue"
done
for pr in $PRS; do
- echo "Dispatching issue-labeler.yml for $pr"
- gh workflow run issue-labeler.yml -f issue-number="$pr"
+ echo "Dispatching labeler.yml for $pr"
+ gh workflow run labeler.yml -f issue-number="$pr"
done